"""
Functions for creating and restoring url-safe signed JSON objects.

The format used looks like this:

>>> signing.dumps("hello")
'ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk'

There are three components here, separated by ':'. The first component is a
URL-safe base64 encoded JSON of the object passed to dumps(). The second
component is a base62 encoded timestamp, added by the TimestampSigner that
dumps() uses. The third component is a URL-safe base64 encoded hmac/SHA-256
hash of "$first_component:$second_component", keyed with a salted SECRET_KEY.

signing.loads(s) checks the signature and returns the deserialized object.
If the signature fails, a BadSignature exception is raised.

>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk")
'hello'
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv42-modified")
...
BadSignature: Signature "ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv42-modified" does not match

You can optionally compress the JSON prior to base64 encoding it to save
space, using the compress=True argument. This checks if compression actually
helps and only applies compression if the result is a shorter string:

>>> signing.dumps(list(range(1, 20)), compress=True)
'.eJwFwcERACAIwLCF-rCiILN47r-GyZVJsNgkxaFxoDgxcOHGxMKD_T7vhAml:1QaUaL:BA0thEZrp4FQVXIXuOvYJtLJSrQ'

The fact that the string is compressed is signalled by the prefixed '.' at the
start of the base64 JSON.

There are 65 url-safe characters: the 64 used by url-safe base64 and the ':'.
These functions make use of all of them.
"""
import base64
import datetime
import json
import time
import warnings
import zlib
from django.conf import settings
from django.utils.crypto import constant_time_compare, salted_hmac
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.encoding import force_bytes
from django.utils.module_loading import import_string
from django.utils.regex_helper import _lazy_re_compile
_SEP_UNSAFE = _lazy_re_compile(r"^[A-z0-9-_=]*$")
BASE62_ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
class BadSignature(Exception):
"""Signature does not match."""
pass
class SignatureExpired(BadSignature):
"""Signature timestamp is older than required max_age."""
pass
def b62_encode(s):
if s == 0:
return "0"
sign = "-" if s < 0 else ""
s = abs(s)
encoded = ""
while s > 0:
s, remainder = divmod(s, 62)
encoded = BASE62_ALPHABET[remainder] + encoded
return sign + encoded
def b62_decode(s):
if s == "0":
return 0
sign = 1
if s[0] == "-":
s = s[1:]
sign = -1
decoded = 0
for digit in s:
decoded = decoded * 62 + BASE62_ALPHABET.index(digit)
return sign * decoded
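# Usage sketch (illustrative; values verifiable by hand): base62 round-trips
# arbitrary integers and is used below to shorten the signed timestamp.
#
#   >>> b62_encode(1000)
#   'G8'
#   >>> b62_decode("G8")
#   1000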
def b64_encode(s):
return base64.urlsafe_b64encode(s).strip(b"=")
def b64_decode(s):
pad = b"=" * (-len(s) % 4)
return base64.urlsafe_b64decode(s + pad)
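# Usage sketch (illustrative): the "=" padding is stripped on encode and
# recomputed on decode, keeping signed values free of padding characters.
#
#   >>> b64_encode(b"hello")
#   b'aGVsbG8'
#   >>> b64_decode(b"aGVsbG8")
#   b'hello'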
def base64_hmac(salt, value, key, algorithm="sha1"):
return b64_encode(
salted_hmac(salt, value, key, algorithm=algorithm).digest()
).decode()
def _cookie_signer_key(key):
# SECRET_KEYS items may be str or bytes.
return b"django.http.cookies" + force_bytes(key)
def get_cookie_signer(salt="django.core.signing.get_cookie_signer"):
Signer = import_string(settings.SIGNING_BACKEND)
return Signer(
key=_cookie_signer_key(settings.SECRET_KEY),
fallback_keys=map(_cookie_signer_key, settings.SECRET_KEY_FALLBACKS),
salt=salt,
)
class JSONSerializer:
"""
Simple wrapper around json to be used in signing.dumps and
signing.loads.
"""
def dumps(self, obj):
return json.dumps(obj, separators=(",", ":")).encode("latin-1")
def loads(self, data):
return json.loads(data.decode("latin-1"))
def dumps(
obj, key=None, salt="django.core.signing", serializer=JSONSerializer, compress=False
):
"""
Return URL-safe, hmac signed base64 compressed JSON string. If key is
None, use settings.SECRET_KEY instead. The hmac algorithm is the default
Signer algorithm.
If compress is True (not the default), check if compressing using zlib can
save some space. Prepend a '.' to signify compression. This is included
in the signature, to protect against zip bombs.
Salt can be used to namespace the hash, so that a signed string is
only valid for a given namespace. Leaving this at the default
value or re-using a salt value across different parts of your
application without good cause is a security risk.
The serializer is expected to return a bytestring.
"""
return TimestampSigner(key=key, salt=salt).sign_object(
obj, serializer=serializer, compress=compress
)
def loads(
s,
key=None,
salt="django.core.signing",
serializer=JSONSerializer,
max_age=None,
fallback_keys=None,
):
"""
Reverse of dumps(), raise BadSignature if signature fails.
The serializer is expected to accept a bytestring.
"""
return TimestampSigner(
key=key, salt=salt, fallback_keys=fallback_keys
).unsign_object(
s,
serializer=serializer,
max_age=max_age,
)
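# Usage sketch (illustrative; assumes a configured SECRET_KEY, and
# "example.salt" is a placeholder). The token itself depends on the key and
# the current time:
#
#   from django.core import signing
#
#   token = signing.dumps({"user": 42}, salt="example.salt")
#   data = signing.loads(token, salt="example.salt", max_age=3600)
#   assert data == {"user": 42}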
class Signer:
# RemovedInDjango51Warning: When the deprecation ends, replace with:
# def __init__(
# self, *, key=None, sep=":", salt=None, algorithm=None, fallback_keys=None
# ):
def __init__(
self,
*args,
key=None,
sep=":",
salt=None,
algorithm=None,
fallback_keys=None,
):
self.key = key or settings.SECRET_KEY
self.fallback_keys = (
fallback_keys
if fallback_keys is not None
else settings.SECRET_KEY_FALLBACKS
)
self.sep = sep
self.salt = salt or "%s.%s" % (
self.__class__.__module__,
self.__class__.__name__,
)
self.algorithm = algorithm or "sha256"
# RemovedInDjango51Warning.
if args:
warnings.warn(
f"Passing positional arguments to {self.__class__.__name__} is "
f"deprecated.",
RemovedInDjango51Warning,
stacklevel=2,
)
for arg, attr in zip(
args, ["key", "sep", "salt", "algorithm", "fallback_keys"]
):
if arg or attr == "sep":
setattr(self, attr, arg)
        if _SEP_UNSAFE.match(self.sep):
            raise ValueError(
                "Unsafe Signer separator: %r (cannot be empty or consist of "
                "only A-z0-9-_=)" % self.sep,
            )
def signature(self, value, key=None):
key = key or self.key
return base64_hmac(self.salt + "signer", value, key, algorithm=self.algorithm)
def sign(self, value):
return "%s%s%s" % (value, self.sep, self.signature(value))
def unsign(self, signed_value):
if self.sep not in signed_value:
raise BadSignature('No "%s" found in value' % self.sep)
value, sig = signed_value.rsplit(self.sep, 1)
for key in [self.key, *self.fallback_keys]:
if constant_time_compare(sig, self.signature(value, key)):
return value
raise BadSignature('Signature "%s" does not match' % sig)
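    # Usage sketch (illustrative; output depends on SECRET_KEY, and
    # "example.salt" is a placeholder):
    #
    #   signer = Signer(salt="example.salt")
    #   signed = signer.sign("hello")    # e.g. 'hello:<signature>'
    #   signer.unsign(signed)            # 'hello'
    #   signer.unsign(signed + "x")      # raises BadSignature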
def sign_object(self, obj, serializer=JSONSerializer, compress=False):
"""
Return URL-safe, hmac signed base64 compressed JSON string.
If compress is True (not the default), check if compressing using zlib
can save some space. Prepend a '.' to signify compression. This is
included in the signature, to protect against zip bombs.
The serializer is expected to return a bytestring.
"""
data = serializer().dumps(obj)
        # Flag for whether it's been compressed or not.
is_compressed = False
if compress:
# Avoid zlib dependency unless compress is being used.
compressed = zlib.compress(data)
if len(compressed) < (len(data) - 1):
data = compressed
is_compressed = True
base64d = b64_encode(data).decode()
if is_compressed:
base64d = "." + base64d
return self.sign(base64d)
def unsign_object(self, signed_obj, serializer=JSONSerializer, **kwargs):
# Signer.unsign() returns str but base64 and zlib compression operate
# on bytes.
base64d = self.unsign(signed_obj, **kwargs).encode()
decompress = base64d[:1] == b"."
if decompress:
# It's compressed; uncompress it first.
base64d = base64d[1:]
data = b64_decode(base64d)
if decompress:
data = zlib.decompress(data)
return serializer().loads(data)
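# Usage sketch (illustrative): sign_object()/unsign_object() serialize with
# the given serializer, optionally compress, and sign the base64 payload.
#
#   signer = Signer(salt="example.salt")  # placeholder salt
#   token = signer.sign_object({"ids": list(range(100))}, compress=True)
#   signer.unsign_object(token)  # {'ids': [0, 1, ..., 99]}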
class TimestampSigner(Signer):
def timestamp(self):
return b62_encode(int(time.time()))
def sign(self, value):
value = "%s%s%s" % (value, self.sep, self.timestamp())
return super().sign(value)
def unsign(self, value, max_age=None):
"""
Retrieve original value and check it wasn't signed more
than max_age seconds ago.
"""
result = super().unsign(value)
value, timestamp = result.rsplit(self.sep, 1)
timestamp = b62_decode(timestamp)
if max_age is not None:
if isinstance(max_age, datetime.timedelta):
max_age = max_age.total_seconds()
# Check timestamp is not older than max_age
age = time.time() - timestamp
if age > max_age:
raise SignatureExpired("Signature age %s > %s seconds" % (age, max_age))
return value
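# Usage sketch (illustrative): max_age may be given in seconds or as a
# timedelta.
#
#   signer = TimestampSigner(salt="example.salt")  # placeholder salt
#   value = signer.sign("hello")
#   signer.unsign(value, max_age=datetime.timedelta(hours=1))  # 'hello'
#   # More than an hour later, the same call raises SignatureExpired.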
import datetime
import io
import json
import mimetypes
import os
import re
import sys
import time
import warnings
from email.header import Header
from http.client import responses
from urllib.parse import urlparse
from asgiref.sync import async_to_sync, sync_to_async
from django.conf import settings
from django.core import signals, signing
from django.core.exceptions import DisallowedRedirect
from django.core.serializers.json import DjangoJSONEncoder
from django.http.cookie import SimpleCookie
from django.utils import timezone
from django.utils.datastructures import CaseInsensitiveMapping
from django.utils.encoding import iri_to_uri
from django.utils.http import content_disposition_header, http_date
from django.utils.regex_helper import _lazy_re_compile
_charset_from_content_type_re = _lazy_re_compile(
r";\s*charset=(?P<charset>[^\s;]+)", re.I
)
class ResponseHeaders(CaseInsensitiveMapping):
def __init__(self, data):
"""
Populate the initial data using __setitem__ to ensure values are
correctly encoded.
"""
self._store = {}
if data:
for header, value in self._unpack_items(data):
self[header] = value
def _convert_to_charset(self, value, charset, mime_encode=False):
"""
Convert headers key/value to ascii/latin-1 native strings.
`charset` must be 'ascii' or 'latin-1'. If `mime_encode` is True and
`value` can't be represented in the given charset, apply MIME-encoding.
"""
try:
if isinstance(value, str):
# Ensure string is valid in given charset
value.encode(charset)
elif isinstance(value, bytes):
# Convert bytestring using given charset
value = value.decode(charset)
else:
value = str(value)
# Ensure string is valid in given charset.
value.encode(charset)
if "\n" in value or "\r" in value:
raise BadHeaderError(
f"Header values can't contain newlines (got {value!r})"
)
except UnicodeError as e:
# Encoding to a string of the specified charset failed, but we
# don't know what type that value was, or if it contains newlines,
# which we may need to check for before sending it to be
# encoded for multiple character sets.
if (isinstance(value, bytes) and (b"\n" in value or b"\r" in value)) or (
isinstance(value, str) and ("\n" in value or "\r" in value)
):
raise BadHeaderError(
f"Header values can't contain newlines (got {value!r})"
) from e
if mime_encode:
value = Header(value, "utf-8", maxlinelen=sys.maxsize).encode()
else:
e.reason += ", HTTP response headers must be in %s format" % charset
raise
return value
def __delitem__(self, key):
self.pop(key)
def __setitem__(self, key, value):
key = self._convert_to_charset(key, "ascii")
value = self._convert_to_charset(value, "latin-1", mime_encode=True)
self._store[key.lower()] = (key, value)
def pop(self, key, default=None):
return self._store.pop(key.lower(), default)
def setdefault(self, key, value):
if key not in self:
self[key] = value
class BadHeaderError(ValueError):
pass
class HttpResponseBase:
"""
An HTTP response base class with dictionary-accessed headers.
This class doesn't handle content. It should not be used directly.
Use the HttpResponse and StreamingHttpResponse subclasses instead.
"""
status_code = 200
def __init__(
self, content_type=None, status=None, reason=None, charset=None, headers=None
):
self.headers = ResponseHeaders(headers)
self._charset = charset
if "Content-Type" not in self.headers:
if content_type is None:
content_type = f"text/html; charset={self.charset}"
self.headers["Content-Type"] = content_type
elif content_type:
raise ValueError(
"'headers' must not contain 'Content-Type' when the "
"'content_type' parameter is provided."
)
self._resource_closers = []
# This parameter is set by the handler. It's necessary to preserve the
# historical behavior of request_finished.
self._handler_class = None
self.cookies = SimpleCookie()
self.closed = False
if status is not None:
try:
self.status_code = int(status)
except (ValueError, TypeError):
raise TypeError("HTTP status code must be an integer.")
if not 100 <= self.status_code <= 599:
raise ValueError("HTTP status code must be an integer from 100 to 599.")
self._reason_phrase = reason
@property
def reason_phrase(self):
if self._reason_phrase is not None:
return self._reason_phrase
# Leave self._reason_phrase unset in order to use the default
# reason phrase for status code.
return responses.get(self.status_code, "Unknown Status Code")
@reason_phrase.setter
def reason_phrase(self, value):
self._reason_phrase = value
@property
def charset(self):
if self._charset is not None:
return self._charset
# The Content-Type header may not yet be set, because the charset is
# being inserted *into* it.
if content_type := self.headers.get("Content-Type"):
if matched := _charset_from_content_type_re.search(content_type):
# Extract the charset and strip its double quotes.
# Note that having parsed it from the Content-Type, we don't
# store it back into the _charset for later intentionally, to
# allow for the Content-Type to be switched again later.
return matched["charset"].replace('"', "")
return settings.DEFAULT_CHARSET
@charset.setter
def charset(self, value):
self._charset = value
def serialize_headers(self):
"""HTTP headers as a bytestring."""
return b"\r\n".join(
[
key.encode("ascii") + b": " + value.encode("latin-1")
for key, value in self.headers.items()
]
)
__bytes__ = serialize_headers
@property
def _content_type_for_repr(self):
return (
', "%s"' % self.headers["Content-Type"]
if "Content-Type" in self.headers
else ""
)
def __setitem__(self, header, value):
self.headers[header] = value
def __delitem__(self, header):
del self.headers[header]
def __getitem__(self, header):
return self.headers[header]
def has_header(self, header):
"""Case-insensitive check for a header."""
return header in self.headers
__contains__ = has_header
def items(self):
return self.headers.items()
def get(self, header, alternate=None):
return self.headers.get(header, alternate)
def set_cookie(
self,
key,
value="",
max_age=None,
expires=None,
path="/",
domain=None,
secure=False,
httponly=False,
samesite=None,
):
"""
Set a cookie.
``expires`` can be:
- a string in the correct format,
- a naive ``datetime.datetime`` object in UTC,
- an aware ``datetime.datetime`` object in any time zone.
If it is a ``datetime.datetime`` object then calculate ``max_age``.
``max_age`` can be:
- int/float specifying seconds,
- ``datetime.timedelta`` object.
"""
self.cookies[key] = value
if expires is not None:
if isinstance(expires, datetime.datetime):
if timezone.is_naive(expires):
expires = timezone.make_aware(expires, datetime.timezone.utc)
delta = expires - datetime.datetime.now(tz=datetime.timezone.utc)
# Add one second so the date matches exactly (a fraction of
# time gets lost between converting to a timedelta and
# then the date string).
delta += datetime.timedelta(seconds=1)
# Just set max_age - the max_age logic will set expires.
expires = None
if max_age is not None:
raise ValueError("'expires' and 'max_age' can't be used together.")
max_age = max(0, delta.days * 86400 + delta.seconds)
else:
self.cookies[key]["expires"] = expires
else:
self.cookies[key]["expires"] = ""
if max_age is not None:
if isinstance(max_age, datetime.timedelta):
max_age = max_age.total_seconds()
self.cookies[key]["max-age"] = int(max_age)
            # IE requires expires, so set it if it hasn't been set already.
if not expires:
self.cookies[key]["expires"] = http_date(time.time() + max_age)
if path is not None:
self.cookies[key]["path"] = path
if domain is not None:
self.cookies[key]["domain"] = domain
if secure:
self.cookies[key]["secure"] = True
if httponly:
self.cookies[key]["httponly"] = True
if samesite:
if samesite.lower() not in ("lax", "none", "strict"):
raise ValueError('samesite must be "lax", "none", or "strict".')
self.cookies[key]["samesite"] = samesite
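    # Usage sketch (illustrative): a session-style cookie with an expiry of
    # one hour; 'expires' is derived from max_age.
    #
    #   response.set_cookie(
    #       "session", "abc", max_age=3600, httponly=True, samesite="Lax"
    #   )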
def setdefault(self, key, value):
"""Set a header unless it has already been set."""
self.headers.setdefault(key, value)
def set_signed_cookie(self, key, value, salt="", **kwargs):
value = signing.get_cookie_signer(salt=key + salt).sign(value)
return self.set_cookie(key, value, **kwargs)
def delete_cookie(self, key, path="/", domain=None, samesite=None):
# Browsers can ignore the Set-Cookie header if the cookie doesn't use
# the secure flag and:
# - the cookie name starts with "__Host-" or "__Secure-", or
# - the samesite is "none".
secure = key.startswith(("__Secure-", "__Host-")) or (
samesite and samesite.lower() == "none"
)
self.set_cookie(
key,
max_age=0,
path=path,
domain=domain,
secure=secure,
expires="Thu, 01 Jan 1970 00:00:00 GMT",
samesite=samesite,
)
# Common methods used by subclasses
def make_bytes(self, value):
"""Turn a value into a bytestring encoded in the output charset."""
# Per PEP 3333, this response body must be bytes. To avoid returning
# an instance of a subclass, this function returns `bytes(value)`.
# This doesn't make a copy when `value` already contains bytes.
# Handle string types -- we can't rely on force_bytes here because:
# - Python attempts str conversion first
# - when self._charset != 'utf-8' it re-encodes the content
if isinstance(value, (bytes, memoryview)):
return bytes(value)
if isinstance(value, str):
return bytes(value.encode(self.charset))
# Handle non-string types.
return str(value).encode(self.charset)
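    # Example (illustrative): make_bytes(42) -> b"42"; make_bytes("å") is "å"
    # encoded in self.charset (b"\xc3\xa5" for utf-8).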
# These methods partially implement the file-like object interface.
# See https://docs.python.org/library/io.html#io.IOBase
# The WSGI server must call this method upon completion of the request.
# See http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html
def close(self):
for closer in self._resource_closers:
try:
closer()
except Exception:
pass
# Free resources that were still referenced.
self._resource_closers.clear()
self.closed = True
signals.request_finished.send(sender=self._handler_class)
def write(self, content):
raise OSError("This %s instance is not writable" % self.__class__.__name__)
def flush(self):
pass
def tell(self):
raise OSError(
"This %s instance cannot tell its position" % self.__class__.__name__
)
# These methods partially implement a stream-like object interface.
# See https://docs.python.org/library/io.html#io.IOBase
def readable(self):
return False
def seekable(self):
return False
def writable(self):
return False
def writelines(self, lines):
raise OSError("This %s instance is not writable" % self.__class__.__name__)
class HttpResponse(HttpResponseBase):
"""
An HTTP response class with a string as content.
This content can be read, appended to, or replaced.
"""
streaming = False
non_picklable_attrs = frozenset(
[
"resolver_match",
# Non-picklable attributes added by test clients.
"client",
"context",
"json",
"templates",
]
)
def __init__(self, content=b"", *args, **kwargs):
super().__init__(*args, **kwargs)
# Content is a bytestring. See the `content` property methods.
self.content = content
def __getstate__(self):
obj_dict = self.__dict__.copy()
for attr in self.non_picklable_attrs:
if attr in obj_dict:
del obj_dict[attr]
return obj_dict
def __repr__(self):
return "<%(cls)s status_code=%(status_code)d%(content_type)s>" % {
"cls": self.__class__.__name__,
"status_code": self.status_code,
"content_type": self._content_type_for_repr,
}
def serialize(self):
"""Full HTTP message, including headers, as a bytestring."""
return self.serialize_headers() + b"\r\n\r\n" + self.content
__bytes__ = serialize
@property
def content(self):
return b"".join(self._container)
@content.setter
def content(self, value):
# Consume iterators upon assignment to allow repeated iteration.
if hasattr(value, "__iter__") and not isinstance(
value, (bytes, memoryview, str)
):
content = b"".join(self.make_bytes(chunk) for chunk in value)
if hasattr(value, "close"):
try:
value.close()
except Exception:
pass
else:
content = self.make_bytes(value)
# Create a list of properly encoded bytestrings to support write().
self._container = [content]
def __iter__(self):
return iter(self._container)
def write(self, content):
self._container.append(self.make_bytes(content))
def tell(self):
return len(self.content)
def getvalue(self):
return self.content
def writable(self):
return True
def writelines(self, lines):
for line in lines:
self.write(line)
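# Usage sketch (illustrative): HttpResponse content can be built incrementally
# through the file-like interface.
#
#   response = HttpResponse(content_type="text/plain")
#   response.write("Hello, ")
#   response.writelines(["wor", "ld"])
#   response.content  # b'Hello, world'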
class StreamingHttpResponse(HttpResponseBase):
"""
A streaming HTTP response class with an iterator as content.
This should only be iterated once, when the response is streamed to the
client. However, it can be appended to or replaced with a new iterator
that wraps the original content (or yields entirely new content).
"""
streaming = True
def __init__(self, streaming_content=(), *args, **kwargs):
super().__init__(*args, **kwargs)
# `streaming_content` should be an iterable of bytestrings.
# See the `streaming_content` property methods.
self.streaming_content = streaming_content
def __repr__(self):
return "<%(cls)s status_code=%(status_code)d%(content_type)s>" % {
"cls": self.__class__.__qualname__,
"status_code": self.status_code,
"content_type": self._content_type_for_repr,
}
@property
def content(self):
raise AttributeError(
"This %s instance has no `content` attribute. Use "
"`streaming_content` instead." % self.__class__.__name__
)
@property
def streaming_content(self):
if self.is_async:
# pull to lexical scope to capture fixed reference in case
# streaming_content is set again later.
_iterator = self._iterator
async def awrapper():
async for part in _iterator:
yield self.make_bytes(part)
return awrapper()
else:
return map(self.make_bytes, self._iterator)
@streaming_content.setter
def streaming_content(self, value):
self._set_streaming_content(value)
def _set_streaming_content(self, value):
# Ensure we can never iterate on "value" more than once.
try:
self._iterator = iter(value)
self.is_async = False
except TypeError:
self._iterator = value.__aiter__()
self.is_async = True
if hasattr(value, "close"):
self._resource_closers.append(value.close)
def __iter__(self):
try:
return iter(self.streaming_content)
except TypeError:
warnings.warn(
"StreamingHttpResponse must consume asynchronous iterators in order to "
"serve them synchronously. Use a synchronous iterator instead.",
Warning,
)
# async iterator. Consume in async_to_sync and map back.
async def to_list(_iterator):
as_list = []
async for chunk in _iterator:
as_list.append(chunk)
return as_list
return map(self.make_bytes, iter(async_to_sync(to_list)(self._iterator)))
async def __aiter__(self):
try:
async for part in self.streaming_content:
yield part
except TypeError:
warnings.warn(
"StreamingHttpResponse must consume synchronous iterators in order to "
"serve them asynchronously. Use an asynchronous iterator instead.",
Warning,
)
# sync iterator. Consume via sync_to_async and yield via async
# generator.
for part in await sync_to_async(list)(self.streaming_content):
yield part
def getvalue(self):
return b"".join(self.streaming_content)
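# Usage sketch (illustrative): pass a generator so large payloads are produced
# chunk by chunk instead of being held in memory.
#
#   def chunks():
#       for i in range(3):
#           yield f"row {i}\n"
#
#   response = StreamingHttpResponse(chunks(), content_type="text/plain")
#   b"".join(response)  # b'row 0\nrow 1\nrow 2\n'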
class FileResponse(StreamingHttpResponse):
"""
A streaming HTTP response class optimized for files.
"""
block_size = 4096
def __init__(self, *args, as_attachment=False, filename="", **kwargs):
self.as_attachment = as_attachment
self.filename = filename
self._no_explicit_content_type = (
"content_type" not in kwargs or kwargs["content_type"] is None
)
super().__init__(*args, **kwargs)
def _set_streaming_content(self, value):
if not hasattr(value, "read"):
self.file_to_stream = None
return super()._set_streaming_content(value)
self.file_to_stream = filelike = value
if hasattr(filelike, "close"):
self._resource_closers.append(filelike.close)
value = iter(lambda: filelike.read(self.block_size), b"")
self.set_headers(filelike)
super()._set_streaming_content(value)
def set_headers(self, filelike):
"""
Set some common response headers (Content-Length, Content-Type, and
Content-Disposition) based on the `filelike` response content.
"""
filename = getattr(filelike, "name", "")
filename = filename if isinstance(filename, str) else ""
seekable = hasattr(filelike, "seek") and (
not hasattr(filelike, "seekable") or filelike.seekable()
)
if hasattr(filelike, "tell"):
if seekable:
initial_position = filelike.tell()
filelike.seek(0, io.SEEK_END)
self.headers["Content-Length"] = filelike.tell() - initial_position
filelike.seek(initial_position)
elif hasattr(filelike, "getbuffer"):
self.headers["Content-Length"] = (
filelike.getbuffer().nbytes - filelike.tell()
)
elif os.path.exists(filename):
self.headers["Content-Length"] = (
os.path.getsize(filename) - filelike.tell()
)
elif seekable:
self.headers["Content-Length"] = sum(
iter(lambda: len(filelike.read(self.block_size)), 0)
)
filelike.seek(-int(self.headers["Content-Length"]), io.SEEK_END)
filename = os.path.basename(self.filename or filename)
if self._no_explicit_content_type:
if filename:
content_type, encoding = mimetypes.guess_type(filename)
# Encoding isn't set to prevent browsers from automatically
# uncompressing files.
content_type = {
"bzip2": "application/x-bzip",
"gzip": "application/gzip",
"xz": "application/x-xz",
}.get(encoding, content_type)
self.headers["Content-Type"] = (
content_type or "application/octet-stream"
)
else:
self.headers["Content-Type"] = "application/octet-stream"
if content_disposition := content_disposition_header(
self.as_attachment, filename
):
self.headers["Content-Disposition"] = content_disposition
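# Usage sketch (illustrative; "report.pdf" is a placeholder path):
#
#   response = FileResponse(open("report.pdf", "rb"), as_attachment=True)
#   # Content-Length, Content-Type, and Content-Disposition are derived from
#   # the file object; it's closed when the response is closed.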
class HttpResponseRedirectBase(HttpResponse):
allowed_schemes = ["http", "https", "ftp"]
def __init__(self, redirect_to, *args, **kwargs):
super().__init__(*args, **kwargs)
self["Location"] = iri_to_uri(redirect_to)
parsed = urlparse(str(redirect_to))
if parsed.scheme and parsed.scheme not in self.allowed_schemes:
raise DisallowedRedirect(
"Unsafe redirect to URL with protocol '%s'" % parsed.scheme
)
url = property(lambda self: self["Location"])
def __repr__(self):
return (
'<%(cls)s status_code=%(status_code)d%(content_type)s, url="%(url)s">'
% {
"cls": self.__class__.__name__,
"status_code": self.status_code,
"content_type": self._content_type_for_repr,
"url": self.url,
}
)
class HttpResponseRedirect(HttpResponseRedirectBase):
status_code = 302
class HttpResponsePermanentRedirect(HttpResponseRedirectBase):
status_code = 301
class HttpResponseNotModified(HttpResponse):
status_code = 304
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
del self["content-type"]
@HttpResponse.content.setter
def content(self, value):
if value:
raise AttributeError(
"You cannot set content to a 304 (Not Modified) response"
)
self._container = []
class HttpResponseBadRequest(HttpResponse):
status_code = 400
class HttpResponseNotFound(HttpResponse):
status_code = 404
class HttpResponseForbidden(HttpResponse):
status_code = 403
class HttpResponseNotAllowed(HttpResponse):
status_code = 405
def __init__(self, permitted_methods, *args, **kwargs):
super().__init__(*args, **kwargs)
self["Allow"] = ", ".join(permitted_methods)
def __repr__(self):
return "<%(cls)s [%(methods)s] status_code=%(status_code)d%(content_type)s>" % {
"cls": self.__class__.__name__,
"status_code": self.status_code,
"content_type": self._content_type_for_repr,
"methods": self["Allow"],
}
class HttpResponseGone(HttpResponse):
status_code = 410
class HttpResponseServerError(HttpResponse):
status_code = 500
class Http404(Exception):
pass
class JsonResponse(HttpResponse):
"""
An HTTP response class that consumes data to be serialized to JSON.
:param data: Data to be dumped into json. By default only ``dict`` objects
are allowed to be passed due to a security flaw before ECMAScript 5. See
the ``safe`` parameter for more information.
:param encoder: Should be a json encoder class. Defaults to
``django.core.serializers.json.DjangoJSONEncoder``.
:param safe: Controls if only ``dict`` objects may be serialized. Defaults
to ``True``.
:param json_dumps_params: A dictionary of kwargs passed to json.dumps().
"""
def __init__(
self,
data,
encoder=DjangoJSONEncoder,
safe=True,
json_dumps_params=None,
**kwargs,
):
if safe and not isinstance(data, dict):
raise TypeError(
"In order to allow non-dict objects to be serialized set the "
"safe parameter to False."
)
if json_dumps_params is None:
json_dumps_params = {}
kwargs.setdefault("content_type", "application/json")
data = json.dumps(data, cls=encoder, **json_dumps_params)
super().__init__(content=data, **kwargs)
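# Usage sketch (illustrative):
#
#   JsonResponse({"status": "ok"})                       # dict payload
#   JsonResponse([1, 2, 3], safe=False)                  # non-dict payload
#   JsonResponse({"n": 1}, json_dumps_params={"indent": 2})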
import logging
from asgiref.sync import iscoroutinefunction, markcoroutinefunction
from django.core.exceptions import ImproperlyConfigured
from django.http import (
HttpResponse,
HttpResponseGone,
HttpResponseNotAllowed,
HttpResponsePermanentRedirect,
HttpResponseRedirect,
)
from django.template.response import TemplateResponse
from django.urls import reverse
from django.utils.decorators import classonlymethod
from django.utils.functional import classproperty
logger = logging.getLogger("django.request")
class ContextMixin:
"""
A default context mixin that passes the keyword arguments received by
get_context_data() as the template context.
"""
extra_context = None
def get_context_data(self, **kwargs):
kwargs.setdefault("view", self)
if self.extra_context is not None:
kwargs.update(self.extra_context)
return kwargs
class View:
"""
Intentionally simple parent class for all views. Only implements
dispatch-by-method and simple sanity checking.
"""
http_method_names = [
"get",
"post",
"put",
"patch",
"delete",
"head",
"options",
"trace",
]
def __init__(self, **kwargs):
"""
Constructor. Called in the URLconf; can contain helpful extra
keyword arguments, and other things.
"""
# Go through keyword arguments, and either save their values to our
# instance, or raise an error.
for key, value in kwargs.items():
setattr(self, key, value)
@classproperty
def view_is_async(cls):
handlers = [
getattr(cls, method)
for method in cls.http_method_names
if (method != "options" and hasattr(cls, method))
]
if not handlers:
return False
is_async = iscoroutinefunction(handlers[0])
if not all(iscoroutinefunction(h) == is_async for h in handlers[1:]):
raise ImproperlyConfigured(
f"{cls.__qualname__} HTTP handlers must either be all sync or all "
"async."
)
return is_async
@classonlymethod
def as_view(cls, **initkwargs):
"""Main entry point for a request-response process."""
for key in initkwargs:
if key in cls.http_method_names:
raise TypeError(
"The method name %s is not accepted as a keyword argument "
"to %s()." % (key, cls.__name__)
)
if not hasattr(cls, key):
raise TypeError(
"%s() received an invalid keyword %r. as_view "
"only accepts arguments that are already "
"attributes of the class." % (cls.__name__, key)
)
def view(request, *args, **kwargs):
self = cls(**initkwargs)
self.setup(request, *args, **kwargs)
if not hasattr(self, "request"):
raise AttributeError(
"%s instance has no 'request' attribute. Did you override "
"setup() and forget to call super()?" % cls.__name__
)
return self.dispatch(request, *args, **kwargs)
view.view_class = cls
view.view_initkwargs = initkwargs
# __name__ and __qualname__ are intentionally left unchanged as
# view_class should be used to robustly determine the name of the view
# instead.
view.__doc__ = cls.__doc__
view.__module__ = cls.__module__
view.__annotations__ = cls.dispatch.__annotations__
# Copy possible attributes set by decorators, e.g. @csrf_exempt, from
# the dispatch method.
view.__dict__.update(cls.dispatch.__dict__)
# Mark the callback if the view class is async.
if cls.view_is_async:
markcoroutinefunction(view)
return view
def setup(self, request, *args, **kwargs):
"""Initialize attributes shared by all view methods."""
if hasattr(self, "get") and not hasattr(self, "head"):
self.head = self.get
self.request = request
self.args = args
self.kwargs = kwargs
def dispatch(self, request, *args, **kwargs):
# Try to dispatch to the right method; if a method doesn't exist,
# defer to the error handler. Also defer to the error handler if the
# request method isn't on the approved list.
if request.method.lower() in self.http_method_names:
handler = getattr(
self, request.method.lower(), self.http_method_not_allowed
)
else:
handler = self.http_method_not_allowed
return handler(request, *args, **kwargs)
def http_method_not_allowed(self, request, *args, **kwargs):
logger.warning(
"Method Not Allowed (%s): %s",
request.method,
request.path,
extra={"status_code": 405, "request": request},
)
response = HttpResponseNotAllowed(self._allowed_methods())
if self.view_is_async:
async def func():
return response
return func()
else:
return response
def options(self, request, *args, **kwargs):
"""Handle responding to requests for the OPTIONS HTTP verb."""
response = HttpResponse()
response.headers["Allow"] = ", ".join(self._allowed_methods())
response.headers["Content-Length"] = "0"
if self.view_is_async:
async def func():
return response
return func()
else:
return response
def _allowed_methods(self):
return [m.upper() for m in self.http_method_names if hasattr(self, m)]
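# Usage sketch (illustrative): handlers are plain methods named after HTTP
# verbs; as_view() builds the callable used in a URLconf (assumes
# `from django.urls import path`).
#
#   class PingView(View):
#       def get(self, request, *args, **kwargs):
#           return HttpResponse("pong")
#
#   urlpatterns = [path("ping/", PingView.as_view())]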
class TemplateResponseMixin:
"""A mixin that can be used to render a template."""
template_name = None
template_engine = None
response_class = TemplateResponse
content_type = None
def render_to_response(self, context, **response_kwargs):
"""
Return a response, using the `response_class` for this view, with a
template rendered with the given context.
Pass response_kwargs to the constructor of the response class.
"""
response_kwargs.setdefault("content_type", self.content_type)
return self.response_class(
request=self.request,
template=self.get_template_names(),
context=context,
using=self.template_engine,
**response_kwargs,
)
def get_template_names(self):
"""
Return a list of template names to be used for the request. Must return
a list. May not be called if render_to_response() is overridden.
"""
if self.template_name is None:
raise ImproperlyConfigured(
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
else:
return [self.template_name]
class TemplateView(TemplateResponseMixin, ContextMixin, View):
"""
Render a template. Pass keyword arguments from the URLconf to the context.
"""
def get(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
return self.render_to_response(context)
class RedirectView(View):
"""Provide a redirect on any GET request."""
permanent = False
url = None
pattern_name = None
query_string = False
def get_redirect_url(self, *args, **kwargs):
"""
        Return the URL to redirect to. Keyword arguments from the URL pattern
match generating the redirect request are provided as kwargs to this
method.
"""
if self.url:
url = self.url % kwargs
elif self.pattern_name:
url = reverse(self.pattern_name, args=args, kwargs=kwargs)
else:
return None
args = self.request.META.get("QUERY_STRING", "")
if args and self.query_string:
url = "%s?%s" % (url, args)
return url
def get(self, request, *args, **kwargs):
url = self.get_redirect_url(*args, **kwargs)
if url:
if self.permanent:
return HttpResponsePermanentRedirect(url)
else:
return HttpResponseRedirect(url)
else:
logger.warning(
"Gone: %s", request.path, extra={"status_code": 410, "request": request}
)
return HttpResponseGone()
def head(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def options(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def patch(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
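# Usage sketch (illustrative; "new-page" is a placeholder URL name):
#
#   path("old/", RedirectView.as_view(pattern_name="new-page", permanent=True))
#
# All other verbs (HEAD, POST, OPTIONS, DELETE, PUT, PATCH) defer to get(),
# so the redirect is issued regardless of method.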
import functools
import re
from collections import defaultdict
from itertools import chain
from django.conf import settings
from django.db import models
from django.db.migrations import operations
from django.db.migrations.migration import Migration
from django.db.migrations.operations.models import AlterModelOptions
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.utils import (
COMPILED_REGEX_TYPE,
RegexObject,
resolve_relation,
)
from django.utils.topological_sort import stable_topological_sort
class MigrationAutodetector:
"""
Take a pair of ProjectStates and compare them to see what the first would
need doing to make it match the second (the second usually being the
project's current state).
Note that this naturally operates on entire projects at a time,
as it's likely that changes interact (for example, you can't
add a ForeignKey without having a migration to add the table it
depends on first). A user interface may offer single-app usage
if it wishes, with the caveat that it may not always be possible.
"""
def __init__(self, from_state, to_state, questioner=None):
self.from_state = from_state
self.to_state = to_state
self.questioner = questioner or MigrationQuestioner()
self.existing_apps = {app for app, model in from_state.models}
def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None):
"""
Main entry point to produce a list of applicable changes.
Take a graph to base names on and an optional set of apps
        to try and restrict to (restriction is not guaranteed).
"""
changes = self._detect_changes(convert_apps, graph)
changes = self.arrange_for_graph(changes, graph, migration_name)
if trim_to_apps:
changes = self._trim_to_apps(changes, trim_to_apps)
return changes
def deep_deconstruct(self, obj):
"""
Recursive deconstruction for a field and its arguments.
Used for full comparison for rename/alter; sometimes a single-level
deconstruction will not compare correctly.
"""
if isinstance(obj, list):
return [self.deep_deconstruct(value) for value in obj]
elif isinstance(obj, tuple):
return tuple(self.deep_deconstruct(value) for value in obj)
elif isinstance(obj, dict):
return {key: self.deep_deconstruct(value) for key, value in obj.items()}
elif isinstance(obj, functools.partial):
return (
obj.func,
self.deep_deconstruct(obj.args),
self.deep_deconstruct(obj.keywords),
)
elif isinstance(obj, COMPILED_REGEX_TYPE):
return RegexObject(obj)
elif isinstance(obj, type):
# If this is a type that implements 'deconstruct' as an instance method,
# avoid treating this as being deconstructible itself - see #22951
return obj
elif hasattr(obj, "deconstruct"):
deconstructed = obj.deconstruct()
if isinstance(obj, models.Field):
# we have a field which also returns a name
deconstructed = deconstructed[1:]
path, args, kwargs = deconstructed
return (
path,
[self.deep_deconstruct(value) for value in args],
{key: self.deep_deconstruct(value) for key, value in kwargs.items()},
)
else:
return obj
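    # Example (illustrative): deep_deconstruct() reduces a field to a
    # comparable (path, args, kwargs) triple, e.g.
    # models.CharField(max_length=20) becomes
    # ("django.db.models.CharField", [], {"max_length": 20}), so two field
    # definitions can be compared structurally.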
def only_relation_agnostic_fields(self, fields):
"""
Return a definition of the fields that ignores field names and
what related fields actually relate to. Used for detecting renames (as
the related fields change during renames).
"""
fields_def = []
for name, field in sorted(fields.items()):
deconstruction = self.deep_deconstruct(field)
if field.remote_field and field.remote_field.model:
deconstruction[2].pop("to", None)
fields_def.append(deconstruction)
return fields_def
def _detect_changes(self, convert_apps=None, graph=None):
"""
Return a dict of migration plans which will achieve the
change from from_state to to_state. The dict has app labels
as keys and a list of migrations as values.
The resulting migrations aren't specially named, but the names
do matter for dependencies inside the set.
convert_apps is the list of apps to convert to use migrations
(i.e. to make initial migrations for, in the usual case)
graph is an optional argument that, if provided, can help improve
dependency generation and avoid potential circular dependencies.
"""
# The first phase is generating all the operations for each app
# and gathering them into a big per-app list.
# Then go through that list, order it, and split into migrations to
# resolve dependencies caused by M2Ms and FKs.
self.generated_operations = {}
self.altered_indexes = {}
self.altered_constraints = {}
self.renamed_fields = {}
# Prepare some old/new state and model lists, separating
# proxy models and ignoring unmigrated apps.
self.old_model_keys = set()
self.old_proxy_keys = set()
self.old_unmanaged_keys = set()
self.new_model_keys = set()
self.new_proxy_keys = set()
self.new_unmanaged_keys = set()
for (app_label, model_name), model_state in self.from_state.models.items():
if not model_state.options.get("managed", True):
self.old_unmanaged_keys.add((app_label, model_name))
elif app_label not in self.from_state.real_apps:
if model_state.options.get("proxy"):
self.old_proxy_keys.add((app_label, model_name))
else:
self.old_model_keys.add((app_label, model_name))
for (app_label, model_name), model_state in self.to_state.models.items():
if not model_state.options.get("managed", True):
self.new_unmanaged_keys.add((app_label, model_name))
elif app_label not in self.from_state.real_apps or (
convert_apps and app_label in convert_apps
):
if model_state.options.get("proxy"):
self.new_proxy_keys.add((app_label, model_name))
else:
self.new_model_keys.add((app_label, model_name))
self.from_state.resolve_fields_and_relations()
self.to_state.resolve_fields_and_relations()
# Renames have to come first
self.generate_renamed_models()
# Prepare lists of fields and generate through model map
self._prepare_field_lists()
self._generate_through_model_map()
# Generate non-rename model operations
self.generate_deleted_models()
self.generate_created_models()
self.generate_deleted_proxies()
self.generate_created_proxies()
self.generate_altered_options()
self.generate_altered_managers()
self.generate_altered_db_table_comment()
# Create the renamed fields and store them in self.renamed_fields.
# They are used by create_altered_indexes(), generate_altered_fields(),
# generate_removed_altered_index/unique_together(), and
# generate_altered_index/unique_together().
self.create_renamed_fields()
# Create the altered indexes and store them in self.altered_indexes.
# This avoids the same computation in generate_removed_indexes()
# and generate_added_indexes().
self.create_altered_indexes()
self.create_altered_constraints()
# Generate index removal operations before field is removed
self.generate_removed_constraints()
self.generate_removed_indexes()
# Generate field renaming operations.
self.generate_renamed_fields()
self.generate_renamed_indexes()
# Generate removal of foo together.
self.generate_removed_altered_unique_together()
self.generate_removed_altered_index_together() # RemovedInDjango51Warning.
# Generate field operations.
self.generate_removed_fields()
self.generate_added_fields()
self.generate_altered_fields()
self.generate_altered_order_with_respect_to()
self.generate_altered_unique_together()
self.generate_altered_index_together() # RemovedInDjango51Warning.
self.generate_added_indexes()
self.generate_added_constraints()
self.generate_altered_db_table()
self._sort_migrations()
self._build_migration_list(graph)
self._optimize_migrations()
return self.migrations
def _prepare_field_lists(self):
"""
Prepare field lists and a list of the fields that used through models
in the old state so dependencies can be made from the through model
deletion to the field that uses it.
"""
self.kept_model_keys = self.old_model_keys & self.new_model_keys
self.kept_proxy_keys = self.old_proxy_keys & self.new_proxy_keys
self.kept_unmanaged_keys = self.old_unmanaged_keys & self.new_unmanaged_keys
self.through_users = {}
self.old_field_keys = {
(app_label, model_name, field_name)
for app_label, model_name in self.kept_model_keys
for field_name in self.from_state.models[
app_label, self.renamed_models.get((app_label, model_name), model_name)
].fields
}
self.new_field_keys = {
(app_label, model_name, field_name)
for app_label, model_name in self.kept_model_keys
for field_name in self.to_state.models[app_label, model_name].fields
}
def _generate_through_model_map(self):
"""Through model map generation."""
for app_label, model_name in sorted(self.old_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
for field_name, field in old_model_state.fields.items():
if hasattr(field, "remote_field") and getattr(
field.remote_field, "through", None
):
through_key = resolve_relation(
field.remote_field.through, app_label, model_name
)
self.through_users[through_key] = (
app_label,
old_model_name,
field_name,
)
@staticmethod
def _resolve_dependency(dependency):
"""
Return the resolved dependency and a boolean denoting whether or not
it was swappable.
"""
if dependency[0] != "__setting__":
return dependency, False
resolved_app_label, resolved_object_name = getattr(
settings, dependency[1]
).split(".")
return (resolved_app_label, resolved_object_name.lower()) + dependency[2:], True
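    # Example (illustrative): a dependency like
    # ("__setting__", "AUTH_USER_MODEL", None, True), with AUTH_USER_MODEL set
    # to "auth.User", resolves to (("auth", "user", None, True), True), the
    # boolean flagging it as swappable.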
def _build_migration_list(self, graph=None):
"""
Chop the lists of operations up into migrations with dependencies on
each other. Do this by going through an app's list of operations until
one is found that has an outgoing dependency that isn't in another
app's migration yet (hasn't been chopped off its list). Then chop off
the operations before it into a migration and move onto the next app.
        If the loop completes without doing anything, there's a circular
dependency (which _should_ be impossible as the operations are
all split at this point so they can't depend and be depended on).
"""
self.migrations = {}
num_ops = sum(len(x) for x in self.generated_operations.values())
chop_mode = False
while num_ops:
# On every iteration, we step through all the apps and see if there
# is a completed set of operations.
# If we find that a subset of the operations are complete we can
# try to chop it off from the rest and continue, but we only
# do this if we've already been through the list once before
# without any chopping and nothing has changed.
for app_label in sorted(self.generated_operations):
chopped = []
dependencies = set()
for operation in list(self.generated_operations[app_label]):
deps_satisfied = True
operation_dependencies = set()
for dep in operation._auto_deps:
# Temporarily resolve the swappable dependency to
# prevent circular references. While keeping the
# dependency checks on the resolved model, add the
# swappable dependencies.
original_dep = dep
dep, is_swappable_dep = self._resolve_dependency(dep)
if dep[0] != app_label:
# External app dependency. See if it's not yet
# satisfied.
for other_operation in self.generated_operations.get(
dep[0], []
):
if self.check_dependency(other_operation, dep):
deps_satisfied = False
break
if not deps_satisfied:
break
else:
if is_swappable_dep:
operation_dependencies.add(
(original_dep[0], original_dep[1])
)
elif dep[0] in self.migrations:
operation_dependencies.add(
(dep[0], self.migrations[dep[0]][-1].name)
)
else:
# If we can't find the other app, we add a
# first/last dependency, but only if we've
# already been through once and checked
# everything.
if chop_mode:
# If the app already exists, we add a
# dependency on the last migration, as
# we don't know which migration
# contains the target field. If it's
# not yet migrated or has no
# migrations, we use __first__.
if graph and graph.leaf_nodes(dep[0]):
operation_dependencies.add(
graph.leaf_nodes(dep[0])[0]
)
else:
operation_dependencies.add(
(dep[0], "__first__")
)
else:
deps_satisfied = False
if deps_satisfied:
chopped.append(operation)
dependencies.update(operation_dependencies)
del self.generated_operations[app_label][0]
else:
break
# Make a migration! Well, only if there's stuff to put in it
if dependencies or chopped:
if not self.generated_operations[app_label] or chop_mode:
subclass = type(
"Migration",
(Migration,),
{"operations": [], "dependencies": []},
)
instance = subclass(
"auto_%i" % (len(self.migrations.get(app_label, [])) + 1),
app_label,
)
instance.dependencies = list(dependencies)
instance.operations = chopped
instance.initial = app_label not in self.existing_apps
self.migrations.setdefault(app_label, []).append(instance)
chop_mode = False
else:
self.generated_operations[app_label] = (
chopped + self.generated_operations[app_label]
)
new_num_ops = sum(len(x) for x in self.generated_operations.values())
if new_num_ops == num_ops:
if not chop_mode:
chop_mode = True
else:
raise ValueError(
"Cannot resolve operation dependencies: %r"
% self.generated_operations
)
num_ops = new_num_ops
def _sort_migrations(self):
"""
Reorder to make things possible. Reordering may be needed so FKs work
nicely inside the same app.
"""
for app_label, ops in sorted(self.generated_operations.items()):
# construct a dependency graph for intra-app dependencies
dependency_graph = {op: set() for op in ops}
for op in ops:
for dep in op._auto_deps:
# Resolve intra-app dependencies to handle circular
# references involving a swappable model.
dep = self._resolve_dependency(dep)[0]
if dep[0] == app_label:
for op2 in ops:
if self.check_dependency(op2, dep):
dependency_graph[op].add(op2)
# we use a stable sort for deterministic tests & general behavior
self.generated_operations[app_label] = stable_topological_sort(
ops, dependency_graph
)
def _optimize_migrations(self):
# Add in internal dependencies among the migrations
for app_label, migrations in self.migrations.items():
for m1, m2 in zip(migrations, migrations[1:]):
m2.dependencies.append((app_label, m1.name))
# De-dupe dependencies
for migrations in self.migrations.values():
for migration in migrations:
migration.dependencies = list(set(migration.dependencies))
# Optimize migrations
for app_label, migrations in self.migrations.items():
for migration in migrations:
migration.operations = MigrationOptimizer().optimize(
migration.operations, app_label
)
def check_dependency(self, operation, dependency):
"""
Return True if the given operation depends on the given dependency,
False otherwise.
"""
# Created model
if dependency[2] is None and dependency[3] is True:
return (
isinstance(operation, operations.CreateModel)
and operation.name_lower == dependency[1].lower()
)
# Created field
elif dependency[2] is not None and dependency[3] is True:
return (
isinstance(operation, operations.CreateModel)
and operation.name_lower == dependency[1].lower()
and any(dependency[2] == x for x, y in operation.fields)
) or (
isinstance(operation, operations.AddField)
and operation.model_name_lower == dependency[1].lower()
and operation.name_lower == dependency[2].lower()
)
# Removed field
elif dependency[2] is not None and dependency[3] is False:
return (
isinstance(operation, operations.RemoveField)
and operation.model_name_lower == dependency[1].lower()
and operation.name_lower == dependency[2].lower()
)
# Removed model
elif dependency[2] is None and dependency[3] is False:
return (
isinstance(operation, operations.DeleteModel)
and operation.name_lower == dependency[1].lower()
)
# Field being altered
elif dependency[2] is not None and dependency[3] == "alter":
return (
isinstance(operation, operations.AlterField)
and operation.model_name_lower == dependency[1].lower()
and operation.name_lower == dependency[2].lower()
)
# order_with_respect_to being unset for a field
elif dependency[2] is not None and dependency[3] == "order_wrt_unset":
return (
isinstance(operation, operations.AlterOrderWithRespectTo)
and operation.name_lower == dependency[1].lower()
and (operation.order_with_respect_to or "").lower()
!= dependency[2].lower()
)
# Field is removed and part of an index/unique_together
elif dependency[2] is not None and dependency[3] == "foo_together_change":
return (
isinstance(
operation,
(operations.AlterUniqueTogether, operations.AlterIndexTogether),
)
and operation.name_lower == dependency[1].lower()
)
# Unknown dependency. Raise an error.
else:
raise ValueError("Can't handle dependency %r" % (dependency,))
def add_operation(self, app_label, operation, dependencies=None, beginning=False):
# Dependencies are
# (app_label, model_name, field_name, create/delete as True/False)
operation._auto_deps = dependencies or []
if beginning:
self.generated_operations.setdefault(app_label, []).insert(0, operation)
else:
self.generated_operations.setdefault(app_label, []).append(operation)
def swappable_first_key(self, item):
"""
Place potential swappable models first in lists of created models (only
real way to solve #22783).
"""
try:
model_state = self.to_state.models[item]
base_names = {
base if isinstance(base, str) else base.__name__
for base in model_state.bases
}
string_version = "%s.%s" % (item[0], item[1])
if (
model_state.options.get("swappable")
or "AbstractUser" in base_names
or "AbstractBaseUser" in base_names
or settings.AUTH_USER_MODEL.lower() == string_version.lower()
):
return ("___" + item[0], "___" + item[1])
except LookupError:
pass
return item
def generate_renamed_models(self):
"""
Find any renamed models, generate the operations for them, and remove
the old entry from the model lists. Must be run before other
model-level generation.
"""
self.renamed_models = {}
self.renamed_models_rel = {}
added_models = self.new_model_keys - self.old_model_keys
for app_label, model_name in sorted(added_models):
model_state = self.to_state.models[app_label, model_name]
model_fields_def = self.only_relation_agnostic_fields(model_state.fields)
removed_models = self.old_model_keys - self.new_model_keys
for rem_app_label, rem_model_name in removed_models:
if rem_app_label == app_label:
rem_model_state = self.from_state.models[
rem_app_label, rem_model_name
]
rem_model_fields_def = self.only_relation_agnostic_fields(
rem_model_state.fields
)
if model_fields_def == rem_model_fields_def:
if self.questioner.ask_rename_model(
rem_model_state, model_state
):
dependencies = []
fields = list(model_state.fields.values()) + [
field.remote_field
for relations in self.to_state.relations[
app_label, model_name
].values()
for field in relations.values()
]
for field in fields:
if field.is_relation:
dependencies.extend(
self._get_dependencies_for_foreign_key(
app_label,
model_name,
field,
self.to_state,
)
)
self.add_operation(
app_label,
operations.RenameModel(
old_name=rem_model_state.name,
new_name=model_state.name,
),
dependencies=dependencies,
)
self.renamed_models[app_label, model_name] = rem_model_name
renamed_models_rel_key = "%s.%s" % (
rem_model_state.app_label,
rem_model_state.name_lower,
)
self.renamed_models_rel[
renamed_models_rel_key
] = "%s.%s" % (
model_state.app_label,
model_state.name_lower,
)
self.old_model_keys.remove((rem_app_label, rem_model_name))
self.old_model_keys.add((app_label, model_name))
break
def generate_created_models(self):
"""
Find all new models (both managed and unmanaged) and make create
operations for them as well as separate operations to create any
foreign key or M2M relationships (these are optimized later, if
possible).
Defer any model options that refer to collections of fields that might
be deferred (e.g. unique_together, index_together).
"""
old_keys = self.old_model_keys | self.old_unmanaged_keys
added_models = self.new_model_keys - old_keys
added_unmanaged_models = self.new_unmanaged_keys - old_keys
all_added_models = chain(
sorted(added_models, key=self.swappable_first_key, reverse=True),
sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True),
)
for app_label, model_name in all_added_models:
model_state = self.to_state.models[app_label, model_name]
# Gather related fields
related_fields = {}
primary_key_rel = None
for field_name, field in model_state.fields.items():
if field.remote_field:
if field.remote_field.model:
if field.primary_key:
primary_key_rel = field.remote_field.model
elif not field.remote_field.parent_link:
related_fields[field_name] = field
if getattr(field.remote_field, "through", None):
related_fields[field_name] = field
# Are there indexes/unique|index_together to defer?
indexes = model_state.options.pop("indexes")
constraints = model_state.options.pop("constraints")
unique_together = model_state.options.pop("unique_together", None)
# RemovedInDjango51Warning.
index_together = model_state.options.pop("index_together", None)
order_with_respect_to = model_state.options.pop(
"order_with_respect_to", None
)
# Depend on the deletion of any possible proxy version of us
dependencies = [
(app_label, model_name, None, False),
]
# Depend on all bases
for base in model_state.bases:
if isinstance(base, str) and "." in base:
base_app_label, base_name = base.split(".", 1)
dependencies.append((base_app_label, base_name, None, True))
# Depend on the removal of base fields if the new model has
# a field with the same name.
old_base_model_state = self.from_state.models.get(
(base_app_label, base_name)
)
new_base_model_state = self.to_state.models.get(
(base_app_label, base_name)
)
if old_base_model_state and new_base_model_state:
removed_base_fields = (
set(old_base_model_state.fields)
.difference(
new_base_model_state.fields,
)
.intersection(model_state.fields)
)
for removed_base_field in removed_base_fields:
dependencies.append(
(base_app_label, base_name, removed_base_field, False)
)
# Depend on the other end of the primary key if it's a relation
if primary_key_rel:
dependencies.append(
resolve_relation(
primary_key_rel,
app_label,
model_name,
)
+ (None, True)
)
# Generate creation operation
self.add_operation(
app_label,
operations.CreateModel(
name=model_state.name,
fields=[
d
for d in model_state.fields.items()
if d[0] not in related_fields
],
options=model_state.options,
bases=model_state.bases,
managers=model_state.managers,
),
dependencies=dependencies,
beginning=True,
)
# Don't add operations which modify the database for unmanaged models
if not model_state.options.get("managed", True):
continue
# Generate operations for each related field
for name, field in sorted(related_fields.items()):
dependencies = self._get_dependencies_for_foreign_key(
app_label,
model_name,
field,
self.to_state,
)
# Depend on our own model being created
dependencies.append((app_label, model_name, None, True))
# Make operation
self.add_operation(
app_label,
operations.AddField(
model_name=model_name,
name=name,
field=field,
),
dependencies=list(set(dependencies)),
)
            # Generate other operations
if order_with_respect_to:
self.add_operation(
app_label,
operations.AlterOrderWithRespectTo(
name=model_name,
order_with_respect_to=order_with_respect_to,
),
dependencies=[
(app_label, model_name, order_with_respect_to, True),
(app_label, model_name, None, True),
],
)
related_dependencies = [
(app_label, model_name, name, True) for name in sorted(related_fields)
]
related_dependencies.append((app_label, model_name, None, True))
for index in indexes:
self.add_operation(
app_label,
operations.AddIndex(
model_name=model_name,
index=index,
),
dependencies=related_dependencies,
)
for constraint in constraints:
self.add_operation(
app_label,
operations.AddConstraint(
model_name=model_name,
constraint=constraint,
),
dependencies=related_dependencies,
)
if unique_together:
self.add_operation(
app_label,
operations.AlterUniqueTogether(
name=model_name,
unique_together=unique_together,
),
dependencies=related_dependencies,
)
# RemovedInDjango51Warning.
if index_together:
self.add_operation(
app_label,
operations.AlterIndexTogether(
name=model_name,
index_together=index_together,
),
dependencies=related_dependencies,
)
# Fix relationships if the model changed from a proxy model to a
# concrete model.
relations = self.to_state.relations
if (app_label, model_name) in self.old_proxy_keys:
for related_model_key, related_fields in relations[
app_label, model_name
].items():
related_model_state = self.to_state.models[related_model_key]
for related_field_name, related_field in related_fields.items():
self.add_operation(
related_model_state.app_label,
operations.AlterField(
model_name=related_model_state.name,
name=related_field_name,
field=related_field,
),
dependencies=[(app_label, model_name, None, True)],
)
def generate_created_proxies(self):
"""
        Make CreateModel statements for proxy models. Use the same
        statements as for concrete models so there's less code duplication,
        but for proxy models it's safe to skip all the field handling and
        emit a bare operation.
"""
added = self.new_proxy_keys - self.old_proxy_keys
for app_label, model_name in sorted(added):
model_state = self.to_state.models[app_label, model_name]
assert model_state.options.get("proxy")
# Depend on the deletion of any possible non-proxy version of us
dependencies = [
(app_label, model_name, None, False),
]
# Depend on all bases
for base in model_state.bases:
if isinstance(base, str) and "." in base:
base_app_label, base_name = base.split(".", 1)
dependencies.append((base_app_label, base_name, None, True))
# Generate creation operation
self.add_operation(
app_label,
operations.CreateModel(
name=model_state.name,
fields=[],
options=model_state.options,
bases=model_state.bases,
managers=model_state.managers,
),
# Depend on the deletion of any possible non-proxy version of us
dependencies=dependencies,
)
def generate_deleted_models(self):
"""
Find all deleted models (managed and unmanaged) and make delete
operations for them as well as separate operations to delete any
foreign key or M2M relationships (these are optimized later, if
possible).
Also bring forward removal of any model options that refer to
collections of fields - the inverse of generate_created_models().
"""
new_keys = self.new_model_keys | self.new_unmanaged_keys
deleted_models = self.old_model_keys - new_keys
deleted_unmanaged_models = self.old_unmanaged_keys - new_keys
all_deleted_models = chain(
sorted(deleted_models), sorted(deleted_unmanaged_models)
)
for app_label, model_name in all_deleted_models:
model_state = self.from_state.models[app_label, model_name]
# Gather related fields
related_fields = {}
for field_name, field in model_state.fields.items():
if field.remote_field:
if field.remote_field.model:
related_fields[field_name] = field
if getattr(field.remote_field, "through", None):
related_fields[field_name] = field
# Generate option removal first
unique_together = model_state.options.pop("unique_together", None)
# RemovedInDjango51Warning.
index_together = model_state.options.pop("index_together", None)
if unique_together:
self.add_operation(
app_label,
operations.AlterUniqueTogether(
name=model_name,
unique_together=None,
),
)
# RemovedInDjango51Warning.
if index_together:
self.add_operation(
app_label,
operations.AlterIndexTogether(
name=model_name,
index_together=None,
),
)
# Then remove each related field
for name in sorted(related_fields):
self.add_operation(
app_label,
operations.RemoveField(
model_name=model_name,
name=name,
),
)
# Finally, remove the model.
# This depends on both the removal/alteration of all incoming fields
# and the removal of all its own related fields, and if it's
# a through model the field that references it.
dependencies = []
relations = self.from_state.relations
for (
related_object_app_label,
object_name,
), relation_related_fields in relations[app_label, model_name].items():
for field_name, field in relation_related_fields.items():
dependencies.append(
(related_object_app_label, object_name, field_name, False),
)
if not field.many_to_many:
dependencies.append(
(
related_object_app_label,
object_name,
field_name,
"alter",
),
)
for name in sorted(related_fields):
dependencies.append((app_label, model_name, name, False))
# We're referenced in another field's through=
through_user = self.through_users.get((app_label, model_state.name_lower))
if through_user:
dependencies.append(
(through_user[0], through_user[1], through_user[2], False)
)
# Finally, make the operation, deduping any dependencies
self.add_operation(
app_label,
operations.DeleteModel(
name=model_state.name,
),
dependencies=list(set(dependencies)),
)
def generate_deleted_proxies(self):
"""Make DeleteModel options for proxy models."""
deleted = self.old_proxy_keys - self.new_proxy_keys
for app_label, model_name in sorted(deleted):
model_state = self.from_state.models[app_label, model_name]
assert model_state.options.get("proxy")
self.add_operation(
app_label,
operations.DeleteModel(
name=model_state.name,
),
)
def create_renamed_fields(self):
"""Work out renamed fields."""
self.renamed_operations = []
old_field_keys = self.old_field_keys.copy()
for app_label, model_name, field_name in sorted(
self.new_field_keys - old_field_keys
):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
field = new_model_state.get_field(field_name)
# Scan to see if this is actually a rename!
field_dec = self.deep_deconstruct(field)
for rem_app_label, rem_model_name, rem_field_name in sorted(
old_field_keys - self.new_field_keys
):
if rem_app_label == app_label and rem_model_name == model_name:
old_field = old_model_state.get_field(rem_field_name)
old_field_dec = self.deep_deconstruct(old_field)
if (
field.remote_field
and field.remote_field.model
and "to" in old_field_dec[2]
):
old_rel_to = old_field_dec[2]["to"]
if old_rel_to in self.renamed_models_rel:
old_field_dec[2]["to"] = self.renamed_models_rel[old_rel_to]
old_field.set_attributes_from_name(rem_field_name)
old_db_column = old_field.get_attname_column()[1]
if old_field_dec == field_dec or (
# Was the field renamed and db_column equal to the
# old field's column added?
old_field_dec[0:2] == field_dec[0:2]
and dict(old_field_dec[2], db_column=old_db_column)
== field_dec[2]
):
if self.questioner.ask_rename(
model_name, rem_field_name, field_name, field
):
self.renamed_operations.append(
(
rem_app_label,
rem_model_name,
old_field.db_column,
rem_field_name,
app_label,
model_name,
field,
field_name,
)
)
old_field_keys.remove(
(rem_app_label, rem_model_name, rem_field_name)
)
old_field_keys.add((app_label, model_name, field_name))
self.renamed_fields[
app_label, model_name, field_name
] = rem_field_name
break
def generate_renamed_fields(self):
"""Generate RenameField operations."""
for (
rem_app_label,
rem_model_name,
rem_db_column,
rem_field_name,
app_label,
model_name,
field,
field_name,
) in self.renamed_operations:
            # A db_column mismatch requires a prior noop AlterField so that
            # the subsequent RenameField is a database noop when the rename
            # preserves the old column name via db_column.
if rem_db_column != field.db_column:
altered_field = field.clone()
altered_field.name = rem_field_name
self.add_operation(
app_label,
operations.AlterField(
model_name=model_name,
name=rem_field_name,
field=altered_field,
),
)
self.add_operation(
app_label,
operations.RenameField(
model_name=model_name,
old_name=rem_field_name,
new_name=field_name,
),
)
self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name))
self.old_field_keys.add((app_label, model_name, field_name))
def generate_added_fields(self):
"""Make AddField operations."""
for app_label, model_name, field_name in sorted(
self.new_field_keys - self.old_field_keys
):
self._generate_added_field(app_label, model_name, field_name)
def _generate_added_field(self, app_label, model_name, field_name):
field = self.to_state.models[app_label, model_name].get_field(field_name)
# Adding a field always depends at least on its removal.
dependencies = [(app_label, model_name, field_name, False)]
        # Fields that are foreign keys or M2Ms depend on the models they
        # point to.
if field.remote_field and field.remote_field.model:
dependencies.extend(
self._get_dependencies_for_foreign_key(
app_label,
model_name,
field,
self.to_state,
)
)
# You can't just add NOT NULL fields with no default or fields
# which don't allow empty strings as default.
time_fields = (models.DateField, models.DateTimeField, models.TimeField)
preserve_default = (
field.null
or field.has_default()
or field.many_to_many
or (field.blank and field.empty_strings_allowed)
or (isinstance(field, time_fields) and field.auto_now)
)
if not preserve_default:
field = field.clone()
if isinstance(field, time_fields) and field.auto_now_add:
field.default = self.questioner.ask_auto_now_add_addition(
field_name, model_name
)
else:
field.default = self.questioner.ask_not_null_addition(
field_name, model_name
)
if (
field.unique
and field.default is not models.NOT_PROVIDED
and callable(field.default)
):
self.questioner.ask_unique_callable_default_addition(field_name, model_name)
self.add_operation(
app_label,
operations.AddField(
model_name=model_name,
name=field_name,
field=field,
preserve_default=preserve_default,
),
dependencies=dependencies,
)
def generate_removed_fields(self):
"""Make RemoveField operations."""
for app_label, model_name, field_name in sorted(
self.old_field_keys - self.new_field_keys
):
self._generate_removed_field(app_label, model_name, field_name)
def _generate_removed_field(self, app_label, model_name, field_name):
self.add_operation(
app_label,
operations.RemoveField(
model_name=model_name,
name=field_name,
),
# We might need to depend on the removal of an
# order_with_respect_to or index/unique_together operation;
# this is safely ignored if there isn't one
dependencies=[
(app_label, model_name, field_name, "order_wrt_unset"),
(app_label, model_name, field_name, "foo_together_change"),
],
)
def generate_altered_fields(self):
"""
        Make AlterField operations, or possibly RemoveField/AddField if alter
isn't possible.
"""
for app_label, model_name, field_name in sorted(
self.old_field_keys & self.new_field_keys
):
# Did the field change?
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_field_name = self.renamed_fields.get(
(app_label, model_name, field_name), field_name
)
old_field = self.from_state.models[app_label, old_model_name].get_field(
old_field_name
)
new_field = self.to_state.models[app_label, model_name].get_field(
field_name
)
dependencies = []
# Implement any model renames on relations; these are handled by RenameModel
# so we need to exclude them from the comparison
if hasattr(new_field, "remote_field") and getattr(
new_field.remote_field, "model", None
):
rename_key = resolve_relation(
new_field.remote_field.model, app_label, model_name
)
if rename_key in self.renamed_models:
new_field.remote_field.model = old_field.remote_field.model
# Handle ForeignKey which can only have a single to_field.
remote_field_name = getattr(new_field.remote_field, "field_name", None)
if remote_field_name:
to_field_rename_key = rename_key + (remote_field_name,)
if to_field_rename_key in self.renamed_fields:
# Repoint both model and field name because to_field
# inclusion in ForeignKey.deconstruct() is based on
# both.
new_field.remote_field.model = old_field.remote_field.model
new_field.remote_field.field_name = (
old_field.remote_field.field_name
)
# Handle ForeignObjects which can have multiple from_fields/to_fields.
from_fields = getattr(new_field, "from_fields", None)
if from_fields:
from_rename_key = (app_label, model_name)
new_field.from_fields = tuple(
[
self.renamed_fields.get(
from_rename_key + (from_field,), from_field
)
for from_field in from_fields
]
)
new_field.to_fields = tuple(
[
self.renamed_fields.get(rename_key + (to_field,), to_field)
for to_field in new_field.to_fields
]
)
dependencies.extend(
self._get_dependencies_for_foreign_key(
app_label,
model_name,
new_field,
self.to_state,
)
)
if hasattr(new_field, "remote_field") and getattr(
new_field.remote_field, "through", None
):
rename_key = resolve_relation(
new_field.remote_field.through, app_label, model_name
)
if rename_key in self.renamed_models:
new_field.remote_field.through = old_field.remote_field.through
old_field_dec = self.deep_deconstruct(old_field)
new_field_dec = self.deep_deconstruct(new_field)
            # If the field was confirmed to be renamed, it means that only
            # db_column was allowed to change, which generate_renamed_fields()
            # already accounts for by adding an AlterField operation.
if old_field_dec != new_field_dec and old_field_name == field_name:
both_m2m = old_field.many_to_many and new_field.many_to_many
neither_m2m = not old_field.many_to_many and not new_field.many_to_many
if both_m2m or neither_m2m:
# Either both fields are m2m or neither is
preserve_default = True
if (
old_field.null
and not new_field.null
and not new_field.has_default()
and not new_field.many_to_many
):
field = new_field.clone()
new_default = self.questioner.ask_not_null_alteration(
field_name, model_name
)
if new_default is not models.NOT_PROVIDED:
field.default = new_default
preserve_default = False
else:
field = new_field
self.add_operation(
app_label,
operations.AlterField(
model_name=model_name,
name=field_name,
field=field,
preserve_default=preserve_default,
),
dependencies=dependencies,
)
else:
# We cannot alter between m2m and concrete fields
self._generate_removed_field(app_label, model_name, field_name)
self._generate_added_field(app_label, model_name, field_name)
def create_altered_indexes(self):
option_name = operations.AddIndex.option_name
self.renamed_index_together_values = defaultdict(list)
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_indexes = old_model_state.options[option_name]
new_indexes = new_model_state.options[option_name]
added_indexes = [idx for idx in new_indexes if idx not in old_indexes]
removed_indexes = [idx for idx in old_indexes if idx not in new_indexes]
renamed_indexes = []
# Find renamed indexes.
remove_from_added = []
remove_from_removed = []
for new_index in added_indexes:
new_index_dec = new_index.deconstruct()
new_index_name = new_index_dec[2].pop("name")
for old_index in removed_indexes:
old_index_dec = old_index.deconstruct()
old_index_name = old_index_dec[2].pop("name")
# Indexes are the same except for the names.
if (
new_index_dec == old_index_dec
and new_index_name != old_index_name
):
renamed_indexes.append((old_index_name, new_index_name, None))
remove_from_added.append(new_index)
remove_from_removed.append(old_index)
# Find index_together changed to indexes.
for (
old_value,
new_value,
index_together_app_label,
index_together_model_name,
dependencies,
) in self._get_altered_foo_together_operations(
operations.AlterIndexTogether.option_name
):
if (
app_label != index_together_app_label
or model_name != index_together_model_name
):
continue
removed_values = old_value.difference(new_value)
for removed_index_together in removed_values:
renamed_index_together_indexes = []
for new_index in added_indexes:
_, args, kwargs = new_index.deconstruct()
# Ensure only 'fields' are defined in the Index.
if (
not args
and new_index.fields == list(removed_index_together)
and set(kwargs) == {"name", "fields"}
):
renamed_index_together_indexes.append(new_index)
if len(renamed_index_together_indexes) == 1:
renamed_index = renamed_index_together_indexes[0]
remove_from_added.append(renamed_index)
renamed_indexes.append(
(None, renamed_index.name, removed_index_together)
)
self.renamed_index_together_values[
index_together_app_label, index_together_model_name
].append(removed_index_together)
# Remove renamed indexes from the lists of added and removed
# indexes.
added_indexes = [
idx for idx in added_indexes if idx not in remove_from_added
]
removed_indexes = [
idx for idx in removed_indexes if idx not in remove_from_removed
]
self.altered_indexes.update(
{
(app_label, model_name): {
"added_indexes": added_indexes,
"removed_indexes": removed_indexes,
"renamed_indexes": renamed_indexes,
}
}
)
def generate_added_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
for index in alt_indexes["added_indexes"]:
self.add_operation(
app_label,
operations.AddIndex(
model_name=model_name,
index=index,
),
)
def generate_removed_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
for index in alt_indexes["removed_indexes"]:
self.add_operation(
app_label,
operations.RemoveIndex(
model_name=model_name,
name=index.name,
),
)
def generate_renamed_indexes(self):
for (app_label, model_name), alt_indexes in self.altered_indexes.items():
for old_index_name, new_index_name, old_fields in alt_indexes[
"renamed_indexes"
]:
self.add_operation(
app_label,
operations.RenameIndex(
model_name=model_name,
new_name=new_index_name,
old_name=old_index_name,
old_fields=old_fields,
),
)
def create_altered_constraints(self):
option_name = operations.AddConstraint.option_name
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_constraints = old_model_state.options[option_name]
new_constraints = new_model_state.options[option_name]
add_constraints = [c for c in new_constraints if c not in old_constraints]
rem_constraints = [c for c in old_constraints if c not in new_constraints]
self.altered_constraints.update(
{
(app_label, model_name): {
"added_constraints": add_constraints,
"removed_constraints": rem_constraints,
}
}
)
def generate_added_constraints(self):
for (
app_label,
model_name,
), alt_constraints in self.altered_constraints.items():
for constraint in alt_constraints["added_constraints"]:
self.add_operation(
app_label,
operations.AddConstraint(
model_name=model_name,
constraint=constraint,
),
)
def generate_removed_constraints(self):
for (
app_label,
model_name,
), alt_constraints in self.altered_constraints.items():
for constraint in alt_constraints["removed_constraints"]:
self.add_operation(
app_label,
operations.RemoveConstraint(
model_name=model_name,
name=constraint.name,
),
)
@staticmethod
def _get_dependencies_for_foreign_key(app_label, model_name, field, project_state):
remote_field_model = None
if hasattr(field.remote_field, "model"):
remote_field_model = field.remote_field.model
else:
relations = project_state.relations[app_label, model_name]
for (remote_app_label, remote_model_name), fields in relations.items():
if any(
field == related_field.remote_field
for related_field in fields.values()
):
remote_field_model = f"{remote_app_label}.{remote_model_name}"
break
# Account for FKs to swappable models
swappable_setting = getattr(field, "swappable_setting", None)
if swappable_setting is not None:
dep_app_label = "__setting__"
dep_object_name = swappable_setting
else:
dep_app_label, dep_object_name = resolve_relation(
remote_field_model,
app_label,
model_name,
)
dependencies = [(dep_app_label, dep_object_name, None, True)]
if getattr(field.remote_field, "through", None):
through_app_label, through_object_name = resolve_relation(
field.remote_field.through,
app_label,
model_name,
)
dependencies.append((through_app_label, through_object_name, None, True))
return dependencies
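    # Illustrative sketch (not part of the original module): for a
    # ForeignKey to settings.AUTH_USER_MODEL, the swappable_setting branch
    # above yields the dependency ("__setting__", "AUTH_USER_MODEL", None,
    # True) instead of a concrete (app_label, model_name) pair.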
def _get_altered_foo_together_operations(self, option_name):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
# We run the old version through the field renames to account for those
old_value = old_model_state.options.get(option_name)
old_value = (
{
tuple(
self.renamed_fields.get((app_label, model_name, n), n)
for n in unique
)
for unique in old_value
}
if old_value
else set()
)
new_value = new_model_state.options.get(option_name)
new_value = set(new_value) if new_value else set()
if old_value != new_value:
dependencies = []
for foo_togethers in new_value:
for field_name in foo_togethers:
field = new_model_state.get_field(field_name)
if field.remote_field and field.remote_field.model:
dependencies.extend(
self._get_dependencies_for_foreign_key(
app_label,
model_name,
field,
self.to_state,
)
)
yield (
old_value,
new_value,
app_label,
model_name,
dependencies,
)
def _generate_removed_altered_foo_together(self, operation):
for (
old_value,
new_value,
app_label,
model_name,
dependencies,
) in self._get_altered_foo_together_operations(operation.option_name):
if operation == operations.AlterIndexTogether:
old_value = {
value
for value in old_value
if value
not in self.renamed_index_together_values[app_label, model_name]
}
removal_value = new_value.intersection(old_value)
if removal_value or old_value:
self.add_operation(
app_label,
operation(
name=model_name, **{operation.option_name: removal_value}
),
dependencies=dependencies,
)
def generate_removed_altered_unique_together(self):
self._generate_removed_altered_foo_together(operations.AlterUniqueTogether)
# RemovedInDjango51Warning.
def generate_removed_altered_index_together(self):
self._generate_removed_altered_foo_together(operations.AlterIndexTogether)
def _generate_altered_foo_together(self, operation):
for (
old_value,
new_value,
app_label,
model_name,
dependencies,
) in self._get_altered_foo_together_operations(operation.option_name):
removal_value = new_value.intersection(old_value)
if new_value != removal_value:
self.add_operation(
app_label,
operation(name=model_name, **{operation.option_name: new_value}),
dependencies=dependencies,
)
def generate_altered_unique_together(self):
self._generate_altered_foo_together(operations.AlterUniqueTogether)
# RemovedInDjango51Warning.
def generate_altered_index_together(self):
self._generate_altered_foo_together(operations.AlterIndexTogether)
def generate_altered_db_table(self):
models_to_check = self.kept_model_keys.union(
self.kept_proxy_keys, self.kept_unmanaged_keys
)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_db_table_name = old_model_state.options.get("db_table")
new_db_table_name = new_model_state.options.get("db_table")
if old_db_table_name != new_db_table_name:
self.add_operation(
app_label,
operations.AlterModelTable(
name=model_name,
table=new_db_table_name,
),
)
def generate_altered_db_table_comment(self):
models_to_check = self.kept_model_keys.union(
self.kept_proxy_keys, self.kept_unmanaged_keys
)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_db_table_comment = old_model_state.options.get("db_table_comment")
new_db_table_comment = new_model_state.options.get("db_table_comment")
if old_db_table_comment != new_db_table_comment:
self.add_operation(
app_label,
operations.AlterModelTableComment(
name=model_name,
table_comment=new_db_table_comment,
),
)
def generate_altered_options(self):
"""
Work out if any non-schema-affecting options have changed and make an
operation to represent them in state changes (in case Python code in
migrations needs them).
"""
models_to_check = self.kept_model_keys.union(
self.kept_proxy_keys,
self.kept_unmanaged_keys,
# unmanaged converted to managed
self.old_unmanaged_keys & self.new_model_keys,
# managed converted to unmanaged
self.old_model_keys & self.new_unmanaged_keys,
)
for app_label, model_name in sorted(models_to_check):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
old_options = {
key: value
for key, value in old_model_state.options.items()
if key in AlterModelOptions.ALTER_OPTION_KEYS
}
new_options = {
key: value
for key, value in new_model_state.options.items()
if key in AlterModelOptions.ALTER_OPTION_KEYS
}
if old_options != new_options:
self.add_operation(
app_label,
operations.AlterModelOptions(
name=model_name,
options=new_options,
),
)
def generate_altered_order_with_respect_to(self):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
if old_model_state.options.get(
"order_with_respect_to"
) != new_model_state.options.get("order_with_respect_to"):
# Make sure it comes second if we're adding
# (removal dependency is part of RemoveField)
dependencies = []
if new_model_state.options.get("order_with_respect_to"):
dependencies.append(
(
app_label,
model_name,
new_model_state.options["order_with_respect_to"],
True,
)
)
# Actually generate the operation
self.add_operation(
app_label,
operations.AlterOrderWithRespectTo(
name=model_name,
order_with_respect_to=new_model_state.options.get(
"order_with_respect_to"
),
),
dependencies=dependencies,
)
def generate_altered_managers(self):
for app_label, model_name in sorted(self.kept_model_keys):
old_model_name = self.renamed_models.get(
(app_label, model_name), model_name
)
old_model_state = self.from_state.models[app_label, old_model_name]
new_model_state = self.to_state.models[app_label, model_name]
if old_model_state.managers != new_model_state.managers:
self.add_operation(
app_label,
operations.AlterModelManagers(
name=model_name,
managers=new_model_state.managers,
),
)
def arrange_for_graph(self, changes, graph, migration_name=None):
"""
Take a result from changes() and a MigrationGraph, and fix the names
and dependencies of the changes so they extend the graph from the leaf
nodes for each app.
"""
leaves = graph.leaf_nodes()
name_map = {}
for app_label, migrations in list(changes.items()):
if not migrations:
continue
# Find the app label's current leaf node
app_leaf = None
for leaf in leaves:
if leaf[0] == app_label:
app_leaf = leaf
break
# Do they want an initial migration for this app?
if app_leaf is None and not self.questioner.ask_initial(app_label):
# They don't.
for migration in migrations:
name_map[(app_label, migration.name)] = (app_label, "__first__")
del changes[app_label]
continue
# Work out the next number in the sequence
if app_leaf is None:
next_number = 1
else:
next_number = (self.parse_number(app_leaf[1]) or 0) + 1
# Name each migration
for i, migration in enumerate(migrations):
if i == 0 and app_leaf:
migration.dependencies.append(app_leaf)
new_name_parts = ["%04i" % next_number]
if migration_name:
new_name_parts.append(migration_name)
elif i == 0 and not app_leaf:
new_name_parts.append("initial")
else:
new_name_parts.append(migration.suggest_name()[:100])
new_name = "_".join(new_name_parts)
name_map[(app_label, migration.name)] = (app_label, new_name)
next_number += 1
migration.name = new_name
# Now fix dependencies
for migrations in changes.values():
for migration in migrations:
migration.dependencies = [
name_map.get(d, d) for d in migration.dependencies
]
return changes
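    # A brief illustration of the naming scheme above (hypothetical app
    # state, not part of the original module): if app "books" has a leaf
    # migration "0002_add_author", the first new migration is named
    # "0003_<suggested or custom name>" and gains a dependency on
    # ("books", "0002_add_author"); with no existing leaf, the first
    # migration becomes "0001_initial".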
def _trim_to_apps(self, changes, app_labels):
"""
        Take changes from arrange_for_graph() and a set of app labels, and
        return a modified set of changes which trims out as many migrations
        as possible that are not in app_labels. Note that some other
        migrations may still be present as they may be required dependencies.
"""
# Gather other app dependencies in a first pass
app_dependencies = {}
for app_label, migrations in changes.items():
for migration in migrations:
for dep_app_label, name in migration.dependencies:
app_dependencies.setdefault(app_label, set()).add(dep_app_label)
required_apps = set(app_labels)
# Keep resolving till there's no change
old_required_apps = None
while old_required_apps != required_apps:
old_required_apps = set(required_apps)
required_apps.update(
*[app_dependencies.get(app_label, ()) for app_label in required_apps]
)
# Remove all migrations that aren't needed
for app_label in list(changes):
if app_label not in required_apps:
del changes[app_label]
return changes
@classmethod
def parse_number(cls, name):
"""
Given a migration name, try to extract a number from the beginning of
it. For a squashed migration such as '0001_squashed_0004…', return the
second number. If no number is found, return None.
"""
if squashed_match := re.search(r".*_squashed_(\d+)", name):
return int(squashed_match[1])
match = re.match(r"^\d+", name)
if match:
return int(match[0])
return None
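    # Illustrative doctest-style sketch (not part of the original module;
    # assumes the enclosing class is Django's MigrationAutodetector):
    #
    #   >>> MigrationAutodetector.parse_number("0003_auto")
    #   3
    #   >>> MigrationAutodetector.parse_number("0001_squashed_0004_other")
    #   4
    #   >>> MigrationAutodetector.parse_number("custom_name") is None
    #   True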
"""
Classes to represent the definitions of aggregate functions.
"""
from django.core.exceptions import FieldError, FullResultSet
from django.db.models.expressions import Case, Func, Star, Value, When
from django.db.models.fields import IntegerField
from django.db.models.functions.comparison import Coalesce
from django.db.models.functions.mixins import (
FixDurationInputMixin,
NumericOutputFieldMixin,
)
__all__ = [
"Aggregate",
"Avg",
"Count",
"Max",
"Min",
"StdDev",
"Sum",
"Variance",
]
class Aggregate(Func):
template = "%(function)s(%(distinct)s%(expressions)s)"
contains_aggregate = True
name = None
filter_template = "%s FILTER (WHERE %%(filter)s)"
window_compatible = True
allow_distinct = False
empty_result_set_value = None
def __init__(
self, *expressions, distinct=False, filter=None, default=None, **extra
):
if distinct and not self.allow_distinct:
raise TypeError("%s does not allow distinct." % self.__class__.__name__)
if default is not None and self.empty_result_set_value is not None:
raise TypeError(f"{self.__class__.__name__} does not allow default.")
self.distinct = distinct
self.filter = filter
self.default = default
super().__init__(*expressions, **extra)
def get_source_fields(self):
# Don't return the filter expression since it's not a source field.
return [e._output_field_or_none for e in super().get_source_expressions()]
def get_source_expressions(self):
source_expressions = super().get_source_expressions()
if self.filter:
return source_expressions + [self.filter]
return source_expressions
def set_source_expressions(self, exprs):
self.filter = self.filter and exprs.pop()
return super().set_source_expressions(exprs)
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
# Aggregates are not allowed in UPDATE queries, so ignore for_save
c = super().resolve_expression(query, allow_joins, reuse, summarize)
c.filter = c.filter and c.filter.resolve_expression(
query, allow_joins, reuse, summarize
)
if not summarize:
# Call Aggregate.get_source_expressions() to avoid
# returning self.filter and including that in this loop.
expressions = super(Aggregate, c).get_source_expressions()
for index, expr in enumerate(expressions):
if expr.contains_aggregate:
before_resolved = self.get_source_expressions()[index]
name = (
before_resolved.name
if hasattr(before_resolved, "name")
else repr(before_resolved)
)
raise FieldError(
"Cannot compute %s('%s'): '%s' is an aggregate"
% (c.name, name, name)
)
if (default := c.default) is None:
return c
if hasattr(default, "resolve_expression"):
default = default.resolve_expression(query, allow_joins, reuse, summarize)
if default._output_field_or_none is None:
default.output_field = c._output_field_or_none
else:
default = Value(default, c._output_field_or_none)
c.default = None # Reset the default argument before wrapping.
coalesce = Coalesce(c, default, output_field=c._output_field_or_none)
coalesce.is_summary = c.is_summary
return coalesce
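    # Illustrative sketch of the default-wrapping above (hypothetical model
    # and field names, not part of the original module): with no matching
    # rows, Book.objects.aggregate(total=Sum("price", default=0)) yields
    # {"total": 0} rather than {"total": None}, because the aggregate is
    # resolved to Coalesce(Sum("price"), Value(0)).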
@property
def default_alias(self):
expressions = self.get_source_expressions()
if len(expressions) == 1 and hasattr(expressions[0], "name"):
return "%s__%s" % (expressions[0].name, self.name.lower())
raise TypeError("Complex expressions require an alias")
def get_group_by_cols(self):
return []
def as_sql(self, compiler, connection, **extra_context):
extra_context["distinct"] = "DISTINCT " if self.distinct else ""
if self.filter:
if connection.features.supports_aggregate_filter_clause:
try:
filter_sql, filter_params = self.filter.as_sql(compiler, connection)
except FullResultSet:
pass
else:
template = self.filter_template % extra_context.get(
"template", self.template
)
sql, params = super().as_sql(
compiler,
connection,
template=template,
filter=filter_sql,
**extra_context,
)
return sql, (*params, *filter_params)
else:
copy = self.copy()
copy.filter = None
source_expressions = copy.get_source_expressions()
condition = When(self.filter, then=source_expressions[0])
copy.set_source_expressions([Case(condition)] + source_expressions[1:])
return super(Aggregate, copy).as_sql(
compiler, connection, **extra_context
)
return super().as_sql(compiler, connection, **extra_context)
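    # Illustrative sketch (not part of the original module): on backends
    # with FILTER support, Sum("price", filter=Q(active=True)) renders
    # roughly as SUM("price") FILTER (WHERE "active"); other backends fall
    # back to the Case/When rewrite above, roughly
    # SUM(CASE WHEN "active" THEN "price" ELSE NULL END).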
def _get_repr_options(self):
options = super()._get_repr_options()
if self.distinct:
options["distinct"] = self.distinct
if self.filter:
options["filter"] = self.filter
return options
class Avg(FixDurationInputMixin, NumericOutputFieldMixin, Aggregate):
function = "AVG"
name = "Avg"
allow_distinct = True
class Count(Aggregate):
function = "COUNT"
name = "Count"
output_field = IntegerField()
allow_distinct = True
empty_result_set_value = 0
def __init__(self, expression, filter=None, **extra):
if expression == "*":
expression = Star()
if isinstance(expression, Star) and filter is not None:
raise ValueError("Star cannot be used with filter. Please specify a field.")
super().__init__(expression, filter=filter, **extra)
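    # Illustrative sketch (not part of the original module): Count("*") is
    # normalized to Count(Star()) and counts all rows, while combining "*"
    # with a filter raises the ValueError above:
    #
    #   >>> Count("*", filter=Q(pk__isnull=False))
    #   Traceback (most recent call last):
    #     ...
    #   ValueError: Star cannot be used with filter. Please specify a field.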
class Max(Aggregate):
function = "MAX"
name = "Max"
class Min(Aggregate):
function = "MIN"
name = "Min"
class StdDev(NumericOutputFieldMixin, Aggregate):
name = "StdDev"
def __init__(self, expression, sample=False, **extra):
self.function = "STDDEV_SAMP" if sample else "STDDEV_POP"
super().__init__(expression, **extra)
def _get_repr_options(self):
return {**super()._get_repr_options(), "sample": self.function == "STDDEV_SAMP"}
class Sum(FixDurationInputMixin, Aggregate):
function = "SUM"
name = "Sum"
allow_distinct = True
class Variance(NumericOutputFieldMixin, Aggregate):
name = "Variance"
def __init__(self, expression, sample=False, **extra):
self.function = "VAR_SAMP" if sample else "VAR_POP"
super().__init__(expression, **extra)
def _get_repr_options(self):
return {**super()._get_repr_options(), "sample": self.function == "VAR_SAMP"}
import bisect
import copy
import inspect
import warnings
from collections import defaultdict
from django.apps import apps
from django.conf import settings
from django.core.exceptions import FieldDoesNotExist, ImproperlyConfigured
from django.db import connections
from django.db.models import AutoField, Manager, OrderWrt, UniqueConstraint
from django.db.models.query_utils import PathInfo
from django.utils.datastructures import ImmutableList, OrderedSet
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from django.utils.text import camel_case_to_spaces, format_lazy
from django.utils.translation import override
PROXY_PARENTS = object()
EMPTY_RELATION_TREE = ()
IMMUTABLE_WARNING = (
"The return type of '%s' should never be mutated. If you want to manipulate this "
"list for your own use, make a copy first."
)
DEFAULT_NAMES = (
"verbose_name",
"verbose_name_plural",
"db_table",
"db_table_comment",
"ordering",
"unique_together",
"permissions",
"get_latest_by",
"order_with_respect_to",
"app_label",
"db_tablespace",
"abstract",
"managed",
"proxy",
"swappable",
"auto_created",
# Must be kept for backward compatibility with old migrations.
"index_together",
"apps",
"default_permissions",
"select_on_save",
"default_related_name",
"required_db_features",
"required_db_vendor",
"base_manager_name",
"default_manager_name",
"indexes",
"constraints",
)
def normalize_together(option_together):
"""
option_together can be either a tuple of tuples, or a single
tuple of two strings. Normalize it to a tuple of tuples, so that
calling code can uniformly expect that.
"""
try:
if not option_together:
return ()
if not isinstance(option_together, (tuple, list)):
raise TypeError
first_element = option_together[0]
if not isinstance(first_element, (tuple, list)):
option_together = (option_together,)
# Normalize everything to tuples
return tuple(tuple(ot) for ot in option_together)
except TypeError:
# If the value of option_together isn't valid, return it
# verbatim; this will be picked up by the check framework later.
return option_together
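# Illustrative doctest-style sketch of normalize_together (not part of the
# original module):
#
#   >>> normalize_together(("name", "slug"))
#   (('name', 'slug'),)
#   >>> normalize_together([["a", "b"], ["c", "d"]])
#   (('a', 'b'), ('c', 'd'))
#   >>> normalize_together(None)
#   ()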
def make_immutable_fields_list(name, data):
return ImmutableList(data, warning=IMMUTABLE_WARNING % name)
class Options:
FORWARD_PROPERTIES = {
"fields",
"many_to_many",
"concrete_fields",
"local_concrete_fields",
"_non_pk_concrete_field_names",
"_forward_fields_map",
"managers",
"managers_map",
"base_manager",
"default_manager",
}
REVERSE_PROPERTIES = {"related_objects", "fields_map", "_relation_tree"}
default_apps = apps
def __init__(self, meta, app_label=None):
self._get_fields_cache = {}
self.local_fields = []
self.local_many_to_many = []
self.private_fields = []
self.local_managers = []
self.base_manager_name = None
self.default_manager_name = None
self.model_name = None
self.verbose_name = None
self.verbose_name_plural = None
self.db_table = ""
self.db_table_comment = ""
self.ordering = []
self._ordering_clash = False
self.indexes = []
self.constraints = []
self.unique_together = []
self.index_together = [] # RemovedInDjango51Warning.
self.select_on_save = False
self.default_permissions = ("add", "change", "delete", "view")
self.permissions = []
self.object_name = None
self.app_label = app_label
self.get_latest_by = None
self.order_with_respect_to = None
self.db_tablespace = settings.DEFAULT_TABLESPACE
self.required_db_features = []
self.required_db_vendor = None
self.meta = meta
self.pk = None
self.auto_field = None
self.abstract = False
self.managed = True
self.proxy = False
# For any class that is a proxy (including automatically created
# classes for deferred object loading), proxy_for_model tells us
# which class this model is proxying. Note that proxy_for_model
# can create a chain of proxy models. For non-proxy models, the
# variable is always None.
self.proxy_for_model = None
# For any non-abstract class, the concrete class is the model
# in the end of the proxy_for_model chain. In particular, for
# concrete models, the concrete_model is always the class itself.
self.concrete_model = None
self.swappable = None
self.parents = {}
self.auto_created = False
# List of all lookups defined in ForeignKey 'limit_choices_to' options
# from *other* models. Needed for some admin checks. Internal use only.
self.related_fkey_lookups = []
# A custom app registry to use, if you're making a separate model set.
self.apps = self.default_apps
self.default_related_name = None
@property
def label(self):
return "%s.%s" % (self.app_label, self.object_name)
@property
def label_lower(self):
return "%s.%s" % (self.app_label, self.model_name)
@property
def app_config(self):
# Don't go through get_app_config to avoid triggering imports.
return self.apps.app_configs.get(self.app_label)
def contribute_to_class(self, cls, name):
from django.db import connection
from django.db.backends.utils import truncate_name
cls._meta = self
self.model = cls
# First, construct the default values for these options.
self.object_name = cls.__name__
self.model_name = self.object_name.lower()
self.verbose_name = camel_case_to_spaces(self.object_name)
# Store the original user-defined values for each option,
# for use when serializing the model definition
self.original_attrs = {}
# Next, apply any overridden values from 'class Meta'.
if self.meta:
meta_attrs = self.meta.__dict__.copy()
for name in self.meta.__dict__:
# Ignore any private attributes that Django doesn't care about.
# NOTE: We can't modify a dictionary's contents while looping
# over it, so we loop over the *original* dictionary instead.
if name.startswith("_"):
del meta_attrs[name]
for attr_name in DEFAULT_NAMES:
if attr_name in meta_attrs:
setattr(self, attr_name, meta_attrs.pop(attr_name))
self.original_attrs[attr_name] = getattr(self, attr_name)
elif hasattr(self.meta, attr_name):
setattr(self, attr_name, getattr(self.meta, attr_name))
self.original_attrs[attr_name] = getattr(self, attr_name)
self.unique_together = normalize_together(self.unique_together)
self.index_together = normalize_together(self.index_together)
if self.index_together:
warnings.warn(
f"'index_together' is deprecated. Use 'Meta.indexes' in "
f"{self.label!r} instead.",
RemovedInDjango51Warning,
)
# App label/class name interpolation for names of constraints and
# indexes.
if not getattr(cls._meta, "abstract", False):
for attr_name in {"constraints", "indexes"}:
objs = getattr(self, attr_name, [])
setattr(self, attr_name, self._format_names_with_class(cls, objs))
            # verbose_name_plural is a special case because it uses an 's'
# by default.
if self.verbose_name_plural is None:
self.verbose_name_plural = format_lazy("{}s", self.verbose_name)
            # order_with_respect_to and ordering are mutually exclusive.
self._ordering_clash = bool(self.ordering and self.order_with_respect_to)
# Any leftover attributes must be invalid.
if meta_attrs != {}:
raise TypeError(
"'class Meta' got invalid attribute(s): %s" % ",".join(meta_attrs)
)
else:
self.verbose_name_plural = format_lazy("{}s", self.verbose_name)
del self.meta
# If the db_table wasn't provided, use the app_label + model_name.
if not self.db_table:
self.db_table = "%s_%s" % (self.app_label, self.model_name)
self.db_table = truncate_name(
self.db_table, connection.ops.max_name_length()
)
def _format_names_with_class(self, cls, objs):
"""App label/class name interpolation for object names."""
new_objs = []
for obj in objs:
obj = obj.clone()
obj.name = obj.name % {
"app_label": cls._meta.app_label.lower(),
"class": cls.__name__.lower(),
}
new_objs.append(obj)
return new_objs
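    # Illustrative sketch (hypothetical model, not part of the original
    # module): an index declared on a model Book in the app "shop" as
    # models.Index(fields=["title"], name="%(app_label)s_%(class)s_idx")
    # comes out of this method named "shop_book_idx".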
def _get_default_pk_class(self):
pk_class_path = getattr(
self.app_config,
"default_auto_field",
settings.DEFAULT_AUTO_FIELD,
)
if self.app_config and self.app_config._is_default_auto_field_overridden:
app_config_class = type(self.app_config)
source = (
f"{app_config_class.__module__}."
f"{app_config_class.__qualname__}.default_auto_field"
)
else:
source = "DEFAULT_AUTO_FIELD"
if not pk_class_path:
raise ImproperlyConfigured(f"{source} must not be empty.")
try:
pk_class = import_string(pk_class_path)
except ImportError as e:
msg = (
f"{source} refers to the module '{pk_class_path}' that could "
f"not be imported."
)
raise ImproperlyConfigured(msg) from e
if not issubclass(pk_class, AutoField):
raise ValueError(
f"Primary key '{pk_class_path}' referred by {source} must "
f"subclass AutoField."
)
return pk_class
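    # Illustrative sketch (not part of the original module): with
    # DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" and no per-app
    # default_auto_field override, this returns the BigAutoField class;
    # pointing either setting at a class that is not an AutoField subclass
    # raises the ValueError above.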
def _prepare(self, model):
if self.order_with_respect_to:
# The app registry will not be ready at this point, so we cannot
# use get_field().
query = self.order_with_respect_to
try:
self.order_with_respect_to = next(
f
for f in self._get_fields(reverse=False)
if f.name == query or f.attname == query
)
except StopIteration:
raise FieldDoesNotExist(
"%s has no field named '%s'" % (self.object_name, query)
)
self.ordering = ("_order",)
if not any(
isinstance(field, OrderWrt) for field in model._meta.local_fields
):
model.add_to_class("_order", OrderWrt())
else:
self.order_with_respect_to = None
if self.pk is None:
if self.parents:
# Promote the first parent link in lieu of adding yet another
# field.
field = next(iter(self.parents.values()))
# Look for a local field with the same name as the
# first parent link. If a local field has already been
# created, use it instead of promoting the parent
already_created = [
fld for fld in self.local_fields if fld.name == field.name
]
if already_created:
field = already_created[0]
field.primary_key = True
self.setup_pk(field)
else:
pk_class = self._get_default_pk_class()
auto = pk_class(verbose_name="ID", primary_key=True, auto_created=True)
model.add_to_class("id", auto)
def add_manager(self, manager):
self.local_managers.append(manager)
self._expire_cache()
def add_field(self, field, private=False):
# Insert the given field in the order in which it was created, using
# the "creation_counter" attribute of the field.
# Move many-to-many related fields from self.fields into
# self.many_to_many.
if private:
self.private_fields.append(field)
elif field.is_relation and field.many_to_many:
bisect.insort(self.local_many_to_many, field)
else:
bisect.insort(self.local_fields, field)
self.setup_pk(field)
# If the field being added is a relation to another known field,
# expire the cache on this field and the forward cache on the field
# being referenced, because there will be new relationships in the
# cache. Otherwise, expire the cache of references *to* this field.
# The mechanism for getting at the related model is slightly odd -
# ideally, we'd just ask for field.related_model. However, related_model
# is a cached property, and all the models haven't been loaded yet, so
# we need to make sure we don't cache a string reference.
if (
field.is_relation
and hasattr(field.remote_field, "model")
and field.remote_field.model
):
try:
field.remote_field.model._meta._expire_cache(forward=False)
except AttributeError:
pass
self._expire_cache()
else:
self._expire_cache(reverse=False)
def setup_pk(self, field):
if not self.pk and field.primary_key:
self.pk = field
field.serialize = False
def setup_proxy(self, target):
"""
Do the internal setup so that the current model is a proxy for
"target".
"""
self.pk = target._meta.pk
self.proxy_for_model = target
self.db_table = target._meta.db_table
def __repr__(self):
return "<Options for %s>" % self.object_name
def __str__(self):
return self.label_lower
def can_migrate(self, connection):
"""
Return True if the model can/should be migrated on the `connection`.
`connection` can be either a real connection or a connection alias.
"""
if self.proxy or self.swapped or not self.managed:
return False
if isinstance(connection, str):
connection = connections[connection]
if self.required_db_vendor:
return self.required_db_vendor == connection.vendor
if self.required_db_features:
return all(
getattr(connection.features, feat, False)
for feat in self.required_db_features
)
return True
@property
def verbose_name_raw(self):
"""Return the untranslated verbose name."""
with override(None):
return str(self.verbose_name)
@property
def swapped(self):
"""
Has this model been swapped out for another? If so, return the model
name of the replacement; otherwise, return None.
For historical reasons, model name lookups using get_model() are
case insensitive, so we make sure we are case insensitive here.
"""
if self.swappable:
swapped_for = getattr(settings, self.swappable, None)
if swapped_for:
try:
swapped_label, swapped_object = swapped_for.split(".")
except ValueError:
# setting not in the format app_label.model_name
# raising ImproperlyConfigured here causes problems with
# test cleanup code - instead it is raised in get_user_model
# or as part of validation.
return swapped_for
if (
"%s.%s" % (swapped_label, swapped_object.lower())
!= self.label_lower
):
return swapped_for
return None
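    # Illustrative sketch (not part of the original module): the stock
    # django.contrib.auth User model declares swappable = "AUTH_USER_MODEL",
    # so with AUTH_USER_MODEL = "myapp.User" in settings,
    # auth.User._meta.swapped returns "myapp.User" while
    # myapp.User._meta.swapped returns None, since it is the replacement.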
@cached_property
def managers(self):
managers = []
seen_managers = set()
bases = (b for b in self.model.mro() if hasattr(b, "_meta"))
for depth, base in enumerate(bases):
for manager in base._meta.local_managers:
if manager.name in seen_managers:
continue
manager = copy.copy(manager)
manager.model = self.model
seen_managers.add(manager.name)
managers.append((depth, manager.creation_counter, manager))
return make_immutable_fields_list(
"managers",
(m[2] for m in sorted(managers)),
)
@cached_property
def managers_map(self):
return {manager.name: manager for manager in self.managers}
@cached_property
def base_manager(self):
base_manager_name = self.base_manager_name
if not base_manager_name:
# Get the first parent's base_manager_name if there's one.
for parent in self.model.mro()[1:]:
if hasattr(parent, "_meta"):
if parent._base_manager.name != "_base_manager":
base_manager_name = parent._base_manager.name
break
if base_manager_name:
try:
return self.managers_map[base_manager_name]
except KeyError:
raise ValueError(
"%s has no manager named %r"
% (
self.object_name,
base_manager_name,
)
)
manager = Manager()
manager.name = "_base_manager"
manager.model = self.model
manager.auto_created = True
return manager
@cached_property
def default_manager(self):
default_manager_name = self.default_manager_name
if not default_manager_name and not self.local_managers:
# Get the first parent's default_manager_name if there's one.
for parent in self.model.mro()[1:]:
if hasattr(parent, "_meta"):
default_manager_name = parent._meta.default_manager_name
break
if default_manager_name:
try:
return self.managers_map[default_manager_name]
except KeyError:
raise ValueError(
"%s has no manager named %r"
% (
self.object_name,
default_manager_name,
)
)
if self.managers:
return self.managers[0]
@cached_property
def fields(self):
"""
Return a list of all forward fields on the model and its parents,
excluding ManyToManyFields.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this field list.
"""
        # For legacy reasons, the fields property should only contain forward
        # fields that are neither private nor of m2m cardinality. Therefore
        # we pass these three filters to the generator.
        # The third filter is a longwinded way of checking f.related_model -
        # we don't use that property directly because related_model is a
        # cached property, and all the models may not have been loaded yet;
        # we don't want to cache the string reference to the related_model.
def is_not_an_m2m_field(f):
return not (f.is_relation and f.many_to_many)
def is_not_a_generic_relation(f):
return not (f.is_relation and f.one_to_many)
def is_not_a_generic_foreign_key(f):
return not (
f.is_relation
and f.many_to_one
and not (hasattr(f.remote_field, "model") and f.remote_field.model)
)
return make_immutable_fields_list(
"fields",
(
f
for f in self._get_fields(reverse=False)
if is_not_an_m2m_field(f)
and is_not_a_generic_relation(f)
and is_not_a_generic_foreign_key(f)
),
)
@cached_property
def concrete_fields(self):
"""
Return a list of all concrete fields on the model and its parents.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this field list.
"""
return make_immutable_fields_list(
"concrete_fields", (f for f in self.fields if f.concrete)
)
@cached_property
def local_concrete_fields(self):
"""
Return a list of all concrete fields on the model.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this field list.
"""
return make_immutable_fields_list(
"local_concrete_fields", (f for f in self.local_fields if f.concrete)
)
@cached_property
def many_to_many(self):
"""
Return a list of all many to many fields on the model and its parents.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this list.
"""
return make_immutable_fields_list(
"many_to_many",
(
f
for f in self._get_fields(reverse=False)
if f.is_relation and f.many_to_many
),
)
@cached_property
def related_objects(self):
"""
Return all related objects pointing to the current model. The related
objects can come from a one-to-one, one-to-many, or many-to-many field
relation type.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this field list.
"""
all_related_fields = self._get_fields(
forward=False, reverse=True, include_hidden=True
)
return make_immutable_fields_list(
"related_objects",
(
obj
for obj in all_related_fields
if not obj.hidden or obj.field.many_to_many
),
)
@cached_property
def _forward_fields_map(self):
res = {}
fields = self._get_fields(reverse=False)
for field in fields:
res[field.name] = field
# Due to the way Django's internals work, get_field() should also
# be able to fetch a field by attname. In the case of a concrete
# field with relation, includes the *_id name too
try:
res[field.attname] = field
except AttributeError:
pass
return res
@cached_property
def fields_map(self):
res = {}
fields = self._get_fields(forward=False, include_hidden=True)
for field in fields:
res[field.name] = field
# Due to the way Django's internals work, get_field() should also
# be able to fetch a field by attname. In the case of a concrete
# field with relation, includes the *_id name too
try:
res[field.attname] = field
except AttributeError:
pass
return res
def get_field(self, field_name):
"""
Return a field instance given the name of a forward or reverse field.
"""
try:
# In order to avoid premature loading of the relation tree
# (expensive) we prefer checking if the field is a forward field.
return self._forward_fields_map[field_name]
except KeyError:
# If the app registry is not ready, reverse fields are
# unavailable, therefore we throw a FieldDoesNotExist exception.
if not self.apps.models_ready:
raise FieldDoesNotExist(
"%s has no field named '%s'. The app cache isn't ready yet, "
"so if this is an auto-created related field, it won't "
"be available yet." % (self.object_name, field_name)
)
try:
# Retrieve field instance by name from cached or just-computed
# field map.
return self.fields_map[field_name]
except KeyError:
raise FieldDoesNotExist(
"%s has no field named '%s'" % (self.object_name, field_name)
)
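    # Illustrative sketch (hypothetical model, not part of the original
    # module):
    #
    #   >>> Book._meta.get_field("author")       # forward field, cheap
    #   >>> Book._meta.get_field("review")       # reverse field, if defined
    #   >>> Book._meta.get_field("nonexistent")
    #   Traceback (most recent call last):
    #     ...
    #   FieldDoesNotExist: Book has no field named 'nonexistent'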
def get_base_chain(self, model):
"""
Return a list of parent classes leading to `model` (ordered from
closest to most distant ancestor). This has to handle the case where
`model` is a grandparent or even more distant relation.
"""
if not self.parents:
return []
if model in self.parents:
return [model]
for parent in self.parents:
res = parent._meta.get_base_chain(model)
if res:
res.insert(0, parent)
return res
return []
def get_parent_list(self):
"""
Return all the ancestors of this model as a list ordered by MRO.
Useful for determining if something is an ancestor, regardless of lineage.
"""
result = OrderedSet(self.parents)
for parent in self.parents:
for ancestor in parent._meta.get_parent_list():
result.add(ancestor)
return list(result)
def get_ancestor_link(self, ancestor):
"""
Return the field on the current model which points to the given
"ancestor". This is possible an indirect link (a pointer to a parent
model, which points, eventually, to the ancestor). Used when
constructing table joins for model inheritance.
Return None if the model isn't an ancestor of this one.
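        An illustrative sketch (assumes a hypothetical ``Restaurant`` model
        inheriting from ``Place`` via multi-table inheritance):
        >>> Restaurant._meta.get_ancestor_link(Place)
        <django.db.models.fields.related.OneToOneField: place_ptr>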
"""
if ancestor in self.parents:
return self.parents[ancestor]
for parent in self.parents:
            # Try to get a link field from the immediate parent.
parent_link = parent._meta.get_ancestor_link(ancestor)
if parent_link:
                # In case of a proxied model, the first link of the
                # chain to the ancestor is that parent's own link.
return self.parents[parent] or parent_link
def get_path_to_parent(self, parent):
"""
Return a list of PathInfos containing the path from the current
model to the parent model, or an empty list if parent is not a
parent of the current model.
"""
if self.model is parent:
return []
# Skip the chain of proxy to the concrete proxied model.
proxied_model = self.concrete_model
path = []
opts = self
for int_model in self.get_base_chain(parent):
if int_model is proxied_model:
opts = int_model._meta
else:
final_field = opts.parents[int_model]
targets = (final_field.remote_field.get_related_field(),)
opts = int_model._meta
path.append(
PathInfo(
from_opts=final_field.model._meta,
to_opts=opts,
target_fields=targets,
join_field=final_field,
m2m=False,
direct=True,
filtered_relation=None,
)
)
return path
def get_path_from_parent(self, parent):
"""
Return a list of PathInfos containing the path from the parent
model to the current model, or an empty list if parent is not a
parent of the current model.
"""
if self.model is parent:
return []
model = self.concrete_model
# Get a reversed base chain including both the current and parent
# models.
chain = model._meta.get_base_chain(parent)
chain.reverse()
chain.append(model)
# Construct a list of the PathInfos between models in chain.
path = []
for i, ancestor in enumerate(chain[:-1]):
child = chain[i + 1]
link = child._meta.get_ancestor_link(ancestor)
path.extend(link.reverse_path_infos)
return path
def _populate_directed_relation_graph(self):
"""
        This method is used by each model to find its reverse objects. As this
        method is very expensive and is accessed frequently (it looks up every
        field in a model, in every app), it is computed on first access and
        then cached as a property on every model.
"""
related_objects_graph = defaultdict(list)
all_models = self.apps.get_models(include_auto_created=True)
for model in all_models:
opts = model._meta
            # An abstract model's fields are copied to child models, so those
            # fields will be seen via the child models instead.
if opts.abstract:
continue
fields_with_relations = (
f
for f in opts._get_fields(reverse=False, include_parents=False)
if f.is_relation and f.related_model is not None
)
for f in fields_with_relations:
if not isinstance(f.remote_field.model, str):
remote_label = f.remote_field.model._meta.concrete_model._meta.label
related_objects_graph[remote_label].append(f)
for model in all_models:
# Set the relation_tree using the internal __dict__. In this way
# we avoid calling the cached property. In attribute lookup,
# __dict__ takes precedence over a data descriptor (such as
            # @cached_property). This means the cached property's getter runs
            # only if '_relation_tree' is not already in __dict__.
related_objects = related_objects_graph[
model._meta.concrete_model._meta.label
]
model._meta.__dict__["_relation_tree"] = related_objects
        # It is possible that self is not in all_models, so guard against
        # that with a default for get().
return self.__dict__.get("_relation_tree", EMPTY_RELATION_TREE)
@cached_property
def _relation_tree(self):
return self._populate_directed_relation_graph()
def _expire_cache(self, forward=True, reverse=True):
# This method is usually called by apps.cache_clear(), when the
# registry is finalized, or when a new field is added.
if forward:
for cache_key in self.FORWARD_PROPERTIES:
if cache_key in self.__dict__:
delattr(self, cache_key)
if reverse and not self.abstract:
for cache_key in self.REVERSE_PROPERTIES:
if cache_key in self.__dict__:
delattr(self, cache_key)
self._get_fields_cache = {}
def get_fields(self, include_parents=True, include_hidden=False):
"""
Return a list of fields associated to the model. By default, include
forward and reverse fields, fields derived from inheritance, but not
hidden fields. The returned fields can be changed using the parameters:
- include_parents: include fields derived from inheritance
- include_hidden: include fields that have a related_name that
starts with a "+"
"""
if include_parents is False:
include_parents = PROXY_PARENTS
return self._get_fields(
include_parents=include_parents, include_hidden=include_hidden
)
def _get_fields(
self,
forward=True,
reverse=True,
include_parents=True,
include_hidden=False,
seen_models=None,
):
"""
Internal helper function to return fields of the model.
* If forward=True, then fields defined on this model are returned.
* If reverse=True, then relations pointing to this model are returned.
* If include_hidden=True, then fields with is_hidden=True are returned.
* The include_parents argument toggles if fields from parent models
should be included. It has three values: True, False, and
PROXY_PARENTS. When set to PROXY_PARENTS, the call will return all
fields defined for the current model or any of its parents in the
parent chain to the model's concrete model.
"""
if include_parents not in (True, False, PROXY_PARENTS):
raise TypeError(
"Invalid argument for include_parents: %s" % (include_parents,)
)
        # This helper function allows recursion in the ``get_fields()``
        # implementation and provides a fast way for Django's internals to
        # access specific subsets of fields.
# We must keep track of which models we have already seen. Otherwise we
# could include the same field multiple times from different models.
topmost_call = seen_models is None
if topmost_call:
seen_models = set()
seen_models.add(self.model)
        # Create a cache key composed of all arguments.
cache_key = (forward, reverse, include_parents, include_hidden, topmost_call)
try:
            # To avoid list manipulation, always return a shallow copy of the
            # results.
return self._get_fields_cache[cache_key]
except KeyError:
pass
fields = []
# Recursively call _get_fields() on each parent, with the same
# options provided in this call.
if include_parents is not False:
for parent in self.parents:
# In diamond inheritance it is possible that we see the same
# model from two different routes. In that case, avoid adding
# fields from the same parent again.
if parent in seen_models:
continue
if (
parent._meta.concrete_model != self.concrete_model
and include_parents == PROXY_PARENTS
):
continue
for obj in parent._meta._get_fields(
forward=forward,
reverse=reverse,
include_parents=include_parents,
include_hidden=include_hidden,
seen_models=seen_models,
):
if (
not getattr(obj, "parent_link", False)
or obj.model == self.concrete_model
):
fields.append(obj)
if reverse and not self.proxy:
# Tree is computed once and cached until the app cache is expired.
# It is composed of a list of fields pointing to the current model
# from other models.
all_fields = self._relation_tree
for field in all_fields:
# If hidden fields should be included or the relation is not
# intentionally hidden, add to the fields dict.
if include_hidden or not field.remote_field.hidden:
fields.append(field.remote_field)
if forward:
fields += self.local_fields
fields += self.local_many_to_many
# Private fields are recopied to each child model, and they get a
# different model as field.model in each child. Hence we have to
# add the private fields separately from the topmost call. If we
            # did this recursively, as with local_fields, we would get field
# instances with field.model != self.model.
if topmost_call:
fields += self.private_fields
        # To avoid list manipulation, always return a shallow copy of the
        # results.
fields = make_immutable_fields_list("get_fields()", fields)
        # Store the result in the cache for later access.
self._get_fields_cache[cache_key] = fields
return fields
@cached_property
def total_unique_constraints(self):
"""
        Return a list of total unique constraints. Useful for determining the
        set of fields guaranteed to be unique for all rows.
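        An illustrative sketch (assumes a hypothetical ``Booking`` model whose
        Meta declares an unconditional
        UniqueConstraint(fields=["room", "date"], name="unique_booking")):
        >>> [c.name for c in Booking._meta.total_unique_constraints]
        ['unique_booking']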
"""
return [
constraint
for constraint in self.constraints
if (
isinstance(constraint, UniqueConstraint)
and constraint.condition is None
and not constraint.contains_expressions
)
]
@cached_property
def _property_names(self):
"""Return a set of the names of the properties defined on the model."""
names = []
for name in dir(self.model):
attr = inspect.getattr_static(self.model, name)
if isinstance(attr, property):
names.append(name)
return frozenset(names)
@cached_property
def _non_pk_concrete_field_names(self):
"""
Return a set of the non-pk concrete field names defined on the model.
"""
names = []
for field in self.concrete_fields:
if not field.primary_key:
names.append(field.name)
if field.name != field.attname:
names.append(field.attname)
return frozenset(names)
@cached_property
def db_returning_fields(self):
"""
Private API intended only to be used by Django itself.
Fields to be returned after a database insert.
"""
return [
field
for field in self._get_fields(
forward=True, reverse=False, include_parents=PROXY_PARENTS
)
if getattr(field, "db_returning", False)
]
|
3a62bcb39adca3045e1726a20cbceb19b6f4d3eb0c01deeaf000dfd7dc806423 | import copy
import inspect
import warnings
from functools import partialmethod
from itertools import chain
from asgiref.sync import sync_to_async
import django
from django.apps import apps
from django.conf import settings
from django.core import checks
from django.core.exceptions import (
NON_FIELD_ERRORS,
FieldDoesNotExist,
FieldError,
MultipleObjectsReturned,
ObjectDoesNotExist,
ValidationError,
)
from django.db import (
DJANGO_VERSION_PICKLE_KEY,
DatabaseError,
connection,
connections,
router,
transaction,
)
from django.db.models import NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value
from django.db.models.constants import LOOKUP_SEP
from django.db.models.constraints import CheckConstraint, UniqueConstraint
from django.db.models.deletion import CASCADE, Collector
from django.db.models.expressions import RawSQL
from django.db.models.fields.related import (
ForeignObjectRel,
OneToOneField,
lazy_related_operation,
resolve_relation,
)
from django.db.models.functions import Coalesce
from django.db.models.manager import Manager
from django.db.models.options import Options
from django.db.models.query import F, Q
from django.db.models.signals import (
class_prepared,
post_init,
post_save,
pre_init,
pre_save,
)
from django.db.models.utils import AltersData, make_model_tuple
from django.utils.encoding import force_str
from django.utils.hashable import make_hashable
from django.utils.text import capfirst, get_text_list
from django.utils.translation import gettext_lazy as _
class Deferred:
def __repr__(self):
return "<Deferred field>"
def __str__(self):
return "<Deferred field>"
DEFERRED = Deferred()
def subclass_exception(name, bases, module, attached_to):
"""
    Create an exception subclass. Used by ModelBase below.
The exception is created in a way that allows it to be pickled, assuming
that the returned exception class will be added as an attribute to the
'attached_to' class.
"""
return type(
name,
bases,
{
"__module__": module,
"__qualname__": "%s.%s" % (attached_to.__qualname__, name),
},
)
def _has_contribute_to_class(value):
# Only call contribute_to_class() if it's bound.
return not inspect.isclass(value) and hasattr(value, "contribute_to_class")
class ModelBase(type):
"""Metaclass for all models."""
def __new__(cls, name, bases, attrs, **kwargs):
super_new = super().__new__
# Also ensure initialization is only performed for subclasses of Model
# (excluding Model class itself).
parents = [b for b in bases if isinstance(b, ModelBase)]
if not parents:
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop("__module__")
new_attrs = {"__module__": module}
classcell = attrs.pop("__classcell__", None)
if classcell is not None:
new_attrs["__classcell__"] = classcell
attr_meta = attrs.pop("Meta", None)
# Pass all attrs without a (Django-specific) contribute_to_class()
# method to type.__new__() so that they're properly initialized
# (i.e. __set_name__()).
contributable_attrs = {}
for obj_name, obj in attrs.items():
if _has_contribute_to_class(obj):
contributable_attrs[obj_name] = obj
else:
new_attrs[obj_name] = obj
new_class = super_new(cls, name, bases, new_attrs, **kwargs)
abstract = getattr(attr_meta, "abstract", False)
meta = attr_meta or getattr(new_class, "Meta", None)
base_meta = getattr(new_class, "_meta", None)
app_label = None
# Look for an application configuration to attach the model to.
app_config = apps.get_containing_app_config(module)
if getattr(meta, "app_label", None) is None:
if app_config is None:
if not abstract:
raise RuntimeError(
"Model class %s.%s doesn't declare an explicit "
"app_label and isn't in an application in "
"INSTALLED_APPS." % (module, name)
)
else:
app_label = app_config.label
new_class.add_to_class("_meta", Options(meta, app_label))
if not abstract:
new_class.add_to_class(
"DoesNotExist",
subclass_exception(
"DoesNotExist",
tuple(
x.DoesNotExist
for x in parents
if hasattr(x, "_meta") and not x._meta.abstract
)
or (ObjectDoesNotExist,),
module,
attached_to=new_class,
),
)
new_class.add_to_class(
"MultipleObjectsReturned",
subclass_exception(
"MultipleObjectsReturned",
tuple(
x.MultipleObjectsReturned
for x in parents
if hasattr(x, "_meta") and not x._meta.abstract
)
or (MultipleObjectsReturned,),
module,
attached_to=new_class,
),
)
if base_meta and not base_meta.abstract:
# Non-abstract child classes inherit some attributes from their
# non-abstract parent (unless an ABC comes before it in the
# method resolution order).
if not hasattr(meta, "ordering"):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, "get_latest_by"):
new_class._meta.get_latest_by = base_meta.get_latest_by
is_proxy = new_class._meta.proxy
# If the model is a proxy, ensure that the base class
# hasn't been swapped out.
if is_proxy and base_meta and base_meta.swapped:
raise TypeError(
"%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)
)
# Add remaining attributes (those with a contribute_to_class() method)
# to the class.
for obj_name, obj in contributable_attrs.items():
new_class.add_to_class(obj_name, obj)
# All the fields of any type declared on this model
new_fields = chain(
new_class._meta.local_fields,
new_class._meta.local_many_to_many,
new_class._meta.private_fields,
)
field_names = {f.name for f in new_fields}
# Basic setup for proxy models.
if is_proxy:
base = None
for parent in [kls for kls in parents if hasattr(kls, "_meta")]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError(
"Abstract base class containing model fields not "
"permitted for proxy model '%s'." % name
)
else:
continue
if base is None:
base = parent
elif parent._meta.concrete_model is not base._meta.concrete_model:
raise TypeError(
"Proxy model '%s' has more than one non-abstract model base "
"class." % name
)
if base is None:
raise TypeError(
"Proxy model '%s' has no non-abstract model base class." % name
)
new_class._meta.setup_proxy(base)
new_class._meta.concrete_model = base._meta.concrete_model
else:
new_class._meta.concrete_model = new_class
# Collect the parent links for multi-table inheritance.
parent_links = {}
for base in reversed([new_class] + parents):
# Conceptually equivalent to `if base is Model`.
if not hasattr(base, "_meta"):
continue
# Skip concrete parent classes.
if base != new_class and not base._meta.abstract:
continue
# Locate OneToOneField instances.
for field in base._meta.local_fields:
if isinstance(field, OneToOneField) and field.remote_field.parent_link:
related = resolve_relation(new_class, field.remote_field.model)
parent_links[make_model_tuple(related)] = field
# Track fields inherited from base models.
inherited_attributes = set()
# Do the appropriate setup for any model parents.
for base in new_class.mro():
if base not in parents or not hasattr(base, "_meta"):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
inherited_attributes.update(base.__dict__)
continue
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
if not base._meta.abstract:
# Check for clashes between locally declared fields and those
# on the base classes.
for field in parent_fields:
if field.name in field_names:
raise FieldError(
"Local field %r in class %r clashes with field of "
"the same name from base class %r."
% (
field.name,
name,
base.__name__,
)
)
else:
inherited_attributes.add(field.name)
# Concrete classes...
base = base._meta.concrete_model
base_key = make_model_tuple(base)
if base_key in parent_links:
field = parent_links[base_key]
elif not is_proxy:
attr_name = "%s_ptr" % base._meta.model_name
field = OneToOneField(
base,
on_delete=CASCADE,
name=attr_name,
auto_created=True,
parent_link=True,
)
if attr_name in field_names:
raise FieldError(
"Auto-generated field '%s' in class %r for "
"parent_link to base class %r clashes with "
"declared field of the same name."
% (
attr_name,
name,
base.__name__,
)
)
# Only add the ptr field if it's not already present;
# e.g. migrations will already have it specified
if not hasattr(new_class, attr_name):
new_class.add_to_class(attr_name, field)
else:
field = None
new_class._meta.parents[base] = field
else:
base_parents = base._meta.parents.copy()
# Add fields from abstract base class if it wasn't overridden.
for field in parent_fields:
if (
field.name not in field_names
and field.name not in new_class.__dict__
and field.name not in inherited_attributes
):
new_field = copy.deepcopy(field)
new_class.add_to_class(field.name, new_field)
# Replace parent links defined on this base by the new
# field. It will be appropriately resolved if required.
if field.one_to_one:
for parent, parent_link in base_parents.items():
if field == parent_link:
base_parents[parent] = new_field
# Pass any non-abstract parent classes onto child.
new_class._meta.parents.update(base_parents)
# Inherit private fields (like GenericForeignKey) from the parent
# class
for field in base._meta.private_fields:
if field.name in field_names:
if not base._meta.abstract:
raise FieldError(
"Local field %r in class %r clashes with field of "
"the same name from base class %r."
% (
field.name,
name,
base.__name__,
)
)
else:
field = copy.deepcopy(field)
if not base._meta.abstract:
field.mti_inherited = True
new_class.add_to_class(field.name, field)
# Copy indexes so that index names are unique when models extend an
# abstract model.
new_class._meta.indexes = [
copy.deepcopy(idx) for idx in new_class._meta.indexes
]
if abstract:
# Abstract base models can't be instantiated and don't appear in
# the list of models for an app. We do the final setup for them a
# little differently from normal models.
attr_meta.abstract = False
new_class.Meta = attr_meta
return new_class
new_class._prepare()
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
return new_class
def add_to_class(cls, name, value):
if _has_contribute_to_class(value):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
def _prepare(cls):
"""Create some methods once self._meta has been populated."""
opts = cls._meta
opts._prepare(cls)
if opts.order_with_respect_to:
cls.get_next_in_order = partialmethod(
cls._get_next_or_previous_in_order, is_next=True
)
cls.get_previous_in_order = partialmethod(
cls._get_next_or_previous_in_order, is_next=False
)
# Defer creating accessors on the foreign class until it has been
# created and registered. If remote_field is None, we're ordering
# with respect to a GenericForeignKey and don't know what the
# foreign class is - we'll add those accessors later in
# contribute_to_class().
if opts.order_with_respect_to.remote_field:
wrt = opts.order_with_respect_to
remote = wrt.remote_field.model
lazy_related_operation(make_foreign_order_accessors, cls, remote)
# Give the class a docstring -- its definition.
if cls.__doc__ is None:
cls.__doc__ = "%s(%s)" % (
cls.__name__,
", ".join(f.name for f in opts.fields),
)
get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(
opts.label_lower
)
if get_absolute_url_override:
setattr(cls, "get_absolute_url", get_absolute_url_override)
if not opts.managers:
if any(f.name == "objects" for f in opts.fields):
raise ValueError(
"Model %s must specify a custom Manager, because it has a "
"field named 'objects'." % cls.__name__
)
manager = Manager()
manager.auto_created = True
cls.add_to_class("objects", manager)
# Set the name of _meta.indexes. This can't be done in
# Options.contribute_to_class() because fields haven't been added to
# the model at that point.
for index in cls._meta.indexes:
if not index.name:
index.set_name_with_model(cls)
class_prepared.send(sender=cls)
@property
def _base_manager(cls):
return cls._meta.base_manager
@property
def _default_manager(cls):
return cls._meta.default_manager
class ModelStateFieldsCacheDescriptor:
def __get__(self, instance, cls=None):
if instance is None:
return self
res = instance.fields_cache = {}
return res
class ModelState:
"""Store model instance state."""
db = None
# If true, uniqueness validation checks will consider this a new, unsaved
# object. Necessary for correct validation of new instances of objects with
# explicit (non-auto) PKs. This impacts validation only; it has no effect
# on the actual save.
adding = True
fields_cache = ModelStateFieldsCacheDescriptor()
class Model(AltersData, metaclass=ModelBase):
def __init__(self, *args, **kwargs):
# Alias some things as locals to avoid repeat global lookups
cls = self.__class__
opts = self._meta
_setattr = setattr
_DEFERRED = DEFERRED
if opts.abstract:
raise TypeError("Abstract models cannot be instantiated.")
pre_init.send(sender=cls, args=args, kwargs=kwargs)
# Set up the storage for instance state
self._state = ModelState()
        # There is a rather weird disparity here; if kwargs, it's set, then
        # args overrides it. It should be one or the other; don't duplicate
        # the work. The reason for the kwargs check is that the standard
        # iterator passes values in by args, and instantiation for iteration
        # is 33% faster.
if len(args) > len(opts.concrete_fields):
# Daft, but matches old exception sans the err msg.
raise IndexError("Number of args exceeds number of fields")
if not kwargs:
fields_iter = iter(opts.concrete_fields)
            # The ordering of the zip calls matters - zip throws StopIteration
# when an iter throws it. So if the first iter throws it, the second
# is *not* consumed. We rely on this, so don't change the order
# without changing the logic.
for val, field in zip(args, fields_iter):
if val is _DEFERRED:
continue
_setattr(self, field.attname, val)
else:
# Slower, kwargs-ready version.
fields_iter = iter(opts.fields)
for val, field in zip(args, fields_iter):
if val is _DEFERRED:
continue
_setattr(self, field.attname, val)
if kwargs.pop(field.name, NOT_PROVIDED) is not NOT_PROVIDED:
raise TypeError(
f"{cls.__qualname__}() got both positional and "
f"keyword arguments for field '{field.name}'."
)
# Now we're left with the unprocessed fields that *must* come from
# keywords, or default.
for field in fields_iter:
is_related_object = False
# Virtual field
if field.attname not in kwargs and field.column is None:
continue
if kwargs:
if isinstance(field.remote_field, ForeignObjectRel):
try:
# Assume object instance was passed in.
rel_obj = kwargs.pop(field.name)
is_related_object = True
except KeyError:
try:
# Object instance wasn't passed in -- must be an ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
else:
try:
val = kwargs.pop(field.attname)
except KeyError:
# This is done with an exception rather than the
# default argument on pop because we don't want
# get_default() to be evaluated, and then not used.
# Refs #12057.
val = field.get_default()
else:
val = field.get_default()
if is_related_object:
# If we are passed a related instance, set it using the
# field.name instead of field.attname (e.g. "user" instead of
# "user_id") so that the object gets properly cached (and type
# checked) by the RelatedObjectDescriptor.
if rel_obj is not _DEFERRED:
_setattr(self, field.name, rel_obj)
else:
if val is not _DEFERRED:
_setattr(self, field.attname, val)
if kwargs:
property_names = opts._property_names
unexpected = ()
for prop, value in kwargs.items():
# Any remaining kwargs must correspond to properties or virtual
# fields.
if prop in property_names:
if value is not _DEFERRED:
_setattr(self, prop, value)
else:
try:
opts.get_field(prop)
except FieldDoesNotExist:
unexpected += (prop,)
else:
if value is not _DEFERRED:
_setattr(self, prop, value)
if unexpected:
unexpected_names = ", ".join(repr(n) for n in unexpected)
raise TypeError(
f"{cls.__name__}() got unexpected keyword arguments: "
f"{unexpected_names}"
)
super().__init__()
post_init.send(sender=cls, instance=self)
@classmethod
def from_db(cls, db, field_names, values):
if len(values) != len(cls._meta.concrete_fields):
values_iter = iter(values)
values = [
next(values_iter) if f.attname in field_names else DEFERRED
for f in cls._meta.concrete_fields
]
new = cls(*values)
new._state.adding = False
new._state.db = db
return new
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def __str__(self):
return "%s object (%s)" % (self.__class__.__name__, self.pk)
def __eq__(self, other):
if not isinstance(other, Model):
return NotImplemented
if self._meta.concrete_model != other._meta.concrete_model:
return False
my_pk = self.pk
if my_pk is None:
return self is other
return my_pk == other.pk
def __hash__(self):
if self.pk is None:
raise TypeError("Model instances without primary key value are unhashable")
return hash(self.pk)
def __reduce__(self):
data = self.__getstate__()
data[DJANGO_VERSION_PICKLE_KEY] = django.__version__
class_id = self._meta.app_label, self._meta.object_name
return model_unpickle, (class_id,), data
def __getstate__(self):
"""Hook to allow choosing the attributes to pickle."""
state = self.__dict__.copy()
state["_state"] = copy.copy(state["_state"])
state["_state"].fields_cache = state["_state"].fields_cache.copy()
# memoryview cannot be pickled, so cast it to bytes and store
# separately.
_memoryview_attrs = []
for attr, value in state.items():
if isinstance(value, memoryview):
_memoryview_attrs.append((attr, bytes(value)))
if _memoryview_attrs:
state["_memoryview_attrs"] = _memoryview_attrs
for attr, value in _memoryview_attrs:
state.pop(attr)
return state
def __setstate__(self, state):
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
if pickled_version != django.__version__:
warnings.warn(
"Pickled model instance's Django version %s does not "
"match the current version %s."
% (pickled_version, django.__version__),
RuntimeWarning,
stacklevel=2,
)
else:
warnings.warn(
"Pickled model instance's Django version is not specified.",
RuntimeWarning,
stacklevel=2,
)
if "_memoryview_attrs" in state:
for attr, value in state.pop("_memoryview_attrs"):
state[attr] = memoryview(value)
self.__dict__.update(state)
def _get_pk_val(self, meta=None):
meta = meta or self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
for parent_link in self._meta.parents.values():
if parent_link and parent_link != self._meta.pk:
setattr(self, parent_link.target_field.attname, value)
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def get_deferred_fields(self):
"""
Return a set containing names of deferred fields for this instance.
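        An illustrative sketch (assumes a hypothetical ``Article`` model with
        a ``body`` field):
        >>> article = Article.objects.defer("body").first()
        >>> article.get_deferred_fields()
        {'body'}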
"""
return {
f.attname
for f in self._meta.concrete_fields
if f.attname not in self.__dict__
}
def refresh_from_db(self, using=None, fields=None):
"""
Reload field values from the database.
By default, the reloading happens from the database this instance was
loaded from, or by the read router if this instance wasn't loaded from
any database. The using parameter will override the default.
Fields can be used to specify which fields to reload. The fields
should be an iterable of field attnames. If fields is None, then
all non-deferred fields are reloaded.
When accessing deferred fields of an instance, the deferred loading
of the field will call this method.
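        An illustrative sketch (assumes a hypothetical saved ``article``
        instance whose row may have changed in the database):
        >>> article.refresh_from_db()                  # reload loaded fields
        >>> article.refresh_from_db(fields=["title"])  # reload only "title"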
"""
if fields is None:
self._prefetched_objects_cache = {}
else:
prefetched_objects_cache = getattr(self, "_prefetched_objects_cache", ())
for field in fields:
if field in prefetched_objects_cache:
del prefetched_objects_cache[field]
fields.remove(field)
if not fields:
return
if any(LOOKUP_SEP in f for f in fields):
raise ValueError(
'Found "%s" in fields argument. Relations and transforms '
"are not allowed in fields." % LOOKUP_SEP
)
hints = {"instance": self}
db_instance_qs = self.__class__._base_manager.db_manager(
using, hints=hints
).filter(pk=self.pk)
        # Use the provided fields; if not set, reload all non-deferred fields.
deferred_fields = self.get_deferred_fields()
if fields is not None:
fields = list(fields)
db_instance_qs = db_instance_qs.only(*fields)
elif deferred_fields:
fields = [
f.attname
for f in self._meta.concrete_fields
if f.attname not in deferred_fields
]
db_instance_qs = db_instance_qs.only(*fields)
db_instance = db_instance_qs.get()
non_loaded_fields = db_instance.get_deferred_fields()
for field in self._meta.concrete_fields:
if field.attname in non_loaded_fields:
# This field wasn't refreshed - skip ahead.
continue
setattr(self, field.attname, getattr(db_instance, field.attname))
# Clear cached foreign keys.
if field.is_relation and field.is_cached(self):
field.delete_cached_value(self)
# Clear cached relations.
for field in self._meta.related_objects:
if field.is_cached(self):
field.delete_cached_value(self)
# Clear cached private relations.
for field in self._meta.private_fields:
if field.is_relation and field.is_cached(self):
field.delete_cached_value(self)
self._state.db = db_instance._state.db
async def arefresh_from_db(self, using=None, fields=None):
return await sync_to_async(self.refresh_from_db)(using=using, fields=fields)
def serializable_value(self, field_name):
"""
Return the value of the field name for this instance. If the field is
a foreign key, return the id value instead of the object. If there's
no Field object with this name on the model, return the model
attribute's value.
Used to serialize a field's value (in the serializer, or form output,
for example). Normally, you would just access the attribute directly
and not use this method.
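        An illustrative sketch (assumes a hypothetical ``book`` instance with
        an ``author`` foreign key; 42 is a made-up id value):
        >>> book.serializable_value("author")
        42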
"""
try:
field = self._meta.get_field(field_name)
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
def save(
self, force_insert=False, force_update=False, using=None, update_fields=None
):
"""
Save the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be an SQL insert or update (or equivalent for
non-SQL backends), respectively. Normally, they should not be set.
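        An illustrative sketch (assumes a hypothetical saved ``article``
        instance):
        >>> article.headline = "Updated"
        >>> article.save(update_fields=["headline"])  # UPDATE only "headline"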
"""
self._prepare_related_fields_for_save(operation_name="save")
using = using or router.db_for_write(self.__class__, instance=self)
if force_insert and (force_update or update_fields):
raise ValueError("Cannot force both insert and updating in model saving.")
deferred_fields = self.get_deferred_fields()
if update_fields is not None:
            # If update_fields is empty, skip the save. We also check for
            # no-op saves later on for inheritance cases. This early bailout
            # is still needed to skip signal sending.
if not update_fields:
return
update_fields = frozenset(update_fields)
field_names = self._meta._non_pk_concrete_field_names
non_model_fields = update_fields.difference(field_names)
if non_model_fields:
raise ValueError(
"The following fields do not exist in this model, are m2m "
"fields, or are non-concrete fields: %s"
% ", ".join(non_model_fields)
)
# If saving to the same database, and this model is deferred, then
# automatically do an "update_fields" save on the loaded fields.
elif not force_insert and deferred_fields and using == self._state.db:
field_names = set()
for field in self._meta.concrete_fields:
if not field.primary_key and not hasattr(field, "through"):
field_names.add(field.attname)
loaded_fields = field_names.difference(deferred_fields)
if loaded_fields:
update_fields = frozenset(loaded_fields)
self.save_base(
using=using,
force_insert=force_insert,
force_update=force_update,
update_fields=update_fields,
)
save.alters_data = True
async def asave(
self, force_insert=False, force_update=False, using=None, update_fields=None
):
return await sync_to_async(self.save)(
force_insert=force_insert,
force_update=force_update,
using=using,
update_fields=update_fields,
)
asave.alters_data = True
def save_base(
self,
raw=False,
force_insert=False,
force_update=False,
using=None,
update_fields=None,
):
"""
Handle the parts of saving which should be done only once per save,
yet need to be done in raw saves, too. This includes some sanity
checks and signal sending.
        The 'raw' argument tells save_base not to save any parent
        models and not to change any of the values before saving. This
        is used by fixture loading.
"""
using = using or router.db_for_write(self.__class__, instance=self)
assert not (force_insert and (force_update or update_fields))
assert update_fields is None or update_fields
cls = origin = self.__class__
# Skip proxies, but keep the origin as the proxy model.
if cls._meta.proxy:
cls = cls._meta.concrete_model
meta = cls._meta
if not meta.auto_created:
pre_save.send(
sender=origin,
instance=self,
raw=raw,
using=using,
update_fields=update_fields,
)
# A transaction isn't needed if one query is issued.
if meta.parents:
context_manager = transaction.atomic(using=using, savepoint=False)
else:
context_manager = transaction.mark_for_rollback_on_error(using=using)
with context_manager:
parent_inserted = False
if not raw:
parent_inserted = self._save_parents(cls, using, update_fields)
updated = self._save_table(
raw,
cls,
force_insert or parent_inserted,
force_update,
using,
update_fields,
)
# Store the database on which the object was saved
self._state.db = using
# Once saved, this is no longer a to-be-added instance.
self._state.adding = False
# Signal that the save is complete
if not meta.auto_created:
post_save.send(
sender=origin,
instance=self,
created=(not updated),
update_fields=update_fields,
raw=raw,
using=using,
)
save_base.alters_data = True
def _save_parents(self, cls, using, update_fields):
"""Save all the parents of cls using values from self."""
meta = cls._meta
inserted = False
for parent, field in meta.parents.items():
# Make sure the link fields are synced between parent and self.
if (
field
and getattr(self, parent._meta.pk.attname) is None
and getattr(self, field.attname) is not None
):
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
parent_inserted = self._save_parents(
cls=parent, using=using, update_fields=update_fields
)
updated = self._save_table(
cls=parent,
using=using,
update_fields=update_fields,
force_insert=parent_inserted,
)
if not updated:
inserted = True
# Set the parent's PK value to self.
if field:
setattr(self, field.attname, self._get_pk_val(parent._meta))
                # Since we didn't have an instance of the parent handy, set
# attname directly, bypassing the descriptor. Invalidate
# the related object cache, in case it's been accidentally
# populated. A fresh instance will be re-built from the
# database if necessary.
if field.is_cached(self):
field.delete_cached_value(self)
return inserted
def _save_table(
self,
raw=False,
cls=None,
force_insert=False,
force_update=False,
using=None,
update_fields=None,
):
"""
Do the heavy-lifting involved in saving. Update or insert the data
for a single table.
"""
meta = cls._meta
non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]
if update_fields:
non_pks = [
f
for f in non_pks
if f.name in update_fields or f.attname in update_fields
]
pk_val = self._get_pk_val(meta)
if pk_val is None:
pk_val = meta.pk.get_pk_value_on_save(self)
setattr(self, meta.pk.attname, pk_val)
pk_set = pk_val is not None
if not pk_set and (force_update or update_fields):
raise ValueError("Cannot force an update in save() with no primary key.")
updated = False
# Skip an UPDATE when adding an instance and primary key has a default.
if (
not raw
and not force_insert
and self._state.adding
and meta.pk.default
and meta.pk.default is not NOT_PROVIDED
):
force_insert = True
# If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
if pk_set and not force_insert:
base_qs = cls._base_manager.using(using)
values = [
(
f,
None,
(getattr(self, f.attname) if raw else f.pre_save(self, False)),
)
for f in non_pks
]
forced_update = update_fields or force_update
updated = self._do_update(
base_qs, using, pk_val, values, update_fields, forced_update
)
if force_update and not updated:
raise DatabaseError("Forced update did not affect any rows.")
if update_fields and not updated:
raise DatabaseError("Save with update_fields did not affect any rows.")
if not updated:
if meta.order_with_respect_to:
                # If this is a model with an order_with_respect_to,
                # autopopulate the _order field.
field = meta.order_with_respect_to
filter_args = field.get_filter_kwargs_for_object(self)
self._order = (
cls._base_manager.using(using)
.filter(**filter_args)
.aggregate(
_order__max=Coalesce(
ExpressionWrapper(
Max("_order") + Value(1), output_field=IntegerField()
),
Value(0),
),
)["_order__max"]
)
fields = meta.local_concrete_fields
if not pk_set:
fields = [f for f in fields if f is not meta.auto_field]
returning_fields = meta.db_returning_fields
results = self._do_insert(
cls._base_manager, using, fields, returning_fields, raw
)
if results:
for value, field in zip(results[0], returning_fields):
setattr(self, field.attname, value)
return updated
def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
"""
Try to update the model. Return True if the model was updated (if an
update query was done and a matching row was found in the DB).
"""
filtered = base_qs.filter(pk=pk_val)
if not values:
            # We can end up here when saving a model in an inheritance chain
            # where update_fields doesn't target any field in the current
            # model. In that case, just say the update succeeded. Another case
            # ending up here is a model with just a PK - in that case, check
            # that the PK still exists.
return update_fields is not None or filtered.exists()
if self._meta.select_on_save and not forced_update:
return (
filtered.exists()
and
# It may happen that the object is deleted from the DB right after
# this check, causing the subsequent UPDATE to return zero matching
# rows. The same result can occur in some rare cases when the
# database returns zero despite the UPDATE being executed
# successfully (a row is matched and updated). In order to
# distinguish these two cases, the object's existence in the
                # database is checked again if the UPDATE query returns 0.
(filtered._update(values) > 0 or filtered.exists())
)
return filtered._update(values) > 0
def _do_insert(self, manager, using, fields, returning_fields, raw):
"""
Do an INSERT. If returning_fields is defined then this method should
return the newly created data for the model.
"""
return manager._insert(
[self],
fields=fields,
returning_fields=returning_fields,
using=using,
raw=raw,
)
def _prepare_related_fields_for_save(self, operation_name, fields=None):
# Ensure that a model instance without a PK hasn't been assigned to
# a ForeignKey, GenericForeignKey or OneToOneField on this model. If
# the field is nullable, allowing the save would result in silent data
# loss.
for field in self._meta.concrete_fields:
if fields and field not in fields:
continue
# If the related field isn't cached, then an instance hasn't been
# assigned and there's no need to worry about this check.
if field.is_relation and field.is_cached(self):
obj = getattr(self, field.name, None)
if not obj:
continue
# A pk may have been assigned manually to a model instance not
# saved to the database (or auto-generated in a case like
# UUIDField), but we allow the save to proceed and rely on the
# database to raise an IntegrityError if applicable. If
# constraints aren't supported by the database, there's the
# unavoidable risk of data corruption.
if obj.pk is None:
# Remove the object from a related instance cache.
if not field.remote_field.multiple:
field.remote_field.delete_cached_value(obj)
raise ValueError(
"%s() prohibited to prevent data loss due to unsaved "
"related object '%s'." % (operation_name, field.name)
)
elif getattr(self, field.attname) in field.empty_values:
# Set related object if it has been saved after an
# assignment.
setattr(self, field.name, obj)
# If the relationship's pk/to_field was changed, clear the
# cached relationship.
if getattr(obj, field.target_field.attname) != getattr(
self, field.attname
):
field.delete_cached_value(self)
# GenericForeignKeys are private.
for field in self._meta.private_fields:
if fields and field not in fields:
continue
if (
field.is_relation
and field.is_cached(self)
and hasattr(field, "fk_field")
):
obj = field.get_cached_value(self, default=None)
if obj and obj.pk is None:
raise ValueError(
f"{operation_name}() prohibited to prevent data loss due to "
f"unsaved related object '{field.name}'."
)
def delete(self, using=None, keep_parents=False):
if self.pk is None:
raise ValueError(
"%s object can't be deleted because its %s attribute is set "
"to None." % (self._meta.object_name, self._meta.pk.attname)
)
using = using or router.db_for_write(self.__class__, instance=self)
collector = Collector(using=using, origin=self)
collector.collect([self], keep_parents=keep_parents)
return collector.delete()
delete.alters_data = True
async def adelete(self, using=None, keep_parents=False):
return await sync_to_async(self.delete)(
using=using,
keep_parents=keep_parents,
)
adelete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
choices_dict = dict(make_hashable(field.flatchoices))
# force_str() to coerce lazy strings.
return force_str(
choices_dict.get(make_hashable(value), value), strings_only=True
)
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
if not self.pk:
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
op = "gt" if is_next else "lt"
order = "" if is_next else "-"
param = getattr(self, field.attname)
q = Q.create([(field.name, param), (f"pk__{op}", self.pk)], connector=Q.AND)
q = Q.create([q, (f"{field.name}__{op}", param)], connector=Q.OR)
qs = (
self.__class__._default_manager.using(self._state.db)
.filter(**kwargs)
.filter(q)
.order_by("%s%s" % (order, field.name), "%spk" % order)
)
try:
return qs[0]
except IndexError:
raise self.DoesNotExist(
"%s matching query does not exist." % self.__class__._meta.object_name
)
def _get_next_or_previous_in_order(self, is_next):
cachename = "__%s_order_cache" % is_next
if not hasattr(self, cachename):
op = "gt" if is_next else "lt"
order = "_order" if is_next else "-_order"
order_field = self._meta.order_with_respect_to
filter_args = order_field.get_filter_kwargs_for_object(self)
obj = (
self.__class__._default_manager.filter(**filter_args)
.filter(
**{
"_order__%s"
% op: self.__class__._default_manager.values("_order").filter(
**{self._meta.pk.name: self.pk}
)
}
)
.order_by(order)[:1]
.get()
)
setattr(self, cachename, obj)
return getattr(self, cachename)
def _get_field_value_map(self, meta, exclude=None):
if exclude is None:
exclude = set()
meta = meta or self._meta
return {
field.name: Value(getattr(self, field.attname), field)
for field in meta.local_concrete_fields
if field.name not in exclude
}
def prepare_database_save(self, field):
if self.pk is None:
raise ValueError(
"Unsaved model instance %r cannot be used in an ORM query." % self
)
return getattr(self, field.remote_field.get_related_field().attname)
def clean(self):
"""
Hook for doing any extra model-wide validation after clean() has been
called on every field by self.clean_fields. Any ValidationError raised
by this method will not be associated with a particular field; it will
have a special-case association with the field defined by NON_FIELD_ERRORS.
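        An illustrative override sketch (hypothetical model with ``start`` and
        ``end`` date fields):
            def clean(self):
                if self.start and self.end and self.start > self.end:
                    raise ValidationError("start must not be after end.")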
"""
pass
def validate_unique(self, exclude=None):
"""
Check unique constraints on the model and raise ValidationError if any
failed.
"""
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
def _get_unique_checks(self, exclude=None, include_meta_constraints=False):
"""
Return a list of checks to perform. Since validate_unique() could be
called from a ModelForm, some fields may have been excluded; we can't
perform a unique check on a model that is missing fields involved
in that check. Fields that did not validate should also be excluded,
but they need to be passed in via the exclude argument.
"""
if exclude is None:
exclude = set()
unique_checks = []
unique_togethers = [(self.__class__, self._meta.unique_together)]
constraints = []
if include_meta_constraints:
constraints = [(self.__class__, self._meta.total_unique_constraints)]
for parent_class in self._meta.get_parent_list():
if parent_class._meta.unique_together:
unique_togethers.append(
(parent_class, parent_class._meta.unique_together)
)
if include_meta_constraints and parent_class._meta.total_unique_constraints:
constraints.append(
(parent_class, parent_class._meta.total_unique_constraints)
)
for model_class, unique_together in unique_togethers:
for check in unique_together:
if not any(name in exclude for name in check):
# Add the check if the field isn't excluded.
unique_checks.append((model_class, tuple(check)))
if include_meta_constraints:
for model_class, model_constraints in constraints:
for constraint in model_constraints:
if not any(name in exclude for name in constraint.fields):
unique_checks.append((model_class, constraint.fields))
# These are checks for the unique_for_<date/year/month>.
date_checks = []
# Gather a list of checks for fields declared as unique and add them to
# the list of checks.
fields_with_class = [(self.__class__, self._meta.local_fields)]
for parent_class in self._meta.get_parent_list():
fields_with_class.append((parent_class, parent_class._meta.local_fields))
for model_class, fields in fields_with_class:
for f in fields:
name = f.name
if name in exclude:
continue
if f.unique:
unique_checks.append((model_class, (name,)))
if f.unique_for_date and f.unique_for_date not in exclude:
date_checks.append((model_class, "date", name, f.unique_for_date))
if f.unique_for_year and f.unique_for_year not in exclude:
date_checks.append((model_class, "year", name, f.unique_for_year))
if f.unique_for_month and f.unique_for_month not in exclude:
date_checks.append((model_class, "month", name, f.unique_for_month))
return unique_checks, date_checks
def _perform_unique_checks(self, unique_checks):
errors = {}
for model_class, unique_check in unique_checks:
            # Try to look up an existing object with the same values as this
            # object's values for all the unique fields.
lookup_kwargs = {}
for field_name in unique_check:
f = self._meta.get_field(field_name)
lookup_value = getattr(self, f.attname)
# TODO: Handle multiple backends with different feature flags.
if lookup_value is None or (
lookup_value == ""
and connection.features.interprets_empty_strings_as_nulls
):
# no value, skip the lookup
continue
if f.primary_key and not self._state.adding:
# no need to check for unique primary key when editing
continue
lookup_kwargs[str(field_name)] = lookup_value
# some fields were skipped, no reason to do the check
if len(unique_check) != len(lookup_kwargs):
continue
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
# Note that we need to use the pk as defined by model_class, not
# self.pk. These can be different fields because model inheritance
# allows single model to have effectively multiple primary keys.
# Refs #17615.
model_class_pk = self._get_pk_val(model_class._meta)
if not self._state.adding and model_class_pk is not None:
qs = qs.exclude(pk=model_class_pk)
if qs.exists():
if len(unique_check) == 1:
key = unique_check[0]
else:
key = NON_FIELD_ERRORS
errors.setdefault(key, []).append(
self.unique_error_message(model_class, unique_check)
)
return errors
def _perform_date_checks(self, date_checks):
errors = {}
for model_class, lookup_type, field, unique_for in date_checks:
lookup_kwargs = {}
            # There's a ticket to add a date lookup; we can remove this
            # special case if that makes its way in.
date = getattr(self, unique_for)
if date is None:
continue
if lookup_type == "date":
lookup_kwargs["%s__day" % unique_for] = date.day
lookup_kwargs["%s__month" % unique_for] = date.month
lookup_kwargs["%s__year" % unique_for] = date.year
else:
lookup_kwargs["%s__%s" % (unique_for, lookup_type)] = getattr(
date, lookup_type
)
lookup_kwargs[field] = getattr(self, field)
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
if not self._state.adding and self.pk is not None:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.setdefault(field, []).append(
self.date_error_message(lookup_type, field, unique_for)
)
return errors
def date_error_message(self, lookup_type, field_name, unique_for):
opts = self._meta
field = opts.get_field(field_name)
return ValidationError(
message=field.error_messages["unique_for_date"],
code="unique_for_date",
params={
"model": self,
"model_name": capfirst(opts.verbose_name),
"lookup_type": lookup_type,
"field": field_name,
"field_label": capfirst(field.verbose_name),
"date_field": unique_for,
"date_field_label": capfirst(opts.get_field(unique_for).verbose_name),
},
)
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
params = {
"model": self,
"model_class": model_class,
"model_name": capfirst(opts.verbose_name),
"unique_check": unique_check,
}
# A unique field
if len(unique_check) == 1:
field = opts.get_field(unique_check[0])
params["field_label"] = capfirst(field.verbose_name)
return ValidationError(
message=field.error_messages["unique"],
code="unique",
params=params,
)
# unique_together
else:
field_labels = [
capfirst(opts.get_field(f).verbose_name) for f in unique_check
]
params["field_labels"] = get_text_list(field_labels, _("and"))
return ValidationError(
message=_("%(model_name)s with this %(field_labels)s already exists."),
code="unique_together",
params=params,
)
def get_constraints(self):
constraints = [(self.__class__, self._meta.constraints)]
for parent_class in self._meta.get_parent_list():
if parent_class._meta.constraints:
constraints.append((parent_class, parent_class._meta.constraints))
return constraints
def validate_constraints(self, exclude=None):
constraints = self.get_constraints()
using = router.db_for_write(self.__class__, instance=self)
errors = {}
for model_class, model_constraints in constraints:
for constraint in model_constraints:
try:
constraint.validate(model_class, self, exclude=exclude, using=using)
except ValidationError as e:
if e.code == "unique" and len(constraint.fields) == 1:
errors.setdefault(constraint.fields[0], []).append(e)
else:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
def full_clean(self, exclude=None, validate_unique=True, validate_constraints=True):
"""
Call clean_fields(), clean(), validate_unique(), and
validate_constraints() on the model. Raise a ValidationError for any
errors that occur.
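        An illustrative sketch (assumes a hypothetical ``Article`` model whose
        ``title`` may not be blank; the exact error text may vary):
        >>> article = Article(title="")
        >>> article.full_clean()
        Traceback (most recent call last):
            ...
        django.core.exceptions.ValidationError: {'title': ['This field cannot be blank.']}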
"""
errors = {}
if exclude is None:
exclude = set()
else:
exclude = set(exclude)
try:
self.clean_fields(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Form.clean() is run even if other validation fails, so do the
# same with Model.clean() for consistency.
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run unique checks, but only for fields that passed validation.
if validate_unique:
for name in errors:
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.add(name)
try:
self.validate_unique(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run constraints checks, but only for fields that passed validation.
if validate_constraints:
for name in errors:
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.add(name)
try:
self.validate_constraints(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
def clean_fields(self, exclude=None):
"""
Clean all fields and raise a ValidationError containing a dict
of all validation errors if any occur.
"""
if exclude is None:
exclude = set()
errors = {}
for f in self._meta.fields:
if f.name in exclude:
continue
# Skip validation for empty fields with blank=True. The developer
# is responsible for making sure they have a valid value.
raw_value = getattr(self, f.attname)
if f.blank and raw_value in f.empty_values:
continue
try:
setattr(self, f.attname, f.clean(raw_value, self))
except ValidationError as e:
errors[f.name] = e.error_list
if errors:
raise ValidationError(errors)
@classmethod
def check(cls, **kwargs):
errors = [
*cls._check_swappable(),
*cls._check_model(),
*cls._check_managers(**kwargs),
]
if not cls._meta.swapped:
databases = kwargs.get("databases") or []
errors += [
*cls._check_fields(**kwargs),
*cls._check_m2m_through_same_relationship(),
*cls._check_long_column_names(databases),
]
clash_errors = (
*cls._check_id_field(),
*cls._check_field_name_clashes(),
*cls._check_model_name_db_lookup_clashes(),
*cls._check_property_name_related_field_accessor_clashes(),
*cls._check_single_primary_key(),
)
errors.extend(clash_errors)
# If there are field name clashes, hide consequent column name
# clashes.
if not clash_errors:
errors.extend(cls._check_column_name_clashes())
errors += [
*cls._check_index_together(),
*cls._check_unique_together(),
*cls._check_indexes(databases),
*cls._check_ordering(),
*cls._check_constraints(databases),
*cls._check_default_pk(),
*cls._check_db_table_comment(databases),
]
return errors
@classmethod
def _check_default_pk(cls):
if (
not cls._meta.abstract
and cls._meta.pk.auto_created
and
            # Inherited PKs are checked in parent models.
not (
isinstance(cls._meta.pk, OneToOneField)
and cls._meta.pk.remote_field.parent_link
)
and not settings.is_overridden("DEFAULT_AUTO_FIELD")
and cls._meta.app_config
and not cls._meta.app_config._is_default_auto_field_overridden
):
return [
checks.Warning(
f"Auto-created primary key used when not defining a "
f"primary key type, by default "
f"'{settings.DEFAULT_AUTO_FIELD}'.",
hint=(
f"Configure the DEFAULT_AUTO_FIELD setting or the "
f"{cls._meta.app_config.__class__.__qualname__}."
f"default_auto_field attribute to point to a subclass "
f"of AutoField, e.g. 'django.db.models.BigAutoField'."
),
obj=cls,
id="models.W042",
),
]
return []
@classmethod
def _check_db_table_comment(cls, databases):
if not cls._meta.db_table_comment:
return []
errors = []
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if not (
connection.features.supports_comments
or "supports_comments" in cls._meta.required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support comments on "
f"tables (db_table_comment).",
obj=cls,
id="models.W046",
)
)
return errors
@classmethod
def _check_swappable(cls):
"""Check if the swapped model exists."""
errors = []
if cls._meta.swapped:
try:
apps.get_model(cls._meta.swapped)
except ValueError:
errors.append(
checks.Error(
"'%s' is not of the form 'app_label.app_name'."
% cls._meta.swappable,
id="models.E001",
)
)
except LookupError:
app_label, model_name = cls._meta.swapped.split(".")
errors.append(
checks.Error(
"'%s' references '%s.%s', which has not been "
"installed, or is abstract."
% (cls._meta.swappable, app_label, model_name),
id="models.E002",
)
)
return errors
@classmethod
def _check_model(cls):
errors = []
if cls._meta.proxy:
if cls._meta.local_fields or cls._meta.local_many_to_many:
errors.append(
checks.Error(
"Proxy model '%s' contains model fields." % cls.__name__,
id="models.E017",
)
)
return errors
@classmethod
def _check_managers(cls, **kwargs):
"""Perform all manager checks."""
errors = []
for manager in cls._meta.managers:
errors.extend(manager.check(**kwargs))
return errors
@classmethod
def _check_fields(cls, **kwargs):
"""Perform all field checks."""
errors = []
for field in cls._meta.local_fields:
errors.extend(field.check(**kwargs))
for field in cls._meta.local_many_to_many:
errors.extend(field.check(from_model=cls, **kwargs))
return errors
@classmethod
def _check_m2m_through_same_relationship(cls):
"""Check if no relationship model is used by more than one m2m field."""
errors = []
seen_intermediary_signatures = []
fields = cls._meta.local_many_to_many
# Skip when the target model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase))
# Skip when the relationship model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase))
for f in fields:
signature = (
f.remote_field.model,
cls,
f.remote_field.through,
f.remote_field.through_fields,
)
if signature in seen_intermediary_signatures:
errors.append(
checks.Error(
"The model has two identical many-to-many relations "
"through the intermediate model '%s'."
% f.remote_field.through._meta.label,
obj=cls,
id="models.E003",
)
)
else:
seen_intermediary_signatures.append(signature)
return errors
@classmethod
def _check_id_field(cls):
"""Check if `id` field is a primary key."""
fields = [
f for f in cls._meta.local_fields if f.name == "id" and f != cls._meta.pk
]
# fields is empty or consists of the invalid "id" field
if fields and not fields[0].primary_key and cls._meta.pk.name == "id":
return [
checks.Error(
"'id' can only be used as a field name if the field also "
"sets 'primary_key=True'.",
obj=cls,
id="models.E004",
)
]
else:
return []
@classmethod
def _check_field_name_clashes(cls):
"""Forbid field shadowing in multi-table inheritance."""
errors = []
used_fields = {} # name or attname -> field
# Check that multi-inheritance doesn't cause field name shadowing.
for parent in cls._meta.get_parent_list():
for f in parent._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
if clash:
errors.append(
checks.Error(
"The field '%s' from parent model "
"'%s' clashes with the field '%s' "
"from parent model '%s'."
% (clash.name, clash.model._meta, f.name, f.model._meta),
obj=cls,
id="models.E005",
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
# Check that fields defined in the model don't clash with fields from
# parents, including auto-generated fields like multi-table inheritance
# child accessors.
for parent in cls._meta.get_parent_list():
for f in parent._meta.get_fields():
if f not in used_fields:
used_fields[f.name] = f
for f in cls._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
# Note that we may detect clash between user-defined non-unique
# field "id" and automatically added unique field "id", both
# defined at the same model. This special case is considered in
# _check_id_field and here we ignore it.
id_conflict = (
f.name == "id" and clash and clash.name == "id" and clash.model == cls
)
if clash and not id_conflict:
errors.append(
checks.Error(
"The field '%s' clashes with the field '%s' "
"from model '%s'." % (f.name, clash.name, clash.model._meta),
obj=f,
id="models.E006",
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
return errors
@classmethod
def _check_column_name_clashes(cls):
# Store a list of column names which have already been used by other fields.
used_column_names = []
errors = []
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Ensure the column name is not already in use.
if column_name and column_name in used_column_names:
errors.append(
checks.Error(
"Field '%s' has column name '%s' that is used by "
"another field." % (f.name, column_name),
hint="Specify a 'db_column' for the field.",
obj=cls,
id="models.E007",
)
)
else:
used_column_names.append(column_name)
return errors
@classmethod
def _check_model_name_db_lookup_clashes(cls):
errors = []
model_name = cls.__name__
if model_name.startswith("_") or model_name.endswith("_"):
errors.append(
checks.Error(
"The model name '%s' cannot start or end with an underscore "
"as it collides with the query lookup syntax." % model_name,
obj=cls,
id="models.E023",
)
)
elif LOOKUP_SEP in model_name:
errors.append(
checks.Error(
"The model name '%s' cannot contain double underscores as "
"it collides with the query lookup syntax." % model_name,
obj=cls,
id="models.E024",
)
)
return errors
@classmethod
def _check_property_name_related_field_accessor_clashes(cls):
errors = []
property_names = cls._meta._property_names
related_field_accessors = (
f.get_attname()
for f in cls._meta._get_fields(reverse=False)
if f.is_relation and f.related_model is not None
)
for accessor in related_field_accessors:
if accessor in property_names:
errors.append(
checks.Error(
"The property '%s' clashes with a related field "
"accessor." % accessor,
obj=cls,
id="models.E025",
)
)
return errors
@classmethod
def _check_single_primary_key(cls):
errors = []
if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1:
errors.append(
checks.Error(
"The model cannot have more than one field with "
"'primary_key=True'.",
obj=cls,
id="models.E026",
)
)
return errors
# RemovedInDjango51Warning.
@classmethod
def _check_index_together(cls):
"""Check the value of "index_together" option."""
if not isinstance(cls._meta.index_together, (tuple, list)):
return [
checks.Error(
"'index_together' must be a list or tuple.",
obj=cls,
id="models.E008",
)
]
elif any(
not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together
):
return [
checks.Error(
"All 'index_together' elements must be lists or tuples.",
obj=cls,
id="models.E009",
)
]
else:
errors = []
for fields in cls._meta.index_together:
errors.extend(cls._check_local_fields(fields, "index_together"))
return errors
@classmethod
def _check_unique_together(cls):
"""Check the value of "unique_together" option."""
if not isinstance(cls._meta.unique_together, (tuple, list)):
return [
checks.Error(
"'unique_together' must be a list or tuple.",
obj=cls,
id="models.E010",
)
]
elif any(
not isinstance(fields, (tuple, list))
for fields in cls._meta.unique_together
):
return [
checks.Error(
"All 'unique_together' elements must be lists or tuples.",
obj=cls,
id="models.E011",
)
]
else:
errors = []
for fields in cls._meta.unique_together:
errors.extend(cls._check_local_fields(fields, "unique_together"))
return errors
@classmethod
def _check_indexes(cls, databases):
"""Check fields, names, and conditions of indexes."""
errors = []
references = set()
for index in cls._meta.indexes:
# Index name can't start with an underscore or a number, restricted
# for cross-database compatibility with Oracle.
if index.name[0] == "_" or index.name[0].isdigit():
errors.append(
checks.Error(
"The index name '%s' cannot start with an underscore "
"or a number." % index.name,
obj=cls,
id="models.E033",
),
)
if len(index.name) > index.max_name_length:
errors.append(
checks.Error(
"The index name '%s' cannot be longer than %d "
"characters." % (index.name, index.max_name_length),
obj=cls,
id="models.E034",
),
)
if index.contains_expressions:
for expression in index.expressions:
references.update(
ref[0] for ref in cls._get_expr_references(expression)
)
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if not (
connection.features.supports_partial_indexes
or "supports_partial_indexes" in cls._meta.required_db_features
) and any(index.condition is not None for index in cls._meta.indexes):
errors.append(
checks.Warning(
"%s does not support indexes with conditions."
% connection.display_name,
hint=(
"Conditions will be ignored. Silence this warning "
"if you don't care about it."
),
obj=cls,
id="models.W037",
)
)
if not (
connection.features.supports_covering_indexes
or "supports_covering_indexes" in cls._meta.required_db_features
) and any(index.include for index in cls._meta.indexes):
errors.append(
checks.Warning(
"%s does not support indexes with non-key columns."
% connection.display_name,
hint=(
"Non-key columns will be ignored. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W040",
)
)
if not (
connection.features.supports_expression_indexes
or "supports_expression_indexes" in cls._meta.required_db_features
) and any(index.contains_expressions for index in cls._meta.indexes):
errors.append(
checks.Warning(
"%s does not support indexes on expressions."
% connection.display_name,
hint=(
"An index won't be created. Silence this warning "
"if you don't care about it."
),
obj=cls,
id="models.W043",
)
)
fields = [
field for index in cls._meta.indexes for field, _ in index.fields_orders
]
fields += [include for index in cls._meta.indexes for include in index.include]
fields += references
errors.extend(cls._check_local_fields(fields, "indexes"))
return errors
@classmethod
def _check_local_fields(cls, fields, option):
from django.db import models
# In order to avoid hitting the relation tree prematurely, we use our
# own fields_map instead of using get_field()
forward_fields_map = {}
for field in cls._meta._get_fields(reverse=False):
forward_fields_map[field.name] = field
if hasattr(field, "attname"):
forward_fields_map[field.attname] = field
errors = []
for field_name in fields:
try:
field = forward_fields_map[field_name]
except KeyError:
errors.append(
checks.Error(
"'%s' refers to the nonexistent field '%s'."
% (
option,
field_name,
),
obj=cls,
id="models.E012",
)
)
else:
if isinstance(field.remote_field, models.ManyToManyRel):
errors.append(
checks.Error(
"'%s' refers to a ManyToManyField '%s', but "
"ManyToManyFields are not permitted in '%s'."
% (
option,
field_name,
option,
),
obj=cls,
id="models.E013",
)
)
elif field not in cls._meta.local_fields:
errors.append(
checks.Error(
"'%s' refers to field '%s' which is not local to model "
"'%s'." % (option, field_name, cls._meta.object_name),
hint="This issue may be caused by multi-table inheritance.",
obj=cls,
id="models.E016",
)
)
return errors
@classmethod
def _check_ordering(cls):
"""
Check "ordering" option -- is it a list of strings and do all fields
exist?
"""
if cls._meta._ordering_clash:
return [
checks.Error(
"'ordering' and 'order_with_respect_to' cannot be used together.",
obj=cls,
id="models.E021",
),
]
if cls._meta.order_with_respect_to or not cls._meta.ordering:
return []
if not isinstance(cls._meta.ordering, (list, tuple)):
return [
checks.Error(
"'ordering' must be a tuple or list (even if you want to order by "
"only one field).",
obj=cls,
id="models.E014",
)
]
errors = []
fields = cls._meta.ordering
# Skip expressions and '?' fields.
fields = (f for f in fields if isinstance(f, str) and f != "?")
# Convert "-field" to "field".
fields = ((f[1:] if f.startswith("-") else f) for f in fields)
# Separate related fields and non-related fields.
_fields = []
related_fields = []
for f in fields:
if LOOKUP_SEP in f:
related_fields.append(f)
else:
_fields.append(f)
fields = _fields
# Check related fields.
for field in related_fields:
_cls = cls
fld = None
for part in field.split(LOOKUP_SEP):
try:
# pk is an alias that won't be found by opts.get_field.
if part == "pk":
fld = _cls._meta.pk
else:
fld = _cls._meta.get_field(part)
if fld.is_relation:
_cls = fld.path_infos[-1].to_opts.model
else:
_cls = None
except (FieldDoesNotExist, AttributeError):
if fld is None or (
fld.get_transform(part) is None and fld.get_lookup(part) is None
):
errors.append(
checks.Error(
"'ordering' refers to the nonexistent field, "
"related field, or lookup '%s'." % field,
obj=cls,
id="models.E015",
)
)
# Skip ordering on pk. This is always a valid order_by field
# but is an alias and therefore won't be found by opts.get_field.
fields = {f for f in fields if f != "pk"}
# Check for invalid or nonexistent fields in ordering.
invalid_fields = []
# Any field name that is not present in field_names does not exist.
# Also, ordering by m2m fields is not allowed.
opts = cls._meta
valid_fields = set(
chain.from_iterable(
(f.name, f.attname)
if not (f.auto_created and not f.concrete)
else (f.field.related_query_name(),)
for f in chain(opts.fields, opts.related_objects)
)
)
invalid_fields.extend(fields - valid_fields)
for invalid_field in invalid_fields:
errors.append(
checks.Error(
"'ordering' refers to the nonexistent field, related "
"field, or lookup '%s'." % invalid_field,
obj=cls,
id="models.E015",
)
)
return errors
@classmethod
def _check_long_column_names(cls, databases):
"""
Check that any auto-generated column names are shorter than the limits
for each database in which the model will be created.
"""
if not databases:
return []
errors = []
allowed_len = None
db_alias = None
# Find the minimum max allowed length among all specified db_aliases.
for db in databases:
# Skip databases where the model won't be created.
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
max_name_length = connection.ops.max_name_length()
if max_name_length is None or connection.features.truncates_names:
continue
else:
if allowed_len is None:
allowed_len = max_name_length
db_alias = db
elif max_name_length < allowed_len:
allowed_len = max_name_length
db_alias = db
if allowed_len is None:
return errors
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Check if auto-generated name for the field is too long
# for the database.
if (
f.db_column is None
and column_name is not None
and len(column_name) > allowed_len
):
errors.append(
checks.Error(
'Autogenerated column name too long for field "%s". '
'Maximum length is "%s" for database "%s".'
% (column_name, allowed_len, db_alias),
hint="Set the column name manually using 'db_column'.",
obj=cls,
id="models.E018",
)
)
for f in cls._meta.local_many_to_many:
# Skip nonexistent models.
if isinstance(f.remote_field.through, str):
continue
# Check if auto-generated name for the M2M field is too long
# for the database.
for m2m in f.remote_field.through._meta.local_fields:
_, rel_name = m2m.get_attname_column()
if (
m2m.db_column is None
and rel_name is not None
and len(rel_name) > allowed_len
):
errors.append(
checks.Error(
"Autogenerated column name too long for M2M field "
'"%s". Maximum length is "%s" for database "%s".'
% (rel_name, allowed_len, db_alias),
hint=(
"Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'."
),
obj=cls,
id="models.E019",
)
)
return errors
@classmethod
def _get_expr_references(cls, expr):
if isinstance(expr, Q):
for child in expr.children:
if isinstance(child, tuple):
lookup, value = child
yield tuple(lookup.split(LOOKUP_SEP))
yield from cls._get_expr_references(value)
else:
yield from cls._get_expr_references(child)
elif isinstance(expr, F):
yield tuple(expr.name.split(LOOKUP_SEP))
elif hasattr(expr, "get_source_expressions"):
for src_expr in expr.get_source_expressions():
yield from cls._get_expr_references(src_expr)
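# Illustrative sketch:
# _get_expr_references(Q(price__gt=F("discount__amount"))) yields
# ("price", "gt") and then ("discount", "amount"), i.e. each reference
# split on LOOKUP_SEP ("__").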
@classmethod
def _check_constraints(cls, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if not (
connection.features.supports_table_check_constraints
or "supports_table_check_constraints" in cls._meta.required_db_features
) and any(
isinstance(constraint, CheckConstraint)
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support check constraints."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W027",
)
)
if not (
connection.features.supports_partial_indexes
or "supports_partial_indexes" in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint)
and constraint.condition is not None
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support unique constraints with "
"conditions." % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W036",
)
)
if not (
connection.features.supports_deferrable_unique_constraints
or "supports_deferrable_unique_constraints"
in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint)
and constraint.deferrable is not None
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support deferrable unique constraints."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W038",
)
)
if not (
connection.features.supports_covering_indexes
or "supports_covering_indexes" in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint) and constraint.include
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support unique constraints with non-key "
"columns." % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W039",
)
)
if not (
connection.features.supports_expression_indexes
or "supports_expression_indexes" in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint)
and constraint.contains_expressions
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support unique constraints on "
"expressions." % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W044",
)
)
fields = set(
chain.from_iterable(
(*constraint.fields, *constraint.include)
for constraint in cls._meta.constraints
if isinstance(constraint, UniqueConstraint)
)
)
references = set()
for constraint in cls._meta.constraints:
if isinstance(constraint, UniqueConstraint):
if (
connection.features.supports_partial_indexes
or "supports_partial_indexes"
not in cls._meta.required_db_features
) and isinstance(constraint.condition, Q):
references.update(
cls._get_expr_references(constraint.condition)
)
if (
connection.features.supports_expression_indexes
or "supports_expression_indexes"
not in cls._meta.required_db_features
) and constraint.contains_expressions:
for expression in constraint.expressions:
references.update(cls._get_expr_references(expression))
elif isinstance(constraint, CheckConstraint):
if (
connection.features.supports_table_check_constraints
or "supports_table_check_constraints"
not in cls._meta.required_db_features
):
if isinstance(constraint.check, Q):
references.update(
cls._get_expr_references(constraint.check)
)
if any(
isinstance(expr, RawSQL)
for expr in constraint.check.flatten()
):
errors.append(
checks.Warning(
f"Check constraint {constraint.name!r} contains "
f"RawSQL() expression and won't be validated "
f"during the model full_clean().",
hint=(
"Silence this warning if you don't care about "
"it."
),
obj=cls,
id="models.W045",
),
)
for field_name, *lookups in references:
# pk is an alias that won't be found by opts.get_field.
if field_name != "pk":
fields.add(field_name)
if not lookups:
# If it has no lookups it cannot result in a JOIN.
continue
try:
if field_name == "pk":
field = cls._meta.pk
else:
field = cls._meta.get_field(field_name)
if not field.is_relation or field.many_to_many or field.one_to_many:
continue
except FieldDoesNotExist:
continue
# JOIN must happen at the first lookup.
first_lookup = lookups[0]
if (
hasattr(field, "get_transform")
and hasattr(field, "get_lookup")
and field.get_transform(first_lookup) is None
and field.get_lookup(first_lookup) is None
):
errors.append(
checks.Error(
"'constraints' refers to the joined field '%s'."
% LOOKUP_SEP.join([field_name] + lookups),
obj=cls,
id="models.E041",
)
)
errors.extend(cls._check_local_fields(fields, "constraints"))
return errors
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(self, ordered_obj, id_list, using=None):
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update(
[ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list)],
["_order"],
)
def method_get_order(self, ordered_obj):
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
pk_name = ordered_obj._meta.pk.name
return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True)
def make_foreign_order_accessors(model, related_model):
setattr(
related_model,
"get_%s_order" % model.__name__.lower(),
partialmethod(method_get_order, model),
)
setattr(
related_model,
"set_%s_order" % model.__name__.lower(),
partialmethod(method_set_order, model),
)
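# Illustrative sketch (hypothetical models): if Answer declares
# Meta.order_with_respect_to = "question", then
# make_foreign_order_accessors(Answer, Question) attaches
# question.get_answer_order() and question.set_answer_order(id_list)
# to Question instances.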
########
# MISC #
########
def model_unpickle(model_id):
"""Used to unpickle Model subclasses with deferred fields."""
if isinstance(model_id, tuple):
model = apps.get_model(*model_id)
else:
# Backwards compat - the model was cached directly in earlier versions.
model = model_id
return model.__new__(model)
model_unpickle.__safe_for_unpickle__ = True
49274a175b8b964da8b0002b624852428b70994d8caab0af41a0b219fddcc0a4
import itertools
import math
from django.core.exceptions import EmptyResultSet
from django.db.models.expressions import Case, Expression, Func, Value, When
from django.db.models.fields import (
BooleanField,
CharField,
DateTimeField,
Field,
IntegerField,
UUIDField,
)
from django.db.models.query_utils import RegisterLookupMixin
from django.utils.datastructures import OrderedSet
from django.utils.functional import cached_property
from django.utils.hashable import make_hashable
class Lookup(Expression):
lookup_name = None
prepare_rhs = True
can_use_none_as_rhs = False
def __init__(self, lhs, rhs):
self.lhs, self.rhs = lhs, rhs
self.rhs = self.get_prep_lookup()
self.lhs = self.get_prep_lhs()
if hasattr(self.lhs, "get_bilateral_transforms"):
bilateral_transforms = self.lhs.get_bilateral_transforms()
else:
bilateral_transforms = []
if bilateral_transforms:
# Warn the user as soon as possible if they are trying to apply
# a bilateral transformation on a nested QuerySet: that won't work.
from django.db.models.sql.query import Query # avoid circular import
if isinstance(rhs, Query):
raise NotImplementedError(
"Bilateral transformations on nested querysets are not implemented."
)
self.bilateral_transforms = bilateral_transforms
def apply_bilateral_transforms(self, value):
for transform in self.bilateral_transforms:
value = transform(value)
return value
def __repr__(self):
return f"{self.__class__.__name__}({self.lhs!r}, {self.rhs!r})"
def batch_process_rhs(self, compiler, connection, rhs=None):
if rhs is None:
rhs = self.rhs
if self.bilateral_transforms:
sqls, sqls_params = [], []
for p in rhs:
value = Value(p, output_field=self.lhs.output_field)
value = self.apply_bilateral_transforms(value)
value = value.resolve_expression(compiler.query)
sql, sql_params = compiler.compile(value)
sqls.append(sql)
sqls_params.extend(sql_params)
else:
_, params = self.get_db_prep_lookup(rhs, connection)
sqls, sqls_params = ["%s"] * len(params), params
return sqls, sqls_params
def get_source_expressions(self):
if self.rhs_is_direct_value():
return [self.lhs]
return [self.lhs, self.rhs]
def set_source_expressions(self, new_exprs):
if len(new_exprs) == 1:
self.lhs = new_exprs[0]
else:
self.lhs, self.rhs = new_exprs
def get_prep_lookup(self):
if not self.prepare_rhs or hasattr(self.rhs, "resolve_expression"):
return self.rhs
if hasattr(self.lhs, "output_field"):
if hasattr(self.lhs.output_field, "get_prep_value"):
return self.lhs.output_field.get_prep_value(self.rhs)
elif self.rhs_is_direct_value():
return Value(self.rhs)
return self.rhs
def get_prep_lhs(self):
if hasattr(self.lhs, "resolve_expression"):
return self.lhs
return Value(self.lhs)
def get_db_prep_lookup(self, value, connection):
return ("%s", [value])
def process_lhs(self, compiler, connection, lhs=None):
lhs = lhs or self.lhs
if hasattr(lhs, "resolve_expression"):
lhs = lhs.resolve_expression(compiler.query)
sql, params = compiler.compile(lhs)
if isinstance(lhs, Lookup):
# Wrapped in parentheses to respect operator precedence.
sql = f"({sql})"
return sql, params
def process_rhs(self, compiler, connection):
value = self.rhs
if self.bilateral_transforms:
if self.rhs_is_direct_value():
# Do not call get_db_prep_lookup here as the value will be
# transformed before being used for the lookup.
value = Value(value, output_field=self.lhs.output_field)
value = self.apply_bilateral_transforms(value)
value = value.resolve_expression(compiler.query)
if hasattr(value, "as_sql"):
sql, params = compiler.compile(value)
# Ensure expression is wrapped in parentheses to respect operator
# precedence but avoid double wrapping as it can be misinterpreted
# on some backends (e.g. subqueries on SQLite).
if sql and sql[0] != "(":
sql = "(%s)" % sql
return sql, params
else:
return self.get_db_prep_lookup(value, connection)
def rhs_is_direct_value(self):
return not hasattr(self.rhs, "as_sql")
def get_group_by_cols(self):
cols = []
for source in self.get_source_expressions():
cols.extend(source.get_group_by_cols())
return cols
def as_oracle(self, compiler, connection):
# Oracle doesn't allow EXISTS() and filters to be compared to another
# expression unless they're wrapped in a CASE WHEN.
wrapped = False
exprs = []
for expr in (self.lhs, self.rhs):
if connection.ops.conditional_expression_supported_in_where_clause(expr):
expr = Case(When(expr, then=True), default=False)
wrapped = True
exprs.append(expr)
lookup = type(self)(*exprs) if wrapped else self
return lookup.as_sql(compiler, connection)
@cached_property
def output_field(self):
return BooleanField()
@property
def identity(self):
return self.__class__, self.lhs, self.rhs
def __eq__(self, other):
if not isinstance(other, Lookup):
return NotImplemented
return self.identity == other.identity
def __hash__(self):
return hash(make_hashable(self.identity))
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
c = self.copy()
c.is_summary = summarize
c.lhs = self.lhs.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
if hasattr(self.rhs, "resolve_expression"):
c.rhs = self.rhs.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
return c
def select_format(self, compiler, sql, params):
# Wrap filters with a CASE WHEN expression if a database backend
# (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP
# BY list.
if not compiler.connection.features.supports_boolean_expr_in_select_clause:
sql = f"CASE WHEN {sql} THEN 1 ELSE 0 END"
return sql, params
class Transform(RegisterLookupMixin, Func):
"""
RegisterLookupMixin() is first so that get_lookup() and get_transform()
first examine self and then check output_field.
"""
bilateral = False
arity = 1
@property
def lhs(self):
return self.get_source_expressions()[0]
def get_bilateral_transforms(self):
if hasattr(self.lhs, "get_bilateral_transforms"):
bilateral_transforms = self.lhs.get_bilateral_transforms()
else:
bilateral_transforms = []
if self.bilateral:
bilateral_transforms.append(self.__class__)
return bilateral_transforms
class BuiltinLookup(Lookup):
def process_lhs(self, compiler, connection, lhs=None):
lhs_sql, params = super().process_lhs(compiler, connection, lhs)
field_internal_type = self.lhs.output_field.get_internal_type()
db_type = self.lhs.output_field.db_type(connection=connection)
lhs_sql = connection.ops.field_cast_sql(db_type, field_internal_type) % lhs_sql
lhs_sql = (
connection.ops.lookup_cast(self.lookup_name, field_internal_type) % lhs_sql
)
return lhs_sql, list(params)
def as_sql(self, compiler, connection):
lhs_sql, params = self.process_lhs(compiler, connection)
rhs_sql, rhs_params = self.process_rhs(compiler, connection)
params.extend(rhs_params)
rhs_sql = self.get_rhs_op(connection, rhs_sql)
return "%s %s" % (lhs_sql, rhs_sql), params
def get_rhs_op(self, connection, rhs):
return connection.operators[self.lookup_name] % rhs
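# Illustrative sketch: for Author.objects.filter(age__gt=30), as_sql() above
# yields SQL like '"author"."age" > %s' with params [30]; the "> %s"
# fragment comes from connection.operators["gt"] via get_rhs_op().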
class FieldGetDbPrepValueMixin:
"""
Some lookups require Field.get_db_prep_value() to be called on their
inputs.
"""
get_db_prep_lookup_value_is_iterable = False
def get_db_prep_lookup(self, value, connection):
# For relational fields, use the 'target_field' attribute of the
# output_field.
field = getattr(self.lhs.output_field, "target_field", None)
get_db_prep_value = (
getattr(field, "get_db_prep_value", None)
or self.lhs.output_field.get_db_prep_value
)
return (
"%s",
[get_db_prep_value(v, connection, prepared=True) for v in value]
if self.get_db_prep_lookup_value_is_iterable
else [get_db_prep_value(value, connection, prepared=True)],
)
class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin):
"""
Some lookups require Field.get_db_prep_value() to be called on each value
in an iterable.
"""
get_db_prep_lookup_value_is_iterable = True
def get_prep_lookup(self):
if hasattr(self.rhs, "resolve_expression"):
return self.rhs
prepared_values = []
for rhs_value in self.rhs:
if hasattr(rhs_value, "resolve_expression"):
# An expression will be handled by the database but can coexist
# alongside real values.
pass
elif self.prepare_rhs and hasattr(self.lhs.output_field, "get_prep_value"):
rhs_value = self.lhs.output_field.get_prep_value(rhs_value)
prepared_values.append(rhs_value)
return prepared_values
def process_rhs(self, compiler, connection):
if self.rhs_is_direct_value():
# rhs should be an iterable of values. Use batch_process_rhs()
# to prepare/transform those values.
return self.batch_process_rhs(compiler, connection)
else:
return super().process_rhs(compiler, connection)
def resolve_expression_parameter(self, compiler, connection, sql, param):
params = [param]
if hasattr(param, "resolve_expression"):
param = param.resolve_expression(compiler.query)
if hasattr(param, "as_sql"):
sql, params = compiler.compile(param)
return sql, params
def batch_process_rhs(self, compiler, connection, rhs=None):
pre_processed = super().batch_process_rhs(compiler, connection, rhs)
# The params list may contain expressions which compile to a
# sql/param pair. Zip them to get sql and param pairs that refer to the
# same argument and attempt to replace them with the result of
# compiling the param step.
sql, params = zip(
*(
self.resolve_expression_parameter(compiler, connection, sql, param)
for sql, param in zip(*pre_processed)
)
)
params = itertools.chain.from_iterable(params)
return sql, tuple(params)
class PostgresOperatorLookup(Lookup):
"""Lookup defined by operators on PostgreSQL."""
postgres_operator = None
def as_postgresql(self, compiler, connection):
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = tuple(lhs_params) + tuple(rhs_params)
return "%s %s %s" % (lhs, self.postgres_operator, rhs), params
@Field.register_lookup
class Exact(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "exact"
def get_prep_lookup(self):
from django.db.models.sql.query import Query # avoid circular import
if isinstance(self.rhs, Query):
if self.rhs.has_limit_one():
if not self.rhs.has_select_fields:
self.rhs.clear_select_clause()
self.rhs.add_fields(["pk"])
else:
raise ValueError(
"The QuerySet value for an exact lookup must be limited to "
"one result using slicing."
)
return super().get_prep_lookup()
def as_sql(self, compiler, connection):
# Avoid comparison against direct rhs if lhs is a boolean value. That
# turns "boolfield__exact=True" into "WHERE boolean_field" instead of
# "WHERE boolean_field = True" when allowed.
if (
isinstance(self.rhs, bool)
and getattr(self.lhs, "conditional", False)
and connection.ops.conditional_expression_supported_in_where_clause(
self.lhs
)
):
lhs_sql, params = self.process_lhs(compiler, connection)
template = "%s" if self.rhs else "NOT %s"
return template % lhs_sql, params
return super().as_sql(compiler, connection)
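# Illustrative sketch: Entry.objects.filter(blog=Blog.objects.filter(
# name__startswith="A")[:1]) is accepted because the nested QuerySet is
# sliced to one row; without the [:1] slice, get_prep_lookup() above raises
# ValueError.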
@Field.register_lookup
class IExact(BuiltinLookup):
lookup_name = "iexact"
prepare_rhs = False
def process_rhs(self, qn, connection):
rhs, params = super().process_rhs(qn, connection)
if params:
params[0] = connection.ops.prep_for_iexact_query(params[0])
return rhs, params
@Field.register_lookup
class GreaterThan(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "gt"
@Field.register_lookup
class GreaterThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "gte"
@Field.register_lookup
class LessThan(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "lt"
@Field.register_lookup
class LessThanOrEqual(FieldGetDbPrepValueMixin, BuiltinLookup):
lookup_name = "lte"
class IntegerFieldFloatRounding:
"""
Allow floats to work as query values for IntegerField. Without this, the
decimal portion of the float would always be discarded.
"""
def get_prep_lookup(self):
if isinstance(self.rhs, float):
self.rhs = math.ceil(self.rhs)
return super().get_prep_lookup()
@IntegerField.register_lookup
class IntegerGreaterThanOrEqual(IntegerFieldFloatRounding, GreaterThanOrEqual):
pass
@IntegerField.register_lookup
class IntegerLessThan(IntegerFieldFloatRounding, LessThan):
pass
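# Illustrative sketch: with the mixin above, rating__gte=4.5 compares
# against math.ceil(4.5) == 5, and rating__lt=4.5 also compares against 5,
# so the decimal portion of the float is no longer silently discarded.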
@Field.register_lookup
class In(FieldGetDbPrepValueIterableMixin, BuiltinLookup):
lookup_name = "in"
def get_prep_lookup(self):
from django.db.models.sql.query import Query # avoid circular import
if isinstance(self.rhs, Query):
self.rhs.clear_ordering(clear_default=True)
if not self.rhs.has_select_fields:
self.rhs.clear_select_clause()
self.rhs.add_fields(["pk"])
return super().get_prep_lookup()
def process_rhs(self, compiler, connection):
db_rhs = getattr(self.rhs, "_db", None)
if db_rhs is not None and db_rhs != connection.alias:
raise ValueError(
"Subqueries aren't allowed across different databases. Force "
"the inner query to be evaluated using `list(inner_query)`."
)
if self.rhs_is_direct_value():
# Remove None from the list as NULL is never equal to anything.
try:
rhs = OrderedSet(self.rhs)
rhs.discard(None)
except TypeError: # Unhashable items in self.rhs
rhs = [r for r in self.rhs if r is not None]
if not rhs:
raise EmptyResultSet
# rhs should be an iterable; use batch_process_rhs() to
# prepare/transform those values.
sqls, sqls_params = self.batch_process_rhs(compiler, connection, rhs)
placeholder = "(" + ", ".join(sqls) + ")"
return (placeholder, sqls_params)
return super().process_rhs(compiler, connection)
def get_rhs_op(self, connection, rhs):
return "IN %s" % rhs
def as_sql(self, compiler, connection):
max_in_list_size = connection.ops.max_in_list_size()
if (
self.rhs_is_direct_value()
and max_in_list_size
and len(self.rhs) > max_in_list_size
):
return self.split_parameter_list_as_sql(compiler, connection)
return super().as_sql(compiler, connection)
def split_parameter_list_as_sql(self, compiler, connection):
# This is a special case for databases which limit the number of
# elements which can appear in an 'IN' clause.
max_in_list_size = connection.ops.max_in_list_size()
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.batch_process_rhs(compiler, connection)
in_clause_elements = ["("]
params = []
for offset in range(0, len(rhs_params), max_in_list_size):
if offset > 0:
in_clause_elements.append(" OR ")
in_clause_elements.append("%s IN (" % lhs)
params.extend(lhs_params)
sqls = rhs[offset : offset + max_in_list_size]
sqls_params = rhs_params[offset : offset + max_in_list_size]
param_group = ", ".join(sqls)
in_clause_elements.append(param_group)
in_clause_elements.append(")")
params.extend(sqls_params)
in_clause_elements.append(")")
return "".join(in_clause_elements), params
class PatternLookup(BuiltinLookup):
param_pattern = "%%%s%%"
prepare_rhs = False
def get_rhs_op(self, connection, rhs):
# Assume we are in startswith. We need to produce SQL like:
# col LIKE %s, ['thevalue%']
# For Python values we can (and should) do that directly in Python,
# but if the value is for example reference to other column, then
# we need to add the % pattern match to the lookup by something like
# col LIKE othercol || '%%'
# So, for Python values we don't need any special pattern, but for
# SQL reference values or SQL transformations we need the correct
# pattern added.
if hasattr(self.rhs, "as_sql") or self.bilateral_transforms:
pattern = connection.pattern_ops[self.lookup_name].format(
connection.pattern_esc
)
return pattern.format(rhs)
else:
return super().get_rhs_op(connection, rhs)
def process_rhs(self, qn, connection):
rhs, params = super().process_rhs(qn, connection)
if self.rhs_is_direct_value() and params and not self.bilateral_transforms:
params[0] = self.param_pattern % connection.ops.prep_for_like_query(
params[0]
)
return rhs, params
@Field.register_lookup
class Contains(PatternLookup):
lookup_name = "contains"
@Field.register_lookup
class IContains(Contains):
lookup_name = "icontains"
@Field.register_lookup
class StartsWith(PatternLookup):
lookup_name = "startswith"
param_pattern = "%s%%"
@Field.register_lookup
class IStartsWith(StartsWith):
lookup_name = "istartswith"
@Field.register_lookup
class EndsWith(PatternLookup):
lookup_name = "endswith"
param_pattern = "%%%s"
@Field.register_lookup
class IEndsWith(EndsWith):
lookup_name = "iendswith"
@Field.register_lookup
class Range(FieldGetDbPrepValueIterableMixin, BuiltinLookup):
lookup_name = "range"
def get_rhs_op(self, connection, rhs):
return "BETWEEN %s AND %s" % (rhs[0], rhs[1])
@Field.register_lookup
class IsNull(BuiltinLookup):
lookup_name = "isnull"
prepare_rhs = False
def as_sql(self, compiler, connection):
if not isinstance(self.rhs, bool):
raise ValueError(
"The QuerySet value for an isnull lookup must be True or False."
)
sql, params = self.process_lhs(compiler, connection)
if self.rhs:
return "%s IS NULL" % sql, params
else:
return "%s IS NOT NULL" % sql, params
@Field.register_lookup
class Regex(BuiltinLookup):
lookup_name = "regex"
prepare_rhs = False
def as_sql(self, compiler, connection):
if self.lookup_name in connection.operators:
return super().as_sql(compiler, connection)
else:
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
sql_template = connection.ops.regex_lookup(self.lookup_name)
return sql_template % (lhs, rhs), lhs_params + rhs_params
@Field.register_lookup
class IRegex(Regex):
lookup_name = "iregex"
class YearLookup(Lookup):
def year_lookup_bounds(self, connection, year):
from django.db.models.functions import ExtractIsoYear
iso_year = isinstance(self.lhs, ExtractIsoYear)
output_field = self.lhs.lhs.output_field
if isinstance(output_field, DateTimeField):
bounds = connection.ops.year_lookup_bounds_for_datetime_field(
year,
iso_year=iso_year,
)
else:
bounds = connection.ops.year_lookup_bounds_for_date_field(
year,
iso_year=iso_year,
)
return bounds
def as_sql(self, compiler, connection):
# Avoid the extract operation if the rhs is a direct value to allow
# indexes to be used.
if self.rhs_is_direct_value():
# Skip the extract part by directly using the originating field,
# that is self.lhs.lhs.
lhs_sql, params = self.process_lhs(compiler, connection, self.lhs.lhs)
rhs_sql, _ = self.process_rhs(compiler, connection)
rhs_sql = self.get_direct_rhs_sql(connection, rhs_sql)
start, finish = self.year_lookup_bounds(connection, self.rhs)
params.extend(self.get_bound_params(start, finish))
return "%s %s" % (lhs_sql, rhs_sql), params
return super().as_sql(compiler, connection)
def get_direct_rhs_sql(self, connection, rhs):
return connection.operators[self.lookup_name] % rhs
def get_bound_params(self, start, finish):
raise NotImplementedError(
"subclasses of YearLookup must provide a get_bound_params() method"
)
class YearExact(YearLookup, Exact):
def get_direct_rhs_sql(self, connection, rhs):
return "BETWEEN %s AND %s"
def get_bound_params(self, start, finish):
return (start, finish)
class YearGt(YearLookup, GreaterThan):
def get_bound_params(self, start, finish):
return (finish,)
class YearGte(YearLookup, GreaterThanOrEqual):
def get_bound_params(self, start, finish):
return (start,)
class YearLt(YearLookup, LessThan):
def get_bound_params(self, start, finish):
return (start,)
class YearLte(YearLookup, LessThanOrEqual):
def get_bound_params(self, start, finish):
return (finish,)
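# Illustrative sketch: pub_date__year=2023 on a DateTimeField compiles to
# "pub_date" BETWEEN %s AND %s with bounds spanning 2023-01-01 00:00:00 to
# 2023-12-31 23:59:59.999999, so an index on "pub_date" stays usable instead
# of extracting the year from every row.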
class UUIDTextMixin:
"""
Strip hyphens from a value when filtering a UUIDField on backends without
a native datatype for UUID.
"""
def process_rhs(self, qn, connection):
if not connection.features.has_native_uuid_field:
from django.db.models.functions import Replace
if self.rhs_is_direct_value():
self.rhs = Value(self.rhs)
self.rhs = Replace(
self.rhs, Value("-"), Value(""), output_field=CharField()
)
rhs, params = super().process_rhs(qn, connection)
return rhs, params
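# Illustrative sketch: on backends without a native UUID type (e.g. SQLite),
# UUIDs are stored as 32-character hex strings, so a filter such as
# token__startswith="550e8400-e2" has its hyphens stripped via Replace()
# before the LIKE comparison is built.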
@UUIDField.register_lookup
class UUIDIExact(UUIDTextMixin, IExact):
pass
@UUIDField.register_lookup
class UUIDContains(UUIDTextMixin, Contains):
pass
@UUIDField.register_lookup
class UUIDIContains(UUIDTextMixin, IContains):
pass
@UUIDField.register_lookup
class UUIDStartsWith(UUIDTextMixin, StartsWith):
pass
@UUIDField.register_lookup
class UUIDIStartsWith(UUIDTextMixin, IStartsWith):
pass
@UUIDField.register_lookup
class UUIDEndsWith(UUIDTextMixin, EndsWith):
pass
@UUIDField.register_lookup
class UUIDIEndsWith(UUIDTextMixin, IEndsWith):
pass
a883a3418a66ded3ad8f58ec2554a9cf10c7fe46005a4f0c3868fe477598bc97
from .fields import AddField, AlterField, RemoveField, RenameField
from .models import (
AddConstraint,
AddIndex,
AlterIndexTogether,
AlterModelManagers,
AlterModelOptions,
AlterModelTable,
AlterModelTableComment,
AlterOrderWithRespectTo,
AlterUniqueTogether,
CreateModel,
DeleteModel,
RemoveConstraint,
RemoveIndex,
RenameIndex,
RenameModel,
)
from .special import RunPython, RunSQL, SeparateDatabaseAndState
__all__ = [
"CreateModel",
"DeleteModel",
"AlterModelTable",
"AlterModelTableComment",
"AlterUniqueTogether",
"RenameModel",
"AlterIndexTogether",
"AlterModelOptions",
"AddIndex",
"RemoveIndex",
"RenameIndex",
"AddField",
"RemoveField",
"AlterField",
"RenameField",
"AddConstraint",
"RemoveConstraint",
"SeparateDatabaseAndState",
"RunSQL",
"RunPython",
"AlterOrderWithRespectTo",
"AlterModelManagers",
]
1830ead29268d2bb2e5f07a97683db5e0f54eab2c1034d1a7dd1c5f3c96d6a69
from django.db import models
from django.db.migrations.operations.base import Operation
from django.db.migrations.state import ModelState
from django.db.migrations.utils import field_references, resolve_relation
from django.db.models.options import normalize_together
from django.utils.functional import cached_property
from .fields import AddField, AlterField, FieldOperation, RemoveField, RenameField
def _check_for_duplicates(arg_name, objs):
used_vals = set()
for val in objs:
if val in used_vals:
raise ValueError(
"Found duplicate value %s in CreateModel %s argument." % (val, arg_name)
)
used_vals.add(val)
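# Illustrative sketch: _check_for_duplicates("fields", ["id", "name", "id"])
# raises ValueError("Found duplicate value id in CreateModel fields
# argument.").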
class ModelOperation(Operation):
def __init__(self, name):
self.name = name
@cached_property
def name_lower(self):
return self.name.lower()
def references_model(self, name, app_label):
return name.lower() == self.name_lower
def reduce(self, operation, app_label):
return super().reduce(operation, app_label) or self.can_reduce_through(
operation, app_label
)
def can_reduce_through(self, operation, app_label):
return not operation.references_model(self.name, app_label)
class CreateModel(ModelOperation):
"""Create a model's table."""
serialization_expand_args = ["fields", "options", "managers"]
def __init__(self, name, fields, options=None, bases=None, managers=None):
self.fields = fields
self.options = options or {}
self.bases = bases or (models.Model,)
self.managers = managers or []
super().__init__(name)
# Sanity-check that there are no duplicated field names, bases, or
# manager names
_check_for_duplicates("fields", (name for name, _ in self.fields))
_check_for_duplicates(
"bases",
(
base._meta.label_lower
if hasattr(base, "_meta")
else base.lower()
if isinstance(base, str)
else base
for base in self.bases
),
)
_check_for_duplicates("managers", (name for name, _ in self.managers))
def deconstruct(self):
kwargs = {
"name": self.name,
"fields": self.fields,
}
if self.options:
kwargs["options"] = self.options
if self.bases and self.bases != (models.Model,):
kwargs["bases"] = self.bases
if self.managers and self.managers != [("objects", models.Manager())]:
kwargs["managers"] = self.managers
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.add_model(
ModelState(
app_label,
self.name,
list(self.fields),
dict(self.options),
tuple(self.bases),
list(self.managers),
)
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.create_model(model)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_model(model)
def describe(self):
return "Create %smodel %s" % (
"proxy " if self.options.get("proxy", False) else "",
self.name,
)
@property
def migration_name_fragment(self):
return self.name_lower
def references_model(self, name, app_label):
name_lower = name.lower()
if name_lower == self.name_lower:
return True
# Check we didn't inherit from the model
reference_model_tuple = (app_label, name_lower)
for base in self.bases:
if (
base is not models.Model
and isinstance(base, (models.base.ModelBase, str))
and resolve_relation(base, app_label) == reference_model_tuple
):
return True
# Check we have no FKs/M2Ms with it
for _name, field in self.fields:
if field_references(
(app_label, self.name_lower), field, reference_model_tuple
):
return True
return False
def reduce(self, operation, app_label):
if (
isinstance(operation, DeleteModel)
and self.name_lower == operation.name_lower
and not self.options.get("proxy", False)
):
return []
elif (
isinstance(operation, RenameModel)
and self.name_lower == operation.old_name_lower
):
return [
CreateModel(
operation.new_name,
fields=self.fields,
options=self.options,
bases=self.bases,
managers=self.managers,
),
]
elif (
isinstance(operation, AlterModelOptions)
and self.name_lower == operation.name_lower
):
options = {**self.options, **operation.options}
for key in operation.ALTER_OPTION_KEYS:
if key not in operation.options:
options.pop(key, None)
return [
CreateModel(
self.name,
fields=self.fields,
options=options,
bases=self.bases,
managers=self.managers,
),
]
elif (
isinstance(operation, AlterModelManagers)
and self.name_lower == operation.name_lower
):
return [
CreateModel(
self.name,
fields=self.fields,
options=self.options,
bases=self.bases,
managers=operation.managers,
),
]
elif (
isinstance(operation, AlterTogetherOptionOperation)
and self.name_lower == operation.name_lower
):
return [
CreateModel(
self.name,
fields=self.fields,
options={
**self.options,
**{operation.option_name: operation.option_value},
},
bases=self.bases,
managers=self.managers,
),
]
elif (
isinstance(operation, AlterOrderWithRespectTo)
and self.name_lower == operation.name_lower
):
return [
CreateModel(
self.name,
fields=self.fields,
options={
**self.options,
"order_with_respect_to": operation.order_with_respect_to,
},
bases=self.bases,
managers=self.managers,
),
]
elif (
isinstance(operation, FieldOperation)
and self.name_lower == operation.model_name_lower
):
if isinstance(operation, AddField):
return [
CreateModel(
self.name,
fields=self.fields + [(operation.name, operation.field)],
options=self.options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, AlterField):
return [
CreateModel(
self.name,
fields=[
(n, operation.field if n == operation.name else v)
for n, v in self.fields
],
options=self.options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, RemoveField):
options = self.options.copy()
for option_name in ("unique_together", "index_together"):
option = options.pop(option_name, None)
if option:
option = set(
filter(
bool,
(
tuple(
f for f in fields if f != operation.name_lower
)
for fields in option
),
)
)
if option:
options[option_name] = option
order_with_respect_to = options.get("order_with_respect_to")
if order_with_respect_to == operation.name_lower:
del options["order_with_respect_to"]
return [
CreateModel(
self.name,
fields=[
(n, v)
for n, v in self.fields
if n.lower() != operation.name_lower
],
options=options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, RenameField):
options = self.options.copy()
for option_name in ("unique_together", "index_together"):
option = options.get(option_name)
if option:
options[option_name] = {
tuple(
operation.new_name if f == operation.old_name else f
for f in fields
)
for fields in option
}
order_with_respect_to = options.get("order_with_respect_to")
if order_with_respect_to == operation.old_name:
options["order_with_respect_to"] = operation.new_name
return [
CreateModel(
self.name,
fields=[
(operation.new_name if n == operation.old_name else n, v)
for n, v in self.fields
],
options=options,
bases=self.bases,
managers=self.managers,
),
]
return super().reduce(operation, app_label)
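# Illustrative sketch of optimizer squashing: CreateModel("Author", fields)
# followed by RenameModel("Author", "Writer") reduces to a single
# CreateModel("Writer", fields); CreateModel followed by DeleteModel of the
# same (non-proxy) model reduces to no operations at all.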
class DeleteModel(ModelOperation):
"""Drop a model's table."""
def deconstruct(self):
kwargs = {
"name": self.name,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.remove_model(app_label, self.name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_model(model)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.create_model(model)
def references_model(self, name, app_label):
# The deleted model could be referencing the specified model through
# related fields.
return True
def describe(self):
return "Delete model %s" % self.name
@property
def migration_name_fragment(self):
return "delete_%s" % self.name_lower
class RenameModel(ModelOperation):
"""Rename a model."""
def __init__(self, old_name, new_name):
self.old_name = old_name
self.new_name = new_name
super().__init__(old_name)
@cached_property
def old_name_lower(self):
return self.old_name.lower()
@cached_property
def new_name_lower(self):
return self.new_name.lower()
def deconstruct(self):
kwargs = {
"old_name": self.old_name,
"new_name": self.new_name,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.rename_model(app_label, self.old_name, self.new_name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.new_name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.old_name)
# Move the main table
schema_editor.alter_db_table(
new_model,
old_model._meta.db_table,
new_model._meta.db_table,
)
# Alter the fields pointing to us
for related_object in old_model._meta.related_objects:
if related_object.related_model == old_model:
model = new_model
related_key = (app_label, self.new_name_lower)
else:
model = related_object.related_model
related_key = (
related_object.related_model._meta.app_label,
related_object.related_model._meta.model_name,
)
to_field = to_state.apps.get_model(*related_key)._meta.get_field(
related_object.field.name
)
schema_editor.alter_field(
model,
related_object.field,
to_field,
)
# Rename M2M fields whose name is based on this model's name.
fields = zip(
old_model._meta.local_many_to_many, new_model._meta.local_many_to_many
)
for (old_field, new_field) in fields:
# Skip self-referential fields as these are renamed above.
if (
new_field.model == new_field.related_model
or not new_field.remote_field.through._meta.auto_created
):
continue
# Rename the M2M table that's based on this model's name.
old_m2m_model = old_field.remote_field.through
new_m2m_model = new_field.remote_field.through
schema_editor.alter_db_table(
new_m2m_model,
old_m2m_model._meta.db_table,
new_m2m_model._meta.db_table,
)
# Rename the column in the M2M table that's based on this
# model's name.
schema_editor.alter_field(
new_m2m_model,
old_m2m_model._meta.get_field(old_model._meta.model_name),
new_m2m_model._meta.get_field(new_model._meta.model_name),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.new_name_lower, self.old_name_lower = (
self.old_name_lower,
self.new_name_lower,
)
self.new_name, self.old_name = self.old_name, self.new_name
self.database_forwards(app_label, schema_editor, from_state, to_state)
self.new_name_lower, self.old_name_lower = (
self.old_name_lower,
self.new_name_lower,
)
self.new_name, self.old_name = self.old_name, self.new_name
def references_model(self, name, app_label):
return (
name.lower() == self.old_name_lower or name.lower() == self.new_name_lower
)
def describe(self):
return "Rename model %s to %s" % (self.old_name, self.new_name)
@property
def migration_name_fragment(self):
return "rename_%s_%s" % (self.old_name_lower, self.new_name_lower)
def reduce(self, operation, app_label):
if (
isinstance(operation, RenameModel)
and self.new_name_lower == operation.old_name_lower
):
return [
RenameModel(
self.old_name,
operation.new_name,
),
]
# Skip `ModelOperation.reduce` as we want to run `references_model`
# against self.new_name.
return super(ModelOperation, self).reduce(
operation, app_label
) or not operation.references_model(self.new_name, app_label)
class ModelOptionOperation(ModelOperation):
def reduce(self, operation, app_label):
if (
isinstance(operation, (self.__class__, DeleteModel))
and self.name_lower == operation.name_lower
):
return [operation]
return super().reduce(operation, app_label)
class AlterModelTable(ModelOptionOperation):
"""Rename a model's table."""
def __init__(self, name, table):
self.table = table
super().__init__(name)
def deconstruct(self):
kwargs = {
"name": self.name,
"table": self.table,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.alter_model_options(app_label, self.name_lower, {"db_table": self.table})
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
schema_editor.alter_db_table(
new_model,
old_model._meta.db_table,
new_model._meta.db_table,
)
# Rename M2M fields whose name is based on this model's db_table
for (old_field, new_field) in zip(
old_model._meta.local_many_to_many, new_model._meta.local_many_to_many
):
if new_field.remote_field.through._meta.auto_created:
schema_editor.alter_db_table(
new_field.remote_field.through,
old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table,
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def describe(self):
return "Rename table for %s to %s" % (
self.name,
self.table if self.table is not None else "(default)",
)
@property
def migration_name_fragment(self):
return "alter_%s_table" % self.name_lower
class AlterModelTableComment(ModelOptionOperation):
def __init__(self, name, table_comment):
self.table_comment = table_comment
super().__init__(name)
def deconstruct(self):
kwargs = {
"name": self.name,
"table_comment": self.table_comment,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.alter_model_options(
app_label, self.name_lower, {"db_table_comment": self.table_comment}
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
schema_editor.alter_db_table_comment(
new_model,
old_model._meta.db_table_comment,
new_model._meta.db_table_comment,
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def describe(self):
return f"Alter {self.name} table comment"
@property
def migration_name_fragment(self):
return f"alter_{self.name_lower}_table_comment"
class AlterTogetherOptionOperation(ModelOptionOperation):
option_name = None
def __init__(self, name, option_value):
if option_value:
option_value = set(normalize_together(option_value))
setattr(self, self.option_name, option_value)
super().__init__(name)
@cached_property
def option_value(self):
return getattr(self, self.option_name)
def deconstruct(self):
kwargs = {
"name": self.name,
self.option_name: self.option_value,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.alter_model_options(
app_label,
self.name_lower,
{self.option_name: self.option_value},
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
alter_together = getattr(schema_editor, "alter_%s" % self.option_name)
alter_together(
new_model,
getattr(old_model._meta, self.option_name, set()),
getattr(new_model._meta, self.option_name, set()),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_field(self, model_name, name, app_label):
return self.references_model(model_name, app_label) and (
not self.option_value
or any((name in fields) for fields in self.option_value)
)
def describe(self):
return "Alter %s for %s (%s constraint(s))" % (
self.option_name,
self.name,
len(self.option_value or ""),
)
@property
def migration_name_fragment(self):
return "alter_%s_%s" % (self.name_lower, self.option_name)
def can_reduce_through(self, operation, app_label):
return super().can_reduce_through(operation, app_label) or (
isinstance(operation, AlterTogetherOptionOperation)
and type(operation) is not type(self)
)
class AlterUniqueTogether(AlterTogetherOptionOperation):
"""
Change the value of unique_together to the target one.
Input value of unique_together must be a set of tuples.
"""
option_name = "unique_together"
def __init__(self, name, unique_together):
super().__init__(name, unique_together)
class AlterIndexTogether(AlterTogetherOptionOperation):
"""
Change the value of index_together to the target one.
Input value of index_together must be a set of tuples.
"""
option_name = "index_together"
def __init__(self, name, index_together):
super().__init__(name, index_together)
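# Hedged example (model and field names hypothetical): the option value is
# normalized to a set of tuples, so a single pair may also be given flat and
# normalize_together() will wrap it.
#
#   migrations.AlterUniqueTogether(name="book", unique_together={("author", "title")})
#   migrations.AlterUniqueTogether(name="book", unique_together=("author", "title"))
#   # both store {("author", "title")}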
class AlterOrderWithRespectTo(ModelOptionOperation):
"""Represent a change with the order_with_respect_to option."""
option_name = "order_with_respect_to"
def __init__(self, name, order_with_respect_to):
self.order_with_respect_to = order_with_respect_to
super().__init__(name)
def deconstruct(self):
kwargs = {
"name": self.name,
"order_with_respect_to": self.order_with_respect_to,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.alter_model_options(
app_label,
self.name_lower,
{self.option_name: self.order_with_respect_to},
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.name)
# Remove a field if we need to
if (
from_model._meta.order_with_respect_to
and not to_model._meta.order_with_respect_to
):
schema_editor.remove_field(
from_model, from_model._meta.get_field("_order")
)
# Add a field if we need to (altering the column is untouched as
# it's likely a rename)
elif (
to_model._meta.order_with_respect_to
and not from_model._meta.order_with_respect_to
):
field = to_model._meta.get_field("_order")
if not field.has_default():
field.default = 0
schema_editor.add_field(
from_model,
field,
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_field(self, model_name, name, app_label):
return self.references_model(model_name, app_label) and (
self.order_with_respect_to is None or name == self.order_with_respect_to
)
def describe(self):
return "Set order_with_respect_to on %s to %s" % (
self.name,
self.order_with_respect_to,
)
@property
def migration_name_fragment(self):
return "alter_%s_order_with_respect_to" % self.name_lower
class AlterModelOptions(ModelOptionOperation):
"""
Set new model options that don't directly affect the database schema
(like verbose_name, permissions, ordering). Python code in migrations
may still need them.
"""
# Model options we want to compare and preserve in an AlterModelOptions op
ALTER_OPTION_KEYS = [
"base_manager_name",
"default_manager_name",
"default_related_name",
"get_latest_by",
"managed",
"ordering",
"permissions",
"default_permissions",
"select_on_save",
"verbose_name",
"verbose_name_plural",
]
def __init__(self, name, options):
self.options = options
super().__init__(name)
def deconstruct(self):
kwargs = {
"name": self.name,
"options": self.options,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.alter_model_options(
app_label,
self.name_lower,
self.options,
self.ALTER_OPTION_KEYS,
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
pass
def describe(self):
return "Change Meta options on %s" % self.name
@property
def migration_name_fragment(self):
return "alter_%s_options" % self.name_lower
class AlterModelManagers(ModelOptionOperation):
"""Alter the model's managers."""
serialization_expand_args = ["managers"]
def __init__(self, name, managers):
self.managers = managers
super().__init__(name)
def deconstruct(self):
return (self.__class__.__qualname__, [self.name, self.managers], {})
def state_forwards(self, app_label, state):
state.alter_model_managers(app_label, self.name_lower, self.managers)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
pass
def describe(self):
return "Change managers on %s" % self.name
@property
def migration_name_fragment(self):
return "alter_%s_managers" % self.name_lower
class IndexOperation(Operation):
option_name = "indexes"
@cached_property
def model_name_lower(self):
return self.model_name.lower()
class AddIndex(IndexOperation):
"""Add an index on a model."""
def __init__(self, model_name, index):
self.model_name = model_name
if not index.name:
raise ValueError(
"Indexes passed to AddIndex operations require a name "
"argument. %r doesn't have one." % index
)
self.index = index
def state_forwards(self, app_label, state):
state.add_index(app_label, self.model_name_lower, self.index)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.add_index(model, self.index)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.remove_index(model, self.index)
def deconstruct(self):
kwargs = {
"model_name": self.model_name,
"index": self.index,
}
return (
self.__class__.__qualname__,
[],
kwargs,
)
def describe(self):
if self.index.expressions:
return "Create index %s on %s on model %s" % (
self.index.name,
", ".join([str(expression) for expression in self.index.expressions]),
self.model_name,
)
return "Create index %s on field(s) %s of model %s" % (
self.index.name,
", ".join(self.index.fields),
self.model_name,
)
@property
def migration_name_fragment(self):
return "%s_%s" % (self.model_name_lower, self.index.name.lower())
class RemoveIndex(IndexOperation):
"""Remove an index from a model."""
def __init__(self, model_name, name):
self.model_name = model_name
self.name = name
def state_forwards(self, app_label, state):
state.remove_index(app_label, self.model_name_lower, self.name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
from_model_state = from_state.models[app_label, self.model_name_lower]
index = from_model_state.get_index_by_name(self.name)
schema_editor.remove_index(model, index)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
to_model_state = to_state.models[app_label, self.model_name_lower]
index = to_model_state.get_index_by_name(self.name)
schema_editor.add_index(model, index)
def deconstruct(self):
kwargs = {
"model_name": self.model_name,
"name": self.name,
}
return (
self.__class__.__qualname__,
[],
kwargs,
)
def describe(self):
return "Remove index %s from %s" % (self.name, self.model_name)
@property
def migration_name_fragment(self):
return "remove_%s_%s" % (self.model_name_lower, self.name.lower())
class RenameIndex(IndexOperation):
"""Rename an index."""
def __init__(self, model_name, new_name, old_name=None, old_fields=None):
if not old_name and not old_fields:
raise ValueError(
"RenameIndex requires one of old_name and old_fields arguments to be "
"set."
)
if old_name and old_fields:
raise ValueError(
"RenameIndex.old_name and old_fields are mutually exclusive."
)
self.model_name = model_name
self.new_name = new_name
self.old_name = old_name
self.old_fields = old_fields
@cached_property
def old_name_lower(self):
return self.old_name.lower()
@cached_property
def new_name_lower(self):
return self.new_name.lower()
def deconstruct(self):
kwargs = {
"model_name": self.model_name,
"new_name": self.new_name,
}
if self.old_name:
kwargs["old_name"] = self.old_name
if self.old_fields:
kwargs["old_fields"] = self.old_fields
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
if self.old_fields:
state.add_index(
app_label,
self.model_name_lower,
models.Index(fields=self.old_fields, name=self.new_name),
)
state.remove_model_options(
app_label,
self.model_name_lower,
AlterIndexTogether.option_name,
self.old_fields,
)
else:
state.rename_index(
app_label, self.model_name_lower, self.old_name, self.new_name
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if not self.allow_migrate_model(schema_editor.connection.alias, model):
return
if self.old_fields:
from_model = from_state.apps.get_model(app_label, self.model_name)
columns = [
from_model._meta.get_field(field).column for field in self.old_fields
]
matching_index_name = schema_editor._constraint_names(
from_model, column_names=columns, index=True
)
if len(matching_index_name) != 1:
raise ValueError(
"Found wrong number (%s) of indexes for %s(%s)."
% (
len(matching_index_name),
from_model._meta.db_table,
", ".join(columns),
)
)
old_index = models.Index(
fields=self.old_fields,
name=matching_index_name[0],
)
else:
from_model_state = from_state.models[app_label, self.model_name_lower]
old_index = from_model_state.get_index_by_name(self.old_name)
# Don't alter when the index name is not changed.
if old_index.name == self.new_name:
return
to_model_state = to_state.models[app_label, self.model_name_lower]
new_index = to_model_state.get_index_by_name(self.new_name)
schema_editor.rename_index(model, old_index, new_index)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if self.old_fields:
# Backward operation with unnamed index is a no-op.
return
self.new_name_lower, self.old_name_lower = (
self.old_name_lower,
self.new_name_lower,
)
self.new_name, self.old_name = self.old_name, self.new_name
self.database_forwards(app_label, schema_editor, from_state, to_state)
self.new_name_lower, self.old_name_lower = (
self.old_name_lower,
self.new_name_lower,
)
self.new_name, self.old_name = self.old_name, self.new_name
def describe(self):
if self.old_name:
return (
f"Rename index {self.old_name} on {self.model_name} to {self.new_name}"
)
return (
f"Rename unnamed index for {self.old_fields} on {self.model_name} to "
f"{self.new_name}"
)
@property
def migration_name_fragment(self):
if self.old_name:
return "rename_%s_%s" % (self.old_name_lower, self.new_name_lower)
return "rename_%s_%s_%s" % (
self.model_name_lower,
"_".join(self.old_fields),
self.new_name_lower,
)
def reduce(self, operation, app_label):
if (
isinstance(operation, RenameIndex)
and self.model_name_lower == operation.model_name_lower
and operation.old_name
and self.new_name_lower == operation.old_name_lower
):
return [
RenameIndex(
self.model_name,
new_name=operation.new_name,
old_name=self.old_name,
old_fields=self.old_fields,
)
]
return super().reduce(operation, app_label)
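# Illustrative (all names hypothetical): the two mutually exclusive forms
# checked in __init__ above.
#
#   # Rename an index that already has a name:
#   RenameIndex("book", new_name="book_title_idx", old_name="book_title_19a32b_idx")
#   # Rename the unnamed index created by index_together on these fields:
#   RenameIndex("book", new_name="book_title_idx", old_fields=("author", "title"))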
class AddConstraint(IndexOperation):
option_name = "constraints"
def __init__(self, model_name, constraint):
self.model_name = model_name
self.constraint = constraint
def state_forwards(self, app_label, state):
state.add_constraint(app_label, self.model_name_lower, self.constraint)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.add_constraint(model, self.constraint)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.remove_constraint(model, self.constraint)
def deconstruct(self):
return (
self.__class__.__name__,
[],
{
"model_name": self.model_name,
"constraint": self.constraint,
},
)
def describe(self):
return "Create constraint %s on model %s" % (
self.constraint.name,
self.model_name,
)
@property
def migration_name_fragment(self):
return "%s_%s" % (self.model_name_lower, self.constraint.name.lower())
class RemoveConstraint(IndexOperation):
option_name = "constraints"
def __init__(self, model_name, name):
self.model_name = model_name
self.name = name
def state_forwards(self, app_label, state):
state.remove_constraint(app_label, self.model_name_lower, self.name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
from_model_state = from_state.models[app_label, self.model_name_lower]
constraint = from_model_state.get_constraint_by_name(self.name)
schema_editor.remove_constraint(model, constraint)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
to_model_state = to_state.models[app_label, self.model_name_lower]
constraint = to_model_state.get_constraint_by_name(self.name)
schema_editor.add_constraint(model, constraint)
def deconstruct(self):
return (
self.__class__.__name__,
[],
{
"model_name": self.model_name,
"name": self.name,
},
)
def describe(self):
return "Remove constraint %s from model %s" % (self.name, self.model_name)
@property
def migration_name_fragment(self):
return "remove_%s_%s" % (self.model_name_lower, self.name.lower())
import collections.abc
import copy
import datetime
import decimal
import operator
import uuid
import warnings
from base64 import b64decode, b64encode
from functools import partialmethod, total_ordering
from django import forms
from django.apps import apps
from django.conf import settings
from django.core import checks, exceptions, validators
from django.db import connection, connections, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin
from django.utils import timezone
from django.utils.datastructures import DictWrapper
from django.utils.dateparse import (
parse_date,
parse_datetime,
parse_duration,
parse_time,
)
from django.utils.duration import duration_microseconds, duration_string
from django.utils.functional import Promise, cached_property
from django.utils.ipv6 import clean_ipv6_address
from django.utils.itercompat import is_iterable
from django.utils.text import capfirst
from django.utils.translation import gettext_lazy as _
__all__ = [
"AutoField",
"BLANK_CHOICE_DASH",
"BigAutoField",
"BigIntegerField",
"BinaryField",
"BooleanField",
"CharField",
"CommaSeparatedIntegerField",
"DateField",
"DateTimeField",
"DecimalField",
"DurationField",
"EmailField",
"Empty",
"Field",
"FilePathField",
"FloatField",
"GenericIPAddressField",
"IPAddressField",
"IntegerField",
"NOT_PROVIDED",
"NullBooleanField",
"PositiveBigIntegerField",
"PositiveIntegerField",
"PositiveSmallIntegerField",
"SlugField",
"SmallAutoField",
"SmallIntegerField",
"TextField",
"TimeField",
"URLField",
"UUIDField",
]
class Empty:
pass
class NOT_PROVIDED:
pass
# The values to use for "blank" in SelectFields. Will be appended to the start
# of most "choices" lists.
BLANK_CHOICE_DASH = [("", "---------")]
def _load_field(app_label, model_name, field_name):
return apps.get_model(app_label, model_name)._meta.get_field(field_name)
# A guide to Field parameters:
#
# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
# "name", except in the case of ForeignKeys, where "_id" is
# appended.
# * db_column: The db_column specified in the model (or None).
# * column: The database column for this field. This is the same as
# "attname", except if db_column is specified.
#
# Code that introspects values, or does other dynamic things, should use
# attname. For example, this gets the primary key value of object "obj":
#
# getattr(obj, opts.pk.attname)
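#
# For example (hypothetical model): a ForeignKey declared as
# publisher = models.ForeignKey("Publisher", ...) has name "publisher",
# attname "publisher_id", and column "publisher_id" unless db_column is set.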
def _empty(of_cls):
new = Empty()
new.__class__ = of_cls
return new
def return_None():
return None
@total_ordering
class Field(RegisterLookupMixin):
"""Base class for all field types"""
# Designates whether empty strings fundamentally are allowed at the
# database level.
empty_strings_allowed = True
empty_values = list(validators.EMPTY_VALUES)
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that Django implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
default_validators = [] # Default set of validators
default_error_messages = {
"invalid_choice": _("Value %(value)r is not a valid choice."),
"null": _("This field cannot be null."),
"blank": _("This field cannot be blank."),
"unique": _("%(model_name)s with this %(field_label)s already exists."),
"unique_for_date": _(
# Translators: The 'lookup_type' is one of 'date', 'year' or
# 'month'. Eg: "Title must be unique for pub_date year"
"%(field_label)s must be unique for "
"%(date_field_label)s %(lookup_type)s."
),
}
system_check_deprecated_details = None
system_check_removed_details = None
# Attributes that don't affect a column definition.
# These attributes are ignored when altering the field.
non_db_attrs = (
"blank",
"choices",
"db_column",
"editable",
"error_messages",
"help_text",
"limit_choices_to",
# Database-level options are not supported, see #21961.
"on_delete",
"related_name",
"related_query_name",
"validators",
"verbose_name",
)
# Field flags
hidden = False
many_to_many = None
many_to_one = None
one_to_many = None
one_to_one = None
related_model = None
descriptor_class = DeferredAttribute
# Generic field type description, usually overridden by subclasses
def _description(self):
return _("Field of type: %(field_type)s") % {
"field_type": self.__class__.__name__
}
description = property(_description)
def __init__(
self,
verbose_name=None,
name=None,
primary_key=False,
max_length=None,
unique=False,
blank=False,
null=False,
db_index=False,
rel=None,
default=NOT_PROVIDED,
editable=True,
serialize=True,
unique_for_date=None,
unique_for_month=None,
unique_for_year=None,
choices=None,
help_text="",
db_column=None,
db_tablespace=None,
auto_created=False,
validators=(),
error_messages=None,
db_comment=None,
):
self.name = name
self.verbose_name = verbose_name # May be set by set_attributes_from_name
self._verbose_name = verbose_name # Store original for deconstruction
self.primary_key = primary_key
self.max_length, self._unique = max_length, unique
self.blank, self.null = blank, null
self.remote_field = rel
self.is_relation = self.remote_field is not None
self.default = default
self.editable = editable
self.serialize = serialize
self.unique_for_date = unique_for_date
self.unique_for_month = unique_for_month
self.unique_for_year = unique_for_year
if isinstance(choices, collections.abc.Iterator):
choices = list(choices)
self.choices = choices
self.help_text = help_text
self.db_index = db_index
self.db_column = db_column
self.db_comment = db_comment
self._db_tablespace = db_tablespace
self.auto_created = auto_created
# Adjust the appropriate creation counter, and save our local copy.
if auto_created:
self.creation_counter = Field.auto_creation_counter
Field.auto_creation_counter -= 1
else:
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
self._validators = list(validators) # Store for deconstruction later
self._error_messages = error_messages # Store for deconstruction later
def __str__(self):
"""
Return "app_label.model_label.field_name" for fields attached to
models.
"""
if not hasattr(self, "model"):
return super().__str__()
model = self.model
return "%s.%s" % (model._meta.label, self.name)
def __repr__(self):
"""Display the module, class, and name of the field."""
path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
name = getattr(self, "name", None)
if name is not None:
return "<%s: %s>" % (path, name)
return "<%s>" % path
def check(self, **kwargs):
return [
*self._check_field_name(),
*self._check_choices(),
*self._check_db_index(),
*self._check_db_comment(**kwargs),
*self._check_null_allowed_for_primary_keys(),
*self._check_backend_specific_checks(**kwargs),
*self._check_validators(),
*self._check_deprecation_details(),
]
def _check_field_name(self):
"""
Check if field name is valid, i.e. 1) does not end with an
underscore, 2) does not contain "__" and 3) is not "pk".
"""
if self.name.endswith("_"):
return [
checks.Error(
"Field names must not end with an underscore.",
obj=self,
id="fields.E001",
)
]
elif LOOKUP_SEP in self.name:
return [
checks.Error(
'Field names must not contain "%s".' % LOOKUP_SEP,
obj=self,
id="fields.E002",
)
]
elif self.name == "pk":
return [
checks.Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=self,
id="fields.E003",
)
]
else:
return []
@classmethod
def _choices_is_value(cls, value):
return isinstance(value, (str, Promise)) or not is_iterable(value)
def _check_choices(self):
if not self.choices:
return []
if not is_iterable(self.choices) or isinstance(self.choices, str):
return [
checks.Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=self,
id="fields.E004",
)
]
choice_max_length = 0
# Expect [group_name, [value, display]]
for choices_group in self.choices:
try:
group_name, group_choices = choices_group
except (TypeError, ValueError):
# Containing non-pairs
break
try:
if not all(
self._choices_is_value(value) and self._choices_is_value(human_name)
for value, human_name in group_choices
):
break
if self.max_length is not None and group_choices:
choice_max_length = max(
[
choice_max_length,
*(
len(value)
for value, _ in group_choices
if isinstance(value, str)
),
]
)
except (TypeError, ValueError):
# No groups, choices in the form [value, display]
value, human_name = group_name, group_choices
if not self._choices_is_value(value) or not self._choices_is_value(
human_name
):
break
if self.max_length is not None and isinstance(value, str):
choice_max_length = max(choice_max_length, len(value))
# Special case: choices=['ab']
if isinstance(choices_group, str):
break
else:
if self.max_length is not None and choice_max_length > self.max_length:
return [
checks.Error(
"'max_length' is too small to fit the longest value "
"in 'choices' (%d characters)." % choice_max_length,
obj=self,
id="fields.E009",
),
]
return []
return [
checks.Error(
"'choices' must be an iterable containing "
"(actual value, human readable name) tuples.",
obj=self,
id="fields.E005",
)
]
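    # For reference (hedged, values hypothetical), the shapes the loop above
    # accepts or rejects:
    #
    #   choices = [("S", "Small"), ("M", "Medium")]               # flat pairs
    #   choices = [("Sizes", [("S", "Small"), ("M", "Medium")])]  # named groups
    #   choices = ["ab"]                                          # rejected: fields.E005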
def _check_db_index(self):
if self.db_index not in (None, True, False):
return [
checks.Error(
"'db_index' must be None, True or False.",
obj=self,
id="fields.E006",
)
]
else:
return []
def _check_db_comment(self, databases=None, **kwargs):
if not self.db_comment or not databases:
return []
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
connection.features.supports_comments
or "supports_comments" in self.model._meta.required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support comments on "
f"columns (db_comment).",
obj=self,
id="fields.W163",
)
)
return errors
def _check_null_allowed_for_primary_keys(self):
if (
self.primary_key
and self.null
and not connection.features.interprets_empty_strings_as_nulls
):
# We cannot reliably check this for backends like Oracle which
# consider NULL and '' to be equal (and thus set up
# character-based fields a little differently).
return [
checks.Error(
"Primary keys must not have null=True.",
hint=(
"Set null=False on the field, or "
"remove primary_key=True argument."
),
obj=self,
id="fields.E007",
)
]
else:
return []
def _check_backend_specific_checks(self, databases=None, **kwargs):
if databases is None:
return []
errors = []
for alias in databases:
if router.allow_migrate_model(alias, self.model):
errors.extend(connections[alias].validation.check_field(self, **kwargs))
return errors
def _check_validators(self):
errors = []
for i, validator in enumerate(self.validators):
if not callable(validator):
errors.append(
checks.Error(
"All 'validators' must be callable.",
hint=(
"validators[{i}] ({repr}) isn't a function or "
"instance of a validator class.".format(
i=i,
repr=repr(validator),
)
),
obj=self,
id="fields.E008",
)
)
return errors
def _check_deprecation_details(self):
if self.system_check_removed_details is not None:
return [
checks.Error(
self.system_check_removed_details.get(
"msg",
"%s has been removed except for support in historical "
"migrations." % self.__class__.__name__,
),
hint=self.system_check_removed_details.get("hint"),
obj=self,
id=self.system_check_removed_details.get("id", "fields.EXXX"),
)
]
elif self.system_check_deprecated_details is not None:
return [
checks.Warning(
self.system_check_deprecated_details.get(
"msg", "%s has been deprecated." % self.__class__.__name__
),
hint=self.system_check_deprecated_details.get("hint"),
obj=self,
id=self.system_check_deprecated_details.get("id", "fields.WXXX"),
)
]
return []
def get_col(self, alias, output_field=None):
if alias == self.model._meta.db_table and (
output_field is None or output_field == self
):
return self.cached_col
from django.db.models.expressions import Col
return Col(alias, self, output_field)
@cached_property
def cached_col(self):
from django.db.models.expressions import Col
return Col(self.model._meta.db_table, self)
def select_format(self, compiler, sql, params):
"""
Custom format for select clauses. For example, GIS columns need to be
selected as AsText(table.col) on MySQL as the table.col data can't be
used by Django.
"""
return sql, params
def deconstruct(self):
"""
Return enough information to recreate the field as a 4-tuple:
* The name of the field on the model, if contribute_to_class() has
been run.
* The import path of the field, including the class, e.g.
django.db.models.IntegerField. This should be the most portable
version, so less specific may be better.
* A list of positional arguments.
* A dict of keyword arguments.
Note that the positional or keyword arguments must contain values of
the following types (including inner values of collection types):
* None, bool, str, int, float, complex, set, frozenset, list, tuple,
dict
* UUID
* datetime.datetime (naive), datetime.date
* top-level classes, top-level functions - will be referenced by their
full import path
* Storage instances - these have their own deconstruct() method
This is because the values here must be serialized into a text format
(possibly new Python code, possibly JSON) and these are the only types
with encoding handlers defined.
There's no need to return the exact way the field was instantiated this
time, just ensure that the resulting field is the same - prefer keyword
arguments over positional ones, and omit parameters with their default
values.
"""
# Short-form way of fetching all the default parameters
keywords = {}
possibles = {
"verbose_name": None,
"primary_key": False,
"max_length": None,
"unique": False,
"blank": False,
"null": False,
"db_index": False,
"default": NOT_PROVIDED,
"editable": True,
"serialize": True,
"unique_for_date": None,
"unique_for_month": None,
"unique_for_year": None,
"choices": None,
"help_text": "",
"db_column": None,
"db_comment": None,
"db_tablespace": None,
"auto_created": False,
"validators": [],
"error_messages": None,
}
attr_overrides = {
"unique": "_unique",
"error_messages": "_error_messages",
"validators": "_validators",
"verbose_name": "_verbose_name",
"db_tablespace": "_db_tablespace",
}
equals_comparison = {"choices", "validators"}
for name, default in possibles.items():
value = getattr(self, attr_overrides.get(name, name))
# Unroll anything iterable for choices into a concrete list
if name == "choices" and isinstance(value, collections.abc.Iterable):
value = list(value)
# Do correct kind of comparison
if name in equals_comparison:
if value != default:
keywords[name] = value
else:
if value is not default:
keywords[name] = value
# Work out path - we shorten it for known Django core fields
path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
if path.startswith("django.db.models.fields.related"):
path = path.replace("django.db.models.fields.related", "django.db.models")
elif path.startswith("django.db.models.fields.files"):
path = path.replace("django.db.models.fields.files", "django.db.models")
elif path.startswith("django.db.models.fields.json"):
path = path.replace("django.db.models.fields.json", "django.db.models")
elif path.startswith("django.db.models.fields.proxy"):
path = path.replace("django.db.models.fields.proxy", "django.db.models")
elif path.startswith("django.db.models.fields"):
path = path.replace("django.db.models.fields", "django.db.models")
# Return basic info - other fields should override this.
return (self.name, path, [], keywords)
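    # Hedged illustration (field bound to a hypothetical model): non-default
    # keyword arguments are kept and the path is shortened for core fields.
    #
    #   title = models.CharField(max_length=100, null=True)
    #   title.deconstruct()
    #   # -> ("title", "django.db.models.CharField", [], {"max_length": 100, "null": True})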
def clone(self):
"""
Uses deconstruct() to clone a new copy of this Field.
Will not preserve any class attachments/attribute names.
"""
name, path, args, kwargs = self.deconstruct()
return self.__class__(*args, **kwargs)
def __eq__(self, other):
# Needed for @total_ordering
if isinstance(other, Field):
return self.creation_counter == other.creation_counter and getattr(
self, "model", None
) == getattr(other, "model", None)
return NotImplemented
def __lt__(self, other):
# This is needed because bisect does not take a comparison function.
# Order by creation_counter first for backward compatibility.
if isinstance(other, Field):
if (
self.creation_counter != other.creation_counter
or not hasattr(self, "model")
and not hasattr(other, "model")
):
return self.creation_counter < other.creation_counter
elif hasattr(self, "model") != hasattr(other, "model"):
return not hasattr(self, "model") # Order no-model fields first
else:
# creation_counter's are equal, compare only models.
return (self.model._meta.app_label, self.model._meta.model_name) < (
other.model._meta.app_label,
other.model._meta.model_name,
)
return NotImplemented
def __hash__(self):
return hash(self.creation_counter)
def __deepcopy__(self, memodict):
# We don't have to deepcopy very much here, since most things are not
# intended to be altered after initial creation.
obj = copy.copy(self)
if self.remote_field:
obj.remote_field = copy.copy(self.remote_field)
if hasattr(self.remote_field, "field") and self.remote_field.field is self:
obj.remote_field.field = obj
memodict[id(self)] = obj
return obj
def __copy__(self):
# We need to avoid hitting __reduce__, so define this
# slightly weird copy construct.
obj = Empty()
obj.__class__ = self.__class__
obj.__dict__ = self.__dict__.copy()
return obj
def __reduce__(self):
"""
Pickling should return the model._meta.fields instance of the field,
not a new copy of that field. So, use the app registry to load the
model and then the field back.
"""
if not hasattr(self, "model"):
# Fields are sometimes used without attaching them to models (for
# example in aggregation). In this case give back a plain field
# instance. The code below will create a new empty instance of
# class self.__class__, then update its dict with self.__dict__
# values - so, this is very close to normal pickle.
state = self.__dict__.copy()
# The _get_default cached_property can't be pickled due to lambda
# usage.
state.pop("_get_default", None)
return _empty, (self.__class__,), state
return _load_field, (
self.model._meta.app_label,
self.model._meta.object_name,
self.name,
)
def get_pk_value_on_save(self, instance):
"""
Hook to generate new PK values on save. This method is called when
saving instances with no primary key value set. If this method returns
something else than None, then the returned value is used when saving
the new instance.
"""
if self.default:
return self.get_default()
return None
def to_python(self, value):
"""
Convert the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Return the converted value. Subclasses should override this.
"""
return value
@cached_property
def error_messages(self):
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, "default_error_messages", {}))
messages.update(self._error_messages or {})
return messages
@cached_property
def validators(self):
"""
Some validators can't be created at field initialization time.
This method provides a way to delay their creation until required.
"""
return [*self.default_validators, *self._validators]
def run_validators(self, value):
if value in self.empty_values:
return
errors = []
for v in self.validators:
try:
v(value)
except exceptions.ValidationError as e:
if hasattr(e, "code") and e.code in self.error_messages:
e.message = self.error_messages[e.code]
errors.extend(e.error_list)
if errors:
raise exceptions.ValidationError(errors)
def validate(self, value, model_instance):
"""
Validate value and raise ValidationError if necessary. Subclasses
should override this to provide validation logic.
"""
if not self.editable:
# Skip validation for non-editable fields.
return
if self.choices is not None and value not in self.empty_values:
for option_key, option_value in self.choices:
if isinstance(option_value, (list, tuple)):
# This is an optgroup, so look inside the group for
# options.
for optgroup_key, optgroup_value in option_value:
if value == optgroup_key:
return
elif value == option_key:
return
raise exceptions.ValidationError(
self.error_messages["invalid_choice"],
code="invalid_choice",
params={"value": value},
)
if value is None and not self.null:
raise exceptions.ValidationError(self.error_messages["null"], code="null")
if not self.blank and value in self.empty_values:
raise exceptions.ValidationError(self.error_messages["blank"], code="blank")
def clean(self, value, model_instance):
"""
Convert the value's type and run validation. Validation errors
from to_python() and validate() are propagated. Return the correct
value if no error is raised.
"""
value = self.to_python(value)
self.validate(value, model_instance)
self.run_validators(value)
return value
def db_type_parameters(self, connection):
return DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
def db_check(self, connection):
"""
Return the database column check constraint for this field, for the
provided connection. Works the same way as db_type() for the case that
get_internal_type() does not map to a preexisting model field.
"""
data = self.db_type_parameters(connection)
try:
return (
connection.data_type_check_constraints[self.get_internal_type()] % data
)
except KeyError:
return None
def db_type(self, connection):
"""
Return the database column data type for this field, for the provided
connection.
"""
# The default implementation of this method looks at the
# backend-specific data_types dictionary, looking up the field by its
# "internal type".
#
# A Field class can implement the get_internal_type() method to specify
# which *preexisting* Django Field class it's most similar to -- i.e.,
# a custom field might be represented by a TEXT column type, which is
# the same as the TextField Django field type, which means the custom
# field's get_internal_type() returns 'TextField'.
#
# But the limitation of the get_internal_type() / data_types approach
# is that it cannot handle database column types that aren't already
# mapped to one of the built-in Django field types. In this case, you
# can implement db_type() instead of get_internal_type() to specify
# exactly which wacky database column type you want to use.
data = self.db_type_parameters(connection)
try:
column_type = connection.data_types[self.get_internal_type()]
except KeyError:
return None
else:
# column_type is either a single-parameter function or a string.
if callable(column_type):
return column_type(data)
return column_type % data
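    # A minimal sketch of the escape hatch described above (assumed custom
    # field, not part of Django): return the exact column type directly and
    # skip the data_types mapping.
    #
    #   class MySpecialField(models.Field):
    #       def db_type(self, connection):
    #           return "mytype"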
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. For example, this method is called by ForeignKey and OneToOneField
to determine its data type.
"""
return self.db_type(connection)
def cast_db_type(self, connection):
"""Return the data type to use in the Cast() function."""
db_type = connection.ops.cast_data_types.get(self.get_internal_type())
if db_type:
return db_type % self.db_type_parameters(connection)
return self.db_type(connection)
def db_parameters(self, connection):
"""
Extension of db_type(), providing a range of different return values
(type, checks). This will look at db_type(), allowing custom model
fields to override it.
"""
type_string = self.db_type(connection)
check_string = self.db_check(connection)
return {
"type": type_string,
"check": check_string,
}
def db_type_suffix(self, connection):
return connection.data_types_suffix.get(self.get_internal_type())
def get_db_converters(self, connection):
if hasattr(self, "from_db_value"):
return [self.from_db_value]
return []
@property
def unique(self):
return self._unique or self.primary_key
@property
def db_tablespace(self):
return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
@property
def db_returning(self):
"""
Private API intended only to be used by Django itself. Currently only
the PostgreSQL backend supports returning multiple fields on a model.
"""
return False
def set_attributes_from_name(self, name):
self.name = self.name or name
self.attname, self.column = self.get_attname_column()
self.concrete = self.column is not None
if self.verbose_name is None and self.name:
self.verbose_name = self.name.replace("_", " ")
def contribute_to_class(self, cls, name, private_only=False):
"""
Register the field with the model class it belongs to.
If private_only is True, create a separate instance of this field
for every subclass of cls, even if cls is not an abstract model.
"""
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self, private=private_only)
if self.column:
setattr(cls, self.attname, self.descriptor_class(self))
if self.choices is not None:
# Don't override a get_FOO_display() method defined explicitly on
# this class, but don't check methods derived from inheritance, to
# allow overriding inherited choices. For more complex inheritance
# structures users should override contribute_to_class().
if "get_%s_display" % self.name not in cls.__dict__:
setattr(
cls,
"get_%s_display" % self.name,
partialmethod(cls._get_FIELD_display, field=self),
)
def get_filter_kwargs_for_object(self, obj):
"""
Return a dict that when passed as kwargs to self.model.filter(), would
yield all instances having the same value for this field as obj has.
"""
return {self.name: getattr(obj, self.attname)}
def get_attname(self):
return self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
def get_internal_type(self):
return self.__class__.__name__
def pre_save(self, model_instance, add):
"""Return field's value just before saving."""
return getattr(model_instance, self.attname)
def get_prep_value(self, value):
"""Perform preliminary non-db specific value checks and conversions."""
if isinstance(value, Promise):
value = value._proxy____cast()
return value
def get_db_prep_value(self, value, connection, prepared=False):
"""
Return field's value prepared for interacting with the database backend.
Used by the default implementations of get_db_prep_save().
"""
if not prepared:
value = self.get_prep_value(value)
return value
def get_db_prep_save(self, value, connection):
"""Return field's value prepared for saving into a database."""
if hasattr(value, "as_sql"):
return value
return self.get_db_prep_value(value, connection=connection, prepared=False)
def has_default(self):
"""Return a boolean of whether this field has a default value."""
return self.default is not NOT_PROVIDED
def get_default(self):
"""Return the default value for this field."""
return self._get_default()
@cached_property
def _get_default(self):
if self.has_default():
if callable(self.default):
return self.default
return lambda: self.default
if (
not self.empty_strings_allowed
or self.null
and not connection.features.interprets_empty_strings_as_nulls
):
return return_None
return str # return empty string
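    # Hedged examples of what the factory above produces (assuming a backend
    # that does not interpret empty strings as NULL):
    #
    #   models.CharField(max_length=10).get_default()             # -> ""
    #   models.CharField(max_length=10, null=True).get_default()  # -> None
    #   models.IntegerField(default=lambda: 42).get_default()     # -> 42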
def get_choices(
self,
include_blank=True,
blank_choice=BLANK_CHOICE_DASH,
limit_choices_to=None,
ordering=(),
):
"""
Return choices with a default blank choices included, for use
as <select> choices for this field.
"""
if self.choices is not None:
choices = list(self.choices)
if include_blank:
blank_defined = any(
choice in ("", None) for choice, _ in self.flatchoices
)
if not blank_defined:
choices = blank_choice + choices
return choices
rel_model = self.remote_field.model
limit_choices_to = limit_choices_to or self.get_limit_choices_to()
choice_func = operator.attrgetter(
self.remote_field.get_related_field().attname
if hasattr(self.remote_field, "get_related_field")
else "pk"
)
qs = rel_model._default_manager.complex_filter(limit_choices_to)
if ordering:
qs = qs.order_by(*ordering)
return (blank_choice if include_blank else []) + [
(choice_func(x), str(x)) for x in qs
]
def value_to_string(self, obj):
"""
Return a string value of this field from the passed obj.
This is used by the serialization framework.
"""
return str(self.value_from_object(obj))
def _get_flatchoices(self):
"""Flattened version of choices tuple."""
if self.choices is None:
return []
flat = []
for choice, value in self.choices:
if isinstance(value, (list, tuple)):
flat.extend(value)
else:
flat.append((choice, value))
return flat
flatchoices = property(_get_flatchoices)
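    # Hedged example of the flattening above (hypothetical choices):
    #
    #   choices = [("Sizes", [("S", "Small"), ("M", "Medium")]), ("L", "Large")]
    #   # flatchoices -> [("S", "Small"), ("M", "Medium"), ("L", "Large")]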
def save_form_data(self, instance, data):
setattr(instance, self.name, data)
def formfield(self, form_class=None, choices_form_class=None, **kwargs):
"""Return a django.forms.Field instance for this field."""
defaults = {
"required": not self.blank,
"label": capfirst(self.verbose_name),
"help_text": self.help_text,
}
if self.has_default():
if callable(self.default):
defaults["initial"] = self.default
defaults["show_hidden_initial"] = True
else:
defaults["initial"] = self.get_default()
if self.choices is not None:
# Fields with choices get special treatment.
include_blank = self.blank or not (
self.has_default() or "initial" in kwargs
)
defaults["choices"] = self.get_choices(include_blank=include_blank)
defaults["coerce"] = self.to_python
if self.null:
defaults["empty_value"] = None
if choices_form_class is not None:
form_class = choices_form_class
else:
form_class = forms.TypedChoiceField
# Many of the subclass-specific formfield arguments (min_value,
# max_value) don't apply for choice fields, so be sure to only pass
# the values that TypedChoiceField will understand.
for k in list(kwargs):
if k not in (
"coerce",
"empty_value",
"choices",
"required",
"widget",
"label",
"initial",
"help_text",
"error_messages",
"show_hidden_initial",
"disabled",
):
del kwargs[k]
defaults.update(kwargs)
if form_class is None:
form_class = forms.CharField
return form_class(**defaults)
def value_from_object(self, obj):
"""Return the value of this field in the given model instance."""
return getattr(obj, self.attname)
class BooleanField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be either True or False."),
"invalid_nullable": _("“%(value)s” value must be either True, False, or None."),
}
description = _("Boolean (Either True or False)")
def get_internal_type(self):
return "BooleanField"
def to_python(self, value):
if self.null and value in self.empty_values:
return None
if value in (True, False):
# 1/0 are equal to True/False. bool() converts former to latter.
return bool(value)
if value in ("t", "True", "1"):
return True
if value in ("f", "False", "0"):
return False
raise exceptions.ValidationError(
self.error_messages["invalid_nullable" if self.null else "invalid"],
code="invalid",
params={"value": value},
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return self.to_python(value)
def formfield(self, **kwargs):
if self.choices is not None:
include_blank = not (self.has_default() or "initial" in kwargs)
defaults = {"choices": self.get_choices(include_blank=include_blank)}
else:
form_class = forms.NullBooleanField if self.null else forms.BooleanField
# In HTML checkboxes, 'required' means "must be checked" which is
# different from the choices case ("must select some value").
# required=False allows unchecked checkboxes.
defaults = {"form_class": form_class, "required": False}
return super().formfield(**{**defaults, **kwargs})
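# Illustrative conversions performed by BooleanField.to_python() above
# (a sketch, not part of the source):
#
#   BooleanField().to_python("1")           # -> True
#   BooleanField().to_python("False")       # -> False
#   BooleanField(null=True).to_python("")   # -> None
#   BooleanField().to_python("maybe")       # raises ValidationError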
class CharField(Field):
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
@property
def description(self):
if self.max_length is not None:
return _("String (up to %(max_length)s)")
else:
return _("String (unlimited)")
def check(self, **kwargs):
databases = kwargs.get("databases") or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
*self._check_max_length_attribute(**kwargs),
]
def _check_max_length_attribute(self, **kwargs):
if self.max_length is None:
if (
connection.features.supports_unlimited_charfield
or "supports_unlimited_charfield"
in self.model._meta.required_db_features
):
return []
return [
checks.Error(
"CharFields must define a 'max_length' attribute.",
obj=self,
id="fields.E120",
)
]
elif (
not isinstance(self.max_length, int)
or isinstance(self.max_length, bool)
or self.max_length <= 0
):
return [
checks.Error(
"'max_length' must be a positive integer.",
obj=self,
id="fields.E121",
)
]
else:
return []
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None
or "supports_collation_on_charfield"
in self.model._meta.required_db_features
or connection.features.supports_collation_on_charfield
):
errors.append(
checks.Error(
"%s does not support a database collation on "
"CharFields." % connection.display_name,
obj=self,
id="fields.E190",
),
)
return errors
def cast_db_type(self, connection):
if self.max_length is None:
return connection.ops.cast_char_field_without_max_length
return super().cast_db_type(connection)
def db_parameters(self, connection):
db_params = super().db_parameters(connection)
db_params["collation"] = self.db_collation
return db_params
def get_internal_type(self):
return "CharField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
# the value in the form field (to pass into widget for example).
defaults = {"max_length": self.max_length}
# TODO: Handle multiple backends with different feature flags.
if self.null and not connection.features.interprets_empty_strings_as_nulls:
defaults["empty_value"] = None
defaults.update(kwargs)
return super().formfield(**defaults)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs["db_collation"] = self.db_collation
return name, path, args, kwargs
class CommaSeparatedIntegerField(CharField):
default_validators = [validators.validate_comma_separated_integer_list]
description = _("Comma-separated integers")
system_check_removed_details = {
"msg": (
"CommaSeparatedIntegerField is removed except for support in "
"historical migrations."
),
"hint": (
"Use CharField(validators=[validate_comma_separated_integer_list]) "
"instead."
),
"id": "fields.E901",
}
def _to_naive(value):
if timezone.is_aware(value):
value = timezone.make_naive(value, datetime.timezone.utc)
return value
def _get_naive_now():
return _to_naive(timezone.now())
class DateTimeCheckMixin:
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_mutually_exclusive_options(),
*self._check_fix_default_value(),
]
def _check_mutually_exclusive_options(self):
# auto_now, auto_now_add, and default are mutually exclusive
# options. The use of more than one of these options together
# will trigger an Error
mutually_exclusive_options = [
self.auto_now_add,
self.auto_now,
self.has_default(),
]
enabled_options = [
option not in (None, False) for option in mutually_exclusive_options
].count(True)
if enabled_options > 1:
return [
checks.Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=self,
id="fields.E160",
)
]
else:
return []
def _check_fix_default_value(self):
return []
# Concrete subclasses use this in their implementations of
# _check_fix_default_value().
def _check_if_value_fixed(self, value, now=None):
"""
Check if the given value appears to have been provided as a "fixed"
time value, and include a warning in the returned list if it does. The
value argument must be a date object or aware/naive datetime object. If
now is provided, it must be a naive datetime object.
"""
if now is None:
now = _get_naive_now()
offset = datetime.timedelta(seconds=10)
lower = now - offset
upper = now + offset
if isinstance(value, datetime.datetime):
value = _to_naive(value)
else:
assert isinstance(value, datetime.date)
lower = lower.date()
upper = upper.date()
if lower <= value <= upper:
return [
checks.Warning(
"Fixed default value provided.",
hint=(
"It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`"
),
obj=self,
id="fields.W161",
)
]
return []
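# Hedged example of the check above (hypothetical field): calling the
# function freezes the value at import time and triggers fields.W161.
#
#   created = models.DateTimeField(default=timezone.now())  # warned: fixed value
#   created = models.DateTimeField(default=timezone.now)    # ok: evaluated when needed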
class DateField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid date format. It must be "
"in YYYY-MM-DD format."
),
"invalid_date": _(
"“%(value)s” value has the correct format (YYYY-MM-DD) "
"but it is an invalid date."
),
}
description = _("Date (without time)")
def __init__(
self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs
):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs["editable"] = False
kwargs["blank"] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
value = _to_naive(value).date()
elif isinstance(value, datetime.date):
pass
else:
# No explicit date / datetime value -- no checks necessary
return []
# At this point, value is a date object.
return self._check_if_value_fixed(value)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now:
kwargs["auto_now"] = True
if self.auto_now_add:
kwargs["auto_now_add"] = True
if self.auto_now or self.auto_now_add:
del kwargs["editable"]
del kwargs["blank"]
return name, path, args, kwargs
def get_internal_type(self):
return "DateField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
if settings.USE_TZ and timezone.is_aware(value):
# Convert aware datetimes to the default time zone
# before casting them to dates (#17742).
default_timezone = timezone.get_default_timezone()
value = timezone.make_naive(value, default_timezone)
return value.date()
if isinstance(value, datetime.date):
return value
try:
parsed = parse_date(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_date"],
code="invalid_date",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.date.today()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def contribute_to_class(self, cls, name, **kwargs):
super().contribute_to_class(cls, name, **kwargs)
if not self.null:
setattr(
cls,
"get_next_by_%s" % self.name,
partialmethod(
cls._get_next_or_previous_by_FIELD, field=self, is_next=True
),
)
setattr(
cls,
"get_previous_by_%s" % self.name,
partialmethod(
cls._get_next_or_previous_by_FIELD, field=self, is_next=False
),
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts dates into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DateField,
**kwargs,
}
)
class DateTimeField(DateField):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."
),
"invalid_date": _(
"“%(value)s” value has the correct format "
"(YYYY-MM-DD) but it is an invalid date."
),
"invalid_datetime": _(
"“%(value)s” value has the correct format "
"(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
"but it is an invalid date/time."
),
}
description = _("Date (with time)")
# __init__ is inherited from DateField
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, (datetime.datetime, datetime.date)):
return self._check_if_value_fixed(value)
# No explicit date / datetime value -- no checks necessary.
return []
def get_internal_type(self):
return "DateTimeField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
return value
if isinstance(value, datetime.date):
value = datetime.datetime(value.year, value.month, value.day)
if settings.USE_TZ:
# For backwards compatibility, interpret naive datetimes in
# local time. This won't work during DST change, but we can't
# do much about it, so we let the exceptions percolate up the
# call stack.
warnings.warn(
"DateTimeField %s.%s received a naive datetime "
"(%s) while time zone support is active."
% (self.model.__name__, self.name, value),
RuntimeWarning,
)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
try:
parsed = parse_datetime(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_datetime"],
code="invalid_datetime",
params={"value": value},
)
try:
parsed = parse_date(value)
if parsed is not None:
return datetime.datetime(parsed.year, parsed.month, parsed.day)
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_date"],
code="invalid_date",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = timezone.now()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
# contribute_to_class is inherited from DateField, it registers
# get_next_by_FOO and get_prev_by_FOO
def get_prep_value(self, value):
value = super().get_prep_value(value)
value = self.to_python(value)
if value is not None and settings.USE_TZ and timezone.is_naive(value):
# For backwards compatibility, interpret naive datetimes in local
# time. This won't work during DST change, but we can't do much
# about it, so we let the exceptions percolate up the call stack.
try:
name = "%s.%s" % (self.model.__name__, self.name)
except AttributeError:
name = "(unbound)"
warnings.warn(
"DateTimeField %s received a naive datetime (%s)"
" while time zone support is active." % (name, value),
RuntimeWarning,
)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
def get_db_prep_value(self, value, connection, prepared=False):
# Casts datetimes into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datetimefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DateTimeField,
**kwargs,
}
)
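# A rough sketch of the naive-datetime path above, assuming
# settings.USE_TZ = True and TIME_ZONE = "UTC": get_prep_value() warns and
# then reinterprets the value in the default time zone, roughly
#
#     >>> DateTimeField().get_prep_value(datetime.datetime(2023, 5, 1, 12, 0))
#     RuntimeWarning: DateTimeField (unbound) received a naive datetime ...
#     datetime.datetime(2023, 5, 1, 12, 0, tzinfo=zoneinfo.ZoneInfo(key='UTC'))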
class DecimalField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be a decimal number."),
}
description = _("Decimal number")
def __init__(
self,
verbose_name=None,
name=None,
max_digits=None,
decimal_places=None,
**kwargs,
):
self.max_digits, self.decimal_places = max_digits, decimal_places
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super().check(**kwargs)
digits_errors = [
*self._check_decimal_places(),
*self._check_max_digits(),
]
if not digits_errors:
errors.extend(self._check_decimal_places_and_max_digits(**kwargs))
else:
errors.extend(digits_errors)
return errors
def _check_decimal_places(self):
try:
decimal_places = int(self.decimal_places)
if decimal_places < 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=self,
id="fields.E130",
)
]
except ValueError:
return [
checks.Error(
"'decimal_places' must be a non-negative integer.",
obj=self,
id="fields.E131",
)
]
else:
return []
def _check_max_digits(self):
try:
max_digits = int(self.max_digits)
if max_digits <= 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=self,
id="fields.E132",
)
]
except ValueError:
return [
checks.Error(
"'max_digits' must be a positive integer.",
obj=self,
id="fields.E133",
)
]
else:
return []
def _check_decimal_places_and_max_digits(self, **kwargs):
if int(self.decimal_places) > int(self.max_digits):
return [
checks.Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=self,
id="fields.E134",
)
]
return []
@cached_property
def validators(self):
return super().validators + [
validators.DecimalValidator(self.max_digits, self.decimal_places)
]
@cached_property
def context(self):
return decimal.Context(prec=self.max_digits)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.max_digits is not None:
kwargs["max_digits"] = self.max_digits
if self.decimal_places is not None:
kwargs["decimal_places"] = self.decimal_places
return name, path, args, kwargs
def get_internal_type(self):
return "DecimalField"
def to_python(self, value):
if value is None:
return value
try:
if isinstance(value, float):
decimal_value = self.context.create_decimal_from_float(value)
else:
decimal_value = decimal.Decimal(value)
except (decimal.InvalidOperation, TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
if not decimal_value.is_finite():
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
return decimal_value
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
if hasattr(value, "as_sql"):
return value
return connection.ops.adapt_decimalfield_value(
value, self.max_digits, self.decimal_places
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"max_digits": self.max_digits,
"decimal_places": self.decimal_places,
"form_class": forms.DecimalField,
**kwargs,
}
)
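# A quick sketch of the conversion rules above: floats are routed through
# a decimal.Context limited to max_digits, so binary float noise is
# rounded away, while non-finite values are rejected:
#
#     >>> f = DecimalField(max_digits=5, decimal_places=2)
#     >>> f.to_python(0.1)
#     Decimal('0.10000')
#     >>> f.to_python("NaN")          # parses as Decimal, fails is_finite()
#     Traceback (most recent call last):
#         ...
#     ValidationError: ...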
class DurationField(Field):
"""
Store timedelta objects.
Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint
of microseconds on other databases.
"""
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"[DD] [[HH:]MM:]ss[.uuuuuu] format."
)
}
description = _("Duration")
def get_internal_type(self):
return "DurationField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.timedelta):
return value
try:
parsed = parse_duration(value)
except ValueError:
pass
else:
if parsed is not None:
return parsed
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def get_db_prep_value(self, value, connection, prepared=False):
if connection.features.has_native_duration_field:
return value
if value is None:
return None
return duration_microseconds(value)
def get_db_converters(self, connection):
converters = []
if not connection.features.has_native_duration_field:
converters.append(connection.ops.convert_durationfield_value)
return converters + super().get_db_converters(connection)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else duration_string(val)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DurationField,
**kwargs,
}
)
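# A quick sketch of the storage strategy above: on backends without a
# native interval type, the value round-trips as a bigint of microseconds,
# e.g.
#
#     >>> duration_microseconds(datetime.timedelta(days=1, seconds=1))
#     86401000000
#
# with connection.ops.convert_durationfield_value turning it back into a
# timedelta when rows are read.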
class EmailField(CharField):
default_validators = [validators.validate_email]
description = _("Email address")
def __init__(self, *args, **kwargs):
# max_length=254 to be compliant with RFCs 3696 and 5321
kwargs.setdefault("max_length", 254)
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
        # We do not exclude max_length if it matches the default, as we want
        # to change the default in the future.
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause email validation to be performed
# twice.
return super().formfield(
**{
"form_class": forms.EmailField,
**kwargs,
}
)
class FilePathField(Field):
description = _("File path")
def __init__(
self,
verbose_name=None,
name=None,
path="",
match=None,
recursive=False,
allow_files=True,
allow_folders=False,
**kwargs,
):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
kwargs.setdefault("max_length", 100)
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_allowing_files_or_folders(**kwargs),
]
def _check_allowing_files_or_folders(self, **kwargs):
if not self.allow_files and not self.allow_folders:
return [
checks.Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' "
"set to True.",
obj=self,
id="fields.E140",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.path != "":
kwargs["path"] = self.path
if self.match is not None:
kwargs["match"] = self.match
if self.recursive is not False:
kwargs["recursive"] = self.recursive
if self.allow_files is not True:
kwargs["allow_files"] = self.allow_files
if self.allow_folders is not False:
kwargs["allow_folders"] = self.allow_folders
if kwargs.get("max_length") == 100:
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"path": self.path() if callable(self.path) else self.path,
"match": self.match,
"recursive": self.recursive,
"form_class": forms.FilePathField,
"allow_files": self.allow_files,
"allow_folders": self.allow_folders,
**kwargs,
}
)
def get_internal_type(self):
return "FilePathField"
class FloatField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be a float."),
}
description = _("Floating point number")
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return float(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_internal_type(self):
return "FloatField"
def to_python(self, value):
if value is None:
return value
try:
return float(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.FloatField,
**kwargs,
}
)
class IntegerField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be an integer."),
}
description = _("Integer")
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_max_length_warning(),
]
def _check_max_length_warning(self):
if self.max_length is not None:
return [
checks.Warning(
"'max_length' is ignored when used with %s."
% self.__class__.__name__,
hint="Remove 'max_length' from field",
obj=self,
id="fields.W122",
)
]
return []
@cached_property
def validators(self):
# These validators can't be added at field initialization time since
# they're based on values retrieved from `connection`.
validators_ = super().validators
internal_type = self.get_internal_type()
min_value, max_value = connection.ops.integer_field_range(internal_type)
if min_value is not None and not any(
(
isinstance(validator, validators.MinValueValidator)
and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
)
>= min_value
)
for validator in validators_
):
validators_.append(validators.MinValueValidator(min_value))
if max_value is not None and not any(
(
isinstance(validator, validators.MaxValueValidator)
and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
)
<= max_value
)
for validator in validators_
):
validators_.append(validators.MaxValueValidator(max_value))
return validators_
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return int(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_db_prep_value(self, value, connection, prepared=False):
value = super().get_db_prep_value(value, connection, prepared)
return connection.ops.adapt_integerfield_value(value, self.get_internal_type())
def get_internal_type(self):
return "IntegerField"
def to_python(self, value):
if value is None:
return value
try:
return int(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.IntegerField,
**kwargs,
}
)
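# A rough sketch of the lazy validators above, assuming a configured
# default connection: the allowed range comes from the backend, so on most
# backends a plain IntegerField ends up with
#
#     >>> connection.ops.integer_field_range("IntegerField")
#     (-2147483648, 2147483647)
#     >>> [type(v).__name__ for v in IntegerField().validators]
#     ['MinValueValidator', 'MaxValueValidator']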
class BigIntegerField(IntegerField):
description = _("Big (8 byte) integer")
MAX_BIGINT = 9223372036854775807
def get_internal_type(self):
return "BigIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": -BigIntegerField.MAX_BIGINT - 1,
"max_value": BigIntegerField.MAX_BIGINT,
**kwargs,
}
)
class SmallIntegerField(IntegerField):
description = _("Small integer")
def get_internal_type(self):
return "SmallIntegerField"
class IPAddressField(Field):
empty_strings_allowed = False
description = _("IPv4 address")
system_check_removed_details = {
"msg": (
"IPAddressField has been removed except for support in "
"historical migrations."
),
"hint": "Use GenericIPAddressField instead.",
"id": "fields.E900",
}
def __init__(self, *args, **kwargs):
kwargs["max_length"] = 15
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def get_internal_type(self):
return "IPAddressField"
class GenericIPAddressField(Field):
empty_strings_allowed = False
description = _("IP address")
default_error_messages = {}
def __init__(
self,
verbose_name=None,
name=None,
protocol="both",
unpack_ipv4=False,
*args,
**kwargs,
):
self.unpack_ipv4 = unpack_ipv4
self.protocol = protocol
(
self.default_validators,
invalid_error_message,
) = validators.ip_address_validators(protocol, unpack_ipv4)
self.default_error_messages["invalid"] = invalid_error_message
kwargs["max_length"] = 39
super().__init__(verbose_name, name, *args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_blank_and_null_values(**kwargs),
]
def _check_blank_and_null_values(self, **kwargs):
if not getattr(self, "null", False) and getattr(self, "blank", False):
return [
checks.Error(
"GenericIPAddressFields cannot have blank=True if null=False, "
"as blank values are stored as nulls.",
obj=self,
id="fields.E150",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.unpack_ipv4 is not False:
kwargs["unpack_ipv4"] = self.unpack_ipv4
if self.protocol != "both":
kwargs["protocol"] = self.protocol
if kwargs.get("max_length") == 39:
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return "GenericIPAddressField"
def to_python(self, value):
if value is None:
return None
if not isinstance(value, str):
value = str(value)
value = value.strip()
if ":" in value:
return clean_ipv6_address(
value, self.unpack_ipv4, self.error_messages["invalid"]
)
return value
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_ipaddressfield_value(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
if value and ":" in value:
try:
return clean_ipv6_address(value, self.unpack_ipv4)
except exceptions.ValidationError:
pass
return str(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"protocol": self.protocol,
"form_class": forms.GenericIPAddressField,
**kwargs,
}
)
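# A quick sketch of the normalization above: with unpack_ipv4=True (only
# allowed together with protocol="both"), IPv4-mapped IPv6 input is
# unpacked to its IPv4 form:
#
#     >>> f = GenericIPAddressField(protocol="both", unpack_ipv4=True)
#     >>> f.to_python("::ffff:192.0.2.1")
#     '192.0.2.1'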
class NullBooleanField(BooleanField):
default_error_messages = {
"invalid": _("“%(value)s” value must be either None, True or False."),
"invalid_nullable": _("“%(value)s” value must be either None, True or False."),
}
description = _("Boolean (Either True, False or None)")
system_check_removed_details = {
"msg": (
"NullBooleanField is removed except for support in historical "
"migrations."
),
"hint": "Use BooleanField(null=True) instead.",
"id": "fields.E903",
}
def __init__(self, *args, **kwargs):
kwargs["null"] = True
kwargs["blank"] = True
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["null"]
del kwargs["blank"]
return name, path, args, kwargs
class PositiveIntegerRelDbTypeMixin:
def __init_subclass__(cls, **kwargs):
super().__init_subclass__(**kwargs)
if not hasattr(cls, "integer_field_class"):
cls.integer_field_class = next(
(
parent
for parent in cls.__mro__[1:]
if issubclass(parent, IntegerField)
),
None,
)
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. In most cases, a foreign key pointing to a positive integer
primary key will have an integer column data type but some databases
(e.g. MySQL) have an unsigned integer type. In that case
(related_fields_match_type=True), the primary key should return its
db_type.
"""
if connection.features.related_fields_match_type:
return self.db_type(connection)
else:
return self.integer_field_class().db_type(connection=connection)
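# A rough sketch of rel_db_type() above: on a backend where
# related_fields_match_type is False (e.g. PostgreSQL), a ForeignKey to a
# positive integer field uses the plain signed column type, roughly
#
#     >>> PositiveIntegerField().rel_db_type(connection)
#     'integer'
#
# while on MySQL (related_fields_match_type=True) it mirrors db_type(),
# keeping the unsigned type.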
class PositiveBigIntegerField(PositiveIntegerRelDbTypeMixin, BigIntegerField):
description = _("Positive big integer")
def get_internal_type(self):
return "PositiveBigIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
description = _("Positive integer")
def get_internal_type(self):
return "PositiveIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, SmallIntegerField):
description = _("Positive small integer")
def get_internal_type(self):
return "PositiveSmallIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class SlugField(CharField):
default_validators = [validators.validate_slug]
description = _("Slug (up to %(max_length)s)")
def __init__(
self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs
):
self.allow_unicode = allow_unicode
if self.allow_unicode:
self.default_validators = [validators.validate_unicode_slug]
super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 50:
del kwargs["max_length"]
if self.db_index is False:
kwargs["db_index"] = False
else:
del kwargs["db_index"]
if self.allow_unicode is not False:
kwargs["allow_unicode"] = self.allow_unicode
return name, path, args, kwargs
def get_internal_type(self):
return "SlugField"
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.SlugField,
"allow_unicode": self.allow_unicode,
**kwargs,
}
)
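# A quick sketch of the validator swap above:
#
#     >>> validators.validate_slug("héllo")          # raises ValidationError
#     >>> validators.validate_unicode_slug("héllo")  # accepted
#
# so SlugField(allow_unicode=True) admits non-ASCII letters while the
# default field stays ASCII-only.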
class TextField(Field):
description = _("Text")
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
def check(self, **kwargs):
databases = kwargs.get("databases") or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
]
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None
or "supports_collation_on_textfield"
in self.model._meta.required_db_features
or connection.features.supports_collation_on_textfield
):
errors.append(
checks.Error(
"%s does not support a database collation on "
"TextFields." % connection.display_name,
obj=self,
id="fields.E190",
),
)
return errors
def db_parameters(self, connection):
db_params = super().db_parameters(connection)
db_params["collation"] = self.db_collation
return db_params
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
# the value in the form field (to pass into widget for example).
return super().formfield(
**{
"max_length": self.max_length,
**({} if self.choices is not None else {"widget": forms.Textarea}),
**kwargs,
}
)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs["db_collation"] = self.db_collation
return name, path, args, kwargs
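# A rough sketch of db_collation above, assuming a collation named
# "case_insensitive" exists on the target database: the per-field
# declaration is checked per connection by _check_db_collation(), so
#
#     >>> notes = TextField(db_collation="case_insensitive")
#
# reports fields.E190 only for connections that cannot apply a collation
# to text columns.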
class TimeField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"HH:MM[:ss[.uuuuuu]] format."
),
"invalid_time": _(
"“%(value)s” value has the correct format "
"(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."
),
}
description = _("Time")
def __init__(
self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs
):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs["editable"] = False
kwargs["blank"] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
now = None
elif isinstance(value, datetime.time):
now = _get_naive_now()
# This will not use the right date in the race condition where now
# is just before the date change and value is just past 0:00.
value = datetime.datetime.combine(now.date(), value)
else:
# No explicit time / datetime value -- no checks necessary
return []
# At this point, value is a datetime object.
return self._check_if_value_fixed(value, now=now)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now is not False:
kwargs["auto_now"] = self.auto_now
if self.auto_now_add is not False:
kwargs["auto_now_add"] = self.auto_now_add
if self.auto_now or self.auto_now_add:
del kwargs["blank"]
del kwargs["editable"]
return name, path, args, kwargs
def get_internal_type(self):
return "TimeField"
def to_python(self, value):
if value is None:
return None
if isinstance(value, datetime.time):
return value
if isinstance(value, datetime.datetime):
# Not usually a good idea to pass in a datetime here (it loses
# information), but this can be a side-effect of interacting with a
# database backend (e.g. Oracle), so we'll be accommodating.
return value.time()
try:
parsed = parse_time(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_time"],
code="invalid_time",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.datetime.now().time()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts times into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_timefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.TimeField,
**kwargs,
}
)
class URLField(CharField):
default_validators = [validators.URLValidator()]
description = _("URL")
def __init__(self, verbose_name=None, name=None, **kwargs):
kwargs.setdefault("max_length", 200)
super().__init__(verbose_name, name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 200:
del kwargs["max_length"]
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause URL validation to be performed
# twice.
return super().formfield(
**{
"form_class": forms.URLField,
**kwargs,
}
)
class BinaryField(Field):
description = _("Raw binary data")
empty_values = [None, b""]
def __init__(self, *args, **kwargs):
kwargs.setdefault("editable", False)
super().__init__(*args, **kwargs)
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
def check(self, **kwargs):
return [*super().check(**kwargs), *self._check_str_default_value()]
def _check_str_default_value(self):
if self.has_default() and isinstance(self.default, str):
return [
checks.Error(
"BinaryField's default cannot be a string. Use bytes "
"content instead.",
obj=self,
id="fields.E170",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.editable:
kwargs["editable"] = True
else:
del kwargs["editable"]
return name, path, args, kwargs
def get_internal_type(self):
return "BinaryField"
def get_placeholder(self, value, compiler, connection):
return connection.ops.binary_placeholder_sql(value)
def get_default(self):
if self.has_default() and not callable(self.default):
return self.default
default = super().get_default()
if default == "":
return b""
return default
def get_db_prep_value(self, value, connection, prepared=False):
value = super().get_db_prep_value(value, connection, prepared)
if value is not None:
return connection.Database.Binary(value)
return value
def value_to_string(self, obj):
"""Binary data is serialized as base64"""
return b64encode(self.value_from_object(obj)).decode("ascii")
def to_python(self, value):
# If it's a string, it should be base64-encoded data
if isinstance(value, str):
return memoryview(b64decode(value.encode("ascii")))
return value
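# A quick sketch of the serialization round trip above: value_to_string()
# emits base64 text and to_python() restores a memoryview, e.g.
#
#     >>> BinaryField().to_python("AAE=")   # base64 of b"\x00\x01"
#     <memory at 0x...>
#     >>> bytes(_)
#     b'\x00\x01'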
class UUIDField(Field):
default_error_messages = {
"invalid": _("“%(value)s” is not a valid UUID."),
}
description = _("Universally unique identifier")
empty_strings_allowed = False
def __init__(self, verbose_name=None, **kwargs):
kwargs["max_length"] = 32
super().__init__(verbose_name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return "UUIDField"
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
if not isinstance(value, uuid.UUID):
value = self.to_python(value)
if connection.features.has_native_uuid_field:
return value
return value.hex
def to_python(self, value):
if value is not None and not isinstance(value, uuid.UUID):
input_form = "int" if isinstance(value, int) else "hex"
try:
return uuid.UUID(**{input_form: value})
except (AttributeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
return value
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.UUIDField,
**kwargs,
}
)
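# A quick sketch of the accepted input forms above: ints and hex strings
# both normalize to uuid.UUID, and backends without a native uuid column
# store the 32-character hex form (hence max_length=32):
#
#     >>> f = UUIDField()
#     >>> f.to_python("12345678123456781234567812345678")
#     UUID('12345678-1234-5678-1234-567812345678')
#     >>> f.to_python(0)
#     UUID('00000000-0000-0000-0000-000000000000')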
class AutoFieldMixin:
db_returning = True
def __init__(self, *args, **kwargs):
kwargs["blank"] = True
super().__init__(*args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_primary_key(),
]
def _check_primary_key(self):
if not self.primary_key:
return [
checks.Error(
"AutoFields must set primary_key=True.",
obj=self,
id="fields.E100",
),
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["blank"]
kwargs["primary_key"] = True
return name, path, args, kwargs
def validate(self, value, model_instance):
pass
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
value = connection.ops.validate_autopk_value(value)
return value
def contribute_to_class(self, cls, name, **kwargs):
if cls._meta.auto_field:
raise ValueError(
"Model %s can't have more than one auto-generated field."
% cls._meta.label
)
super().contribute_to_class(cls, name, **kwargs)
cls._meta.auto_field = self
def formfield(self, **kwargs):
return None
class AutoFieldMeta(type):
"""
Metaclass to maintain backward inheritance compatibility for AutoField.
It is intended that AutoFieldMixin become public API when it is possible to
create a non-integer automatically-generated field using column defaults
stored in the database.
In many areas Django also relies on using isinstance() to check for an
automatically-generated field as a subclass of AutoField. A new flag needs
to be implemented on Field to be used instead.
When these issues have been addressed, this metaclass could be used to
deprecate inheritance from AutoField and use of isinstance() with AutoField
for detecting automatically-generated fields.
"""
@property
def _subclasses(self):
return (BigAutoField, SmallAutoField)
def __instancecheck__(self, instance):
return isinstance(instance, self._subclasses) or super().__instancecheck__(
instance
)
def __subclasscheck__(self, subclass):
return issubclass(subclass, self._subclasses) or super().__subclasscheck__(
subclass
)
class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta):
def get_internal_type(self):
return "AutoField"
def rel_db_type(self, connection):
return IntegerField().db_type(connection=connection)
class BigAutoField(AutoFieldMixin, BigIntegerField):
def get_internal_type(self):
return "BigAutoField"
def rel_db_type(self, connection):
return BigIntegerField().db_type(connection=connection)
class SmallAutoField(AutoFieldMixin, SmallIntegerField):
def get_internal_type(self):
return "SmallAutoField"
def rel_db_type(self, connection):
return SmallIntegerField().db_type(connection=connection)
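# A quick sketch of the metaclass shim above: BigAutoField and
# SmallAutoField do not inherit from AutoField, but the overridden checks
# keep backward-compatible introspection working:
#
#     >>> issubclass(BigAutoField, AutoField)
#     True
#     >>> isinstance(SmallAutoField(primary_key=True), AutoField)
#     True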
import functools
import inspect
from functools import partial
from django import forms
from django.apps import apps
from django.conf import SettingsReference, settings
from django.core import checks, exceptions
from django.db import connection, router
from django.db.backends import utils
from django.db.models import Q
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import CASCADE, SET_DEFAULT, SET_NULL
from django.db.models.query_utils import PathInfo
from django.db.models.utils import make_model_tuple
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from . import Field
from .mixins import FieldCacheMixin
from .related_descriptors import (
ForeignKeyDeferredAttribute,
ForwardManyToOneDescriptor,
ForwardOneToOneDescriptor,
ManyToManyDescriptor,
ReverseManyToOneDescriptor,
ReverseOneToOneDescriptor,
)
from .related_lookups import (
RelatedExact,
RelatedGreaterThan,
RelatedGreaterThanOrEqual,
RelatedIn,
RelatedIsNull,
RelatedLessThan,
RelatedLessThanOrEqual,
)
from .reverse_related import ForeignObjectRel, ManyToManyRel, ManyToOneRel, OneToOneRel
RECURSIVE_RELATIONSHIP_CONSTANT = "self"
def resolve_relation(scope_model, relation):
"""
Transform relation into a model or fully-qualified model string of the form
"app_label.ModelName", relative to scope_model.
The relation argument can be:
* RECURSIVE_RELATIONSHIP_CONSTANT, i.e. the string "self", in which case
the model argument will be returned.
* A bare model name without an app_label, in which case scope_model's
app_label will be prepended.
* An "app_label.ModelName" string.
* A model class, which will be returned unchanged.
"""
# Check for recursive relations
if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
relation = scope_model
# Look for an "app.Model" relation
if isinstance(relation, str):
if "." not in relation:
relation = "%s.%s" % (scope_model._meta.app_label, relation)
return relation
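# A quick sketch of the resolution rules above; Book, Author and
# shop.Order are hypothetical models used only for illustration, with Book
# living in a "library" app:
#
#     >>> resolve_relation(Book, "self")          # the "self" constant
#     <class 'library.models.Book'>
#     >>> resolve_relation(Book, "Author")        # app_label prepended
#     'library.Author'
#     >>> resolve_relation(Book, "shop.Order")    # already fully qualified
#     'shop.Order'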
def lazy_related_operation(function, model, *related_models, **kwargs):
"""
Schedule `function` to be called once `model` and all `related_models`
have been imported and registered with the app registry. `function` will
be called with the newly-loaded model classes as its positional arguments,
plus any optional keyword arguments.
The `model` argument must be a model class. Each subsequent positional
argument is another model, or a reference to another model - see
`resolve_relation()` for the various forms these may take. Any relative
references will be resolved relative to `model`.
This is a convenience wrapper for `Apps.lazy_model_operation` - the app
registry model used is the one found in `model._meta.apps`.
"""
models = [model] + [resolve_relation(model, rel) for rel in related_models]
model_keys = (make_model_tuple(m) for m in models)
apps = model._meta.apps
return apps.lazy_model_operation(partial(function, **kwargs), *model_keys)
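# A rough sketch of a typical call, mirroring how contribute_to_class()
# uses it further down; resolve and "app.Target" are hypothetical:
#
#     >>> def resolve(model, related):
#     ...     ...  # runs once both model classes are registered
#     >>> lazy_related_operation(resolve, SomeModel, "app.Target")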
class RelatedField(FieldCacheMixin, Field):
"""Base class that all relational fields inherit from."""
# Field flags
one_to_many = False
one_to_one = False
many_to_many = False
many_to_one = False
def __init__(
self,
related_name=None,
related_query_name=None,
limit_choices_to=None,
**kwargs,
):
self._related_name = related_name
self._related_query_name = related_query_name
self._limit_choices_to = limit_choices_to
super().__init__(**kwargs)
@cached_property
def related_model(self):
# Can't cache this property until all the models are loaded.
apps.check_models_ready()
return self.remote_field.model
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_related_name_is_valid(),
*self._check_related_query_name_is_valid(),
*self._check_relation_model_exists(),
*self._check_referencing_to_swapped_model(),
*self._check_clashes(),
]
def _check_related_name_is_valid(self):
import keyword
related_name = self.remote_field.related_name
if related_name is None:
return []
is_valid_id = (
not keyword.iskeyword(related_name) and related_name.isidentifier()
)
if not (is_valid_id or related_name.endswith("+")):
return [
checks.Error(
"The name '%s' is invalid related_name for field %s.%s"
% (
self.remote_field.related_name,
self.model._meta.object_name,
self.name,
),
hint=(
"Related name must be a valid Python identifier or end with a "
"'+'"
),
obj=self,
id="fields.E306",
)
]
return []
def _check_related_query_name_is_valid(self):
if self.remote_field.is_hidden():
return []
rel_query_name = self.related_query_name()
errors = []
if rel_query_name.endswith("_"):
errors.append(
checks.Error(
"Reverse query name '%s' must not end with an underscore."
% rel_query_name,
hint=(
"Add or change a related_name or related_query_name "
"argument for this field."
),
obj=self,
id="fields.E308",
)
)
if LOOKUP_SEP in rel_query_name:
errors.append(
checks.Error(
"Reverse query name '%s' must not contain '%s'."
% (rel_query_name, LOOKUP_SEP),
hint=(
"Add or change a related_name or related_query_name "
"argument for this field."
),
obj=self,
id="fields.E309",
)
)
return errors
def _check_relation_model_exists(self):
rel_is_missing = self.remote_field.model not in self.opts.apps.get_models()
rel_is_string = isinstance(self.remote_field.model, str)
model_name = (
self.remote_field.model
if rel_is_string
else self.remote_field.model._meta.object_name
)
if rel_is_missing and (
rel_is_string or not self.remote_field.model._meta.swapped
):
return [
checks.Error(
"Field defines a relation with model '%s', which is either "
"not installed, or is abstract." % model_name,
obj=self,
id="fields.E300",
)
]
return []
def _check_referencing_to_swapped_model(self):
if (
self.remote_field.model not in self.opts.apps.get_models()
and not isinstance(self.remote_field.model, str)
and self.remote_field.model._meta.swapped
):
return [
checks.Error(
"Field defines a relation with the model '%s', which has "
"been swapped out." % self.remote_field.model._meta.label,
hint="Update the relation to point at 'settings.%s'."
% self.remote_field.model._meta.swappable,
obj=self,
id="fields.E301",
)
]
return []
def _check_clashes(self):
"""Check accessor and reverse query name clashes."""
from django.db.models.base import ModelBase
errors = []
opts = self.model._meta
# f.remote_field.model may be a string instead of a model. Skip if
# model name is not resolved.
if not isinstance(self.remote_field.model, ModelBase):
return []
# Consider that we are checking field `Model.foreign` and the models
# are:
#
# class Target(models.Model):
# model = models.IntegerField()
# model_set = models.IntegerField()
#
# class Model(models.Model):
# foreign = models.ForeignKey(Target)
# m2m = models.ManyToManyField(Target)
# rel_opts.object_name == "Target"
rel_opts = self.remote_field.model._meta
# If the field doesn't install a backward relation on the target model
# (so `is_hidden` returns True), then there are no clashes to check
# and we can skip these fields.
rel_is_hidden = self.remote_field.is_hidden()
        rel_name = self.remote_field.get_accessor_name()  # i.e. "model_set"
        rel_query_name = self.related_query_name()  # i.e. "model"
# i.e. "app_label.Model.field".
field_name = "%s.%s" % (opts.label, self.name)
# Check clashes between accessor or reverse query name of `field`
# and any other field name -- i.e. accessor for Model.foreign is
# model_set and it clashes with Target.model_set.
potential_clashes = rel_opts.fields + rel_opts.many_to_many
for clash_field in potential_clashes:
# i.e. "app_label.Target.model_set".
clash_name = "%s.%s" % (rel_opts.label, clash_field.name)
if not rel_is_hidden and clash_field.name == rel_name:
errors.append(
checks.Error(
f"Reverse accessor '{rel_opts.object_name}.{rel_name}' "
f"for '{field_name}' clashes with field name "
f"'{clash_name}'.",
hint=(
"Rename field '%s', or add/change a related_name "
"argument to the definition for field '%s'."
)
% (clash_name, field_name),
obj=self,
id="fields.E302",
)
)
if clash_field.name == rel_query_name:
errors.append(
checks.Error(
"Reverse query name for '%s' clashes with field name '%s'."
% (field_name, clash_name),
hint=(
"Rename field '%s', or add/change a related_name "
"argument to the definition for field '%s'."
)
% (clash_name, field_name),
obj=self,
id="fields.E303",
)
)
# Check clashes between accessors/reverse query names of `field` and
        # any other field accessor -- i.e. Model.foreign accessor clashes with
# Model.m2m accessor.
potential_clashes = (r for r in rel_opts.related_objects if r.field is not self)
for clash_field in potential_clashes:
# i.e. "app_label.Model.m2m".
clash_name = "%s.%s" % (
clash_field.related_model._meta.label,
clash_field.field.name,
)
if not rel_is_hidden and clash_field.get_accessor_name() == rel_name:
errors.append(
checks.Error(
f"Reverse accessor '{rel_opts.object_name}.{rel_name}' "
f"for '{field_name}' clashes with reverse accessor for "
f"'{clash_name}'.",
hint=(
"Add or change a related_name argument "
"to the definition for '%s' or '%s'."
)
% (field_name, clash_name),
obj=self,
id="fields.E304",
)
)
if clash_field.get_accessor_name() == rel_query_name:
errors.append(
checks.Error(
"Reverse query name for '%s' clashes with reverse query name "
"for '%s'." % (field_name, clash_name),
hint=(
"Add or change a related_name argument "
"to the definition for '%s' or '%s'."
)
% (field_name, clash_name),
obj=self,
id="fields.E305",
)
)
return errors
def db_type(self, connection):
        # By default a related field will not have a column, as it relates
        # to columns from another table.
return None
def contribute_to_class(self, cls, name, private_only=False, **kwargs):
super().contribute_to_class(cls, name, private_only=private_only, **kwargs)
self.opts = cls._meta
if not cls._meta.abstract:
if self.remote_field.related_name:
related_name = self.remote_field.related_name
else:
related_name = self.opts.default_related_name
if related_name:
related_name %= {
"class": cls.__name__.lower(),
"model_name": cls._meta.model_name.lower(),
"app_label": cls._meta.app_label.lower(),
}
self.remote_field.related_name = related_name
if self.remote_field.related_query_name:
related_query_name = self.remote_field.related_query_name % {
"class": cls.__name__.lower(),
"app_label": cls._meta.app_label.lower(),
}
self.remote_field.related_query_name = related_query_name
def resolve_related_class(model, related, field):
field.remote_field.model = related
field.do_related_class(related, model)
lazy_related_operation(
resolve_related_class, cls, self.remote_field.model, field=self
)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self._limit_choices_to:
kwargs["limit_choices_to"] = self._limit_choices_to
if self._related_name is not None:
kwargs["related_name"] = self._related_name
if self._related_query_name is not None:
kwargs["related_query_name"] = self._related_query_name
return name, path, args, kwargs
def get_forward_related_filter(self, obj):
"""
        Return the keyword arguments that, when supplied to
        self.model.objects.filter(), would select all instances related through
this field to the remote obj. This is used to build the querysets
returned by related descriptors. obj is an instance of
self.related_field.model.
"""
return {
"%s__%s" % (self.name, rh_field.name): getattr(obj, rh_field.attname)
for _, rh_field in self.related_fields
}
def get_reverse_related_filter(self, obj):
"""
Complement to get_forward_related_filter(). Return the keyword
        arguments that, when passed to self.related_field.model.objects.filter(),
select all instances of self.related_field.model related through
this field to obj. obj is an instance of self.model.
"""
base_q = Q.create(
[
(rh_field.attname, getattr(obj, lh_field.attname))
for lh_field, rh_field in self.related_fields
]
)
descriptor_filter = self.get_extra_descriptor_filter(obj)
if isinstance(descriptor_filter, dict):
return base_q & Q(**descriptor_filter)
elif descriptor_filter:
return base_q & descriptor_filter
return base_q
@property
def swappable_setting(self):
"""
Get the setting that this is powered from for swapping, or None
if it's not swapped in / marked with swappable=False.
"""
if self.swappable:
# Work out string form of "to"
if isinstance(self.remote_field.model, str):
to_string = self.remote_field.model
else:
to_string = self.remote_field.model._meta.label
return apps.get_swappable_settings_name(to_string)
return None
def set_attributes_from_rel(self):
self.name = self.name or (
self.remote_field.model._meta.model_name
+ "_"
+ self.remote_field.model._meta.pk.name
)
if self.verbose_name is None:
self.verbose_name = self.remote_field.model._meta.verbose_name
self.remote_field.set_field_name()
def do_related_class(self, other, cls):
self.set_attributes_from_rel()
self.contribute_to_related_class(other, self.remote_field)
def get_limit_choices_to(self):
"""
Return ``limit_choices_to`` for this model field.
If it is a callable, it will be invoked and the result will be
returned.
"""
if callable(self.remote_field.limit_choices_to):
return self.remote_field.limit_choices_to()
return self.remote_field.limit_choices_to
def formfield(self, **kwargs):
"""
Pass ``limit_choices_to`` to the field being constructed.
        Only pass it if there is a type that supports related fields.
        This mirrors the strategy used to pass the ``queryset`` to the field
        being constructed.
"""
defaults = {}
if hasattr(self.remote_field, "get_related_field"):
# If this is a callable, do not invoke it here. Just pass
# it in the defaults for when the form class will later be
# instantiated.
limit_choices_to = self.remote_field.limit_choices_to
defaults.update(
{
"limit_choices_to": limit_choices_to,
}
)
defaults.update(kwargs)
return super().formfield(**defaults)
def related_query_name(self):
"""
Define the name that can be used to identify this related object in a
table-spanning query.
"""
return (
self.remote_field.related_query_name
or self.remote_field.related_name
or self.opts.model_name
)
@property
def target_field(self):
"""
When filtering against this relation, return the field on the remote
model against which the filtering should happen.
"""
target_fields = self.path_infos[-1].target_fields
if len(target_fields) > 1:
raise exceptions.FieldError(
"The relation has multiple target fields, but only single target field "
"was asked for"
)
return target_fields[0]
def get_cache_name(self):
return self.name
class ForeignObject(RelatedField):
"""
Abstraction of the ForeignKey relation to support multi-column relations.
"""
# Field flags
many_to_many = False
many_to_one = True
one_to_many = False
one_to_one = False
requires_unique_target = True
related_accessor_class = ReverseManyToOneDescriptor
forward_related_accessor_class = ForwardManyToOneDescriptor
rel_class = ForeignObjectRel
def __init__(
self,
to,
on_delete,
from_fields,
to_fields,
rel=None,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
swappable=True,
**kwargs,
):
if rel is None:
rel = self.rel_class(
self,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
super().__init__(
rel=rel,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
**kwargs,
)
self.from_fields = from_fields
self.to_fields = to_fields
self.swappable = swappable
def __copy__(self):
obj = super().__copy__()
# Remove any cached PathInfo values.
obj.__dict__.pop("path_infos", None)
obj.__dict__.pop("reverse_path_infos", None)
return obj
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_to_fields_exist(),
*self._check_unique_target(),
]
def _check_to_fields_exist(self):
# Skip nonexistent models.
if isinstance(self.remote_field.model, str):
return []
errors = []
for to_field in self.to_fields:
if to_field:
try:
self.remote_field.model._meta.get_field(to_field)
except exceptions.FieldDoesNotExist:
errors.append(
checks.Error(
"The to_field '%s' doesn't exist on the related "
"model '%s'."
% (to_field, self.remote_field.model._meta.label),
obj=self,
id="fields.E312",
)
)
return errors
def _check_unique_target(self):
rel_is_string = isinstance(self.remote_field.model, str)
if rel_is_string or not self.requires_unique_target:
return []
try:
self.foreign_related_fields
except exceptions.FieldDoesNotExist:
return []
if not self.foreign_related_fields:
return []
unique_foreign_fields = {
frozenset([f.name])
for f in self.remote_field.model._meta.get_fields()
if getattr(f, "unique", False)
}
unique_foreign_fields.update(
{frozenset(ut) for ut in self.remote_field.model._meta.unique_together}
)
unique_foreign_fields.update(
{
frozenset(uc.fields)
for uc in self.remote_field.model._meta.total_unique_constraints
}
)
foreign_fields = {f.name for f in self.foreign_related_fields}
has_unique_constraint = any(u <= foreign_fields for u in unique_foreign_fields)
if not has_unique_constraint and len(self.foreign_related_fields) > 1:
field_combination = ", ".join(
"'%s'" % rel_field.name for rel_field in self.foreign_related_fields
)
model_name = self.remote_field.model.__name__
return [
checks.Error(
"No subset of the fields %s on model '%s' is unique."
% (field_combination, model_name),
hint=(
"Mark a single field as unique=True or add a set of "
"fields to a unique constraint (via unique_together "
"or a UniqueConstraint (without condition) in the "
"model Meta.constraints)."
),
obj=self,
id="fields.E310",
)
]
elif not has_unique_constraint:
field_name = self.foreign_related_fields[0].name
model_name = self.remote_field.model.__name__
return [
checks.Error(
"'%s.%s' must be unique because it is referenced by "
"a foreign key." % (model_name, field_name),
hint=(
"Add unique=True to this field or add a "
"UniqueConstraint (without condition) in the model "
"Meta.constraints."
),
obj=self,
id="fields.E311",
)
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
kwargs["on_delete"] = self.remote_field.on_delete
kwargs["from_fields"] = self.from_fields
kwargs["to_fields"] = self.to_fields
if self.remote_field.parent_link:
kwargs["parent_link"] = self.remote_field.parent_link
if isinstance(self.remote_field.model, str):
if "." in self.remote_field.model:
app_label, model_name = self.remote_field.model.split(".")
kwargs["to"] = "%s.%s" % (app_label, model_name.lower())
else:
kwargs["to"] = self.remote_field.model.lower()
else:
kwargs["to"] = self.remote_field.model._meta.label_lower
# If swappable is True, then see if we're actually pointing to the target
# of a swap.
swappable_setting = self.swappable_setting
if swappable_setting is not None:
# If it's already a settings reference, error
if hasattr(kwargs["to"], "setting_name"):
if kwargs["to"].setting_name != swappable_setting:
raise ValueError(
"Cannot deconstruct a ForeignKey pointing to a model "
"that is swapped in place of more than one model (%s and %s)"
% (kwargs["to"].setting_name, swappable_setting)
)
# Set it
kwargs["to"] = SettingsReference(
kwargs["to"],
swappable_setting,
)
return name, path, args, kwargs
def resolve_related_fields(self):
if not self.from_fields or len(self.from_fields) != len(self.to_fields):
raise ValueError(
"Foreign Object from and to fields must be the same non-zero length"
)
if isinstance(self.remote_field.model, str):
raise ValueError(
"Related model %r cannot be resolved" % self.remote_field.model
)
related_fields = []
for index in range(len(self.from_fields)):
from_field_name = self.from_fields[index]
to_field_name = self.to_fields[index]
from_field = (
self
if from_field_name == RECURSIVE_RELATIONSHIP_CONSTANT
else self.opts.get_field(from_field_name)
)
to_field = (
self.remote_field.model._meta.pk
if to_field_name is None
else self.remote_field.model._meta.get_field(to_field_name)
)
related_fields.append((from_field, to_field))
return related_fields
@cached_property
def related_fields(self):
return self.resolve_related_fields()
@cached_property
def reverse_related_fields(self):
return [(rhs_field, lhs_field) for lhs_field, rhs_field in self.related_fields]
@cached_property
def local_related_fields(self):
return tuple(lhs_field for lhs_field, rhs_field in self.related_fields)
@cached_property
def foreign_related_fields(self):
return tuple(
rhs_field for lhs_field, rhs_field in self.related_fields if rhs_field
)
def get_local_related_value(self, instance):
return self.get_instance_value_for_fields(instance, self.local_related_fields)
def get_foreign_related_value(self, instance):
return self.get_instance_value_for_fields(instance, self.foreign_related_fields)
@staticmethod
def get_instance_value_for_fields(instance, fields):
ret = []
opts = instance._meta
for field in fields:
# Gotcha: in some cases (like fixture loading) a model can have
# different values in parent_ptr_id and parent's id. So, use
# instance.pk (that is, parent_ptr_id) when asked for instance.id.
if field.primary_key:
possible_parent_link = opts.get_ancestor_link(field.model)
if (
not possible_parent_link
or possible_parent_link.primary_key
or possible_parent_link.model._meta.abstract
):
ret.append(instance.pk)
continue
ret.append(getattr(instance, field.attname))
return tuple(ret)
def get_attname_column(self):
attname, column = super().get_attname_column()
return attname, None
def get_joining_columns(self, reverse_join=False):
source = self.reverse_related_fields if reverse_join else self.related_fields
return tuple(
(lhs_field.column, rhs_field.column) for lhs_field, rhs_field in source
)
def get_reverse_joining_columns(self):
return self.get_joining_columns(reverse_join=True)
def get_extra_descriptor_filter(self, instance):
"""
        Return an extra filter condition for related object fetching when
        the user accesses 'instance.fieldname'; that is, the extra filter is
        used in the descriptor of the field.
The filter should be either a dict usable in .filter(**kwargs) call or
a Q-object. The condition will be ANDed together with the relation's
joining columns.
A parallel method is get_extra_restriction() which is used in
JOIN and subquery conditions.
"""
return {}
def get_extra_restriction(self, alias, related_alias):
"""
Return a pair condition used for joining and subquery pushdown. The
condition is something that responds to as_sql(compiler, connection)
method.
        Note that currently referring to both the 'alias' and 'related_alias'
will not work in some conditions, like subquery pushdown.
A parallel method is get_extra_descriptor_filter() which is used in
instance.fieldname related object fetching.
"""
return None
def get_path_info(self, filtered_relation=None):
"""Get path from this field to the related model."""
opts = self.remote_field.model._meta
from_opts = self.model._meta
return [
PathInfo(
from_opts=from_opts,
to_opts=opts,
target_fields=self.foreign_related_fields,
join_field=self,
m2m=False,
direct=True,
filtered_relation=filtered_relation,
)
]
@cached_property
def path_infos(self):
return self.get_path_info()
def get_reverse_path_info(self, filtered_relation=None):
"""Get path from the related model to this field's model."""
opts = self.model._meta
from_opts = self.remote_field.model._meta
return [
PathInfo(
from_opts=from_opts,
to_opts=opts,
target_fields=(opts.pk,),
join_field=self.remote_field,
m2m=not self.unique,
direct=False,
filtered_relation=filtered_relation,
)
]
@cached_property
def reverse_path_infos(self):
return self.get_reverse_path_info()
@classmethod
@functools.lru_cache(maxsize=None)
def get_class_lookups(cls):
bases = inspect.getmro(cls)
bases = bases[: bases.index(ForeignObject) + 1]
class_lookups = [parent.__dict__.get("class_lookups", {}) for parent in bases]
return cls.merge_dicts(class_lookups)
def contribute_to_class(self, cls, name, private_only=False, **kwargs):
super().contribute_to_class(cls, name, private_only=private_only, **kwargs)
setattr(cls, self.name, self.forward_related_accessor_class(self))
def contribute_to_related_class(self, cls, related):
# Internal FK's - i.e., those with a related name ending with '+' -
# and swapped models don't get a related descriptor.
if (
not self.remote_field.is_hidden()
and not related.related_model._meta.swapped
):
setattr(
cls._meta.concrete_model,
related.get_accessor_name(),
self.related_accessor_class(related),
)
# While 'limit_choices_to' might be a callable, simply pass
# it along for later - this is too early because it's still
# model load time.
if self.remote_field.limit_choices_to:
cls._meta.related_fkey_lookups.append(
self.remote_field.limit_choices_to
)
ForeignObject.register_lookup(RelatedIn)
ForeignObject.register_lookup(RelatedExact)
ForeignObject.register_lookup(RelatedLessThan)
ForeignObject.register_lookup(RelatedGreaterThan)
ForeignObject.register_lookup(RelatedGreaterThanOrEqual)
ForeignObject.register_lookup(RelatedLessThanOrEqual)
ForeignObject.register_lookup(RelatedIsNull)
class ForeignKey(ForeignObject):
"""
Provide a many-to-one relation by adding a column to the local model
to hold the remote value.
By default ForeignKey will target the pk of the remote model but this
behavior can be changed by using the ``to_field`` argument.
"""
descriptor_class = ForeignKeyDeferredAttribute
# Field flags
many_to_many = False
many_to_one = True
one_to_many = False
one_to_one = False
rel_class = ManyToOneRel
empty_strings_allowed = False
default_error_messages = {
"invalid": _("%(model)s instance with %(field)s %(value)r does not exist.")
}
description = _("Foreign Key (type determined by related field)")
def __init__(
self,
to,
on_delete,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
to_field=None,
db_constraint=True,
**kwargs,
):
try:
to._meta.model_name
except AttributeError:
if not isinstance(to, str):
raise TypeError(
"%s(%r) is invalid. First parameter to ForeignKey must be "
"either a model, a model name, or the string %r"
% (
self.__class__.__name__,
to,
RECURSIVE_RELATIONSHIP_CONSTANT,
)
)
else:
# For backwards compatibility purposes, we need to *try* and set
# the to_field during FK construction. It won't be guaranteed to
# be correct until contribute_to_class is called. Refs #12190.
to_field = to_field or (to._meta.pk and to._meta.pk.name)
if not callable(on_delete):
raise TypeError("on_delete must be callable.")
kwargs["rel"] = self.rel_class(
self,
to,
to_field,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
kwargs.setdefault("db_index", True)
super().__init__(
to,
on_delete,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
from_fields=[RECURSIVE_RELATIONSHIP_CONSTANT],
to_fields=[to_field],
**kwargs,
)
self.db_constraint = db_constraint
def __class_getitem__(cls, *args, **kwargs):
return cls
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_on_delete(),
*self._check_unique(),
]
def _check_on_delete(self):
on_delete = getattr(self.remote_field, "on_delete", None)
if on_delete == SET_NULL and not self.null:
return [
checks.Error(
"Field specifies on_delete=SET_NULL, but cannot be null.",
hint=(
"Set null=True argument on the field, or change the on_delete "
"rule."
),
obj=self,
id="fields.E320",
)
]
elif on_delete == SET_DEFAULT and not self.has_default():
return [
checks.Error(
"Field specifies on_delete=SET_DEFAULT, but has no default value.",
hint="Set a default value, or change the on_delete rule.",
obj=self,
id="fields.E321",
)
]
else:
return []
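    # Usage sketch (hypothetical Author/Book models, not executed here): the
    # system check framework reports fields.E320 for
    #     author = models.ForeignKey(Author, on_delete=models.SET_NULL)
    # because SET_NULL needs a nullable column; adding null=True (or choosing
    # another on_delete rule) silences it. Likewise, fields.E321 is reported
    # for on_delete=SET_DEFAULT on a field with no default= value.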
def _check_unique(self, **kwargs):
return (
[
checks.Warning(
"Setting unique=True on a ForeignKey has the same effect as using "
"a OneToOneField.",
hint=(
"ForeignKey(unique=True) is usually better served by a "
"OneToOneField."
),
obj=self,
id="fields.W342",
)
]
if self.unique
else []
)
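    # Usage sketch (hypothetical models): fields.W342 is emitted for
    #     author = models.ForeignKey(Author, unique=True, on_delete=models.CASCADE)
    # since a OneToOneField expresses the same constraint more idiomatically:
    #     author = models.OneToOneField(Author, on_delete=models.CASCADE)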
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["to_fields"]
del kwargs["from_fields"]
# Handle the simpler arguments
if self.db_index:
del kwargs["db_index"]
else:
kwargs["db_index"] = False
if self.db_constraint is not True:
kwargs["db_constraint"] = self.db_constraint
# Rel needs more work.
to_meta = getattr(self.remote_field.model, "_meta", None)
if self.remote_field.field_name and (
not to_meta
or (to_meta.pk and self.remote_field.field_name != to_meta.pk.name)
):
kwargs["to_field"] = self.remote_field.field_name
return name, path, args, kwargs
def to_python(self, value):
return self.target_field.to_python(value)
@property
def target_field(self):
return self.foreign_related_fields[0]
def validate(self, value, model_instance):
if self.remote_field.parent_link:
return
super().validate(value, model_instance)
if value is None:
return
using = router.db_for_read(self.remote_field.model, instance=model_instance)
qs = self.remote_field.model._base_manager.using(using).filter(
**{self.remote_field.field_name: value}
)
qs = qs.complex_filter(self.get_limit_choices_to())
if not qs.exists():
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={
"model": self.remote_field.model._meta.verbose_name,
"pk": value,
"field": self.remote_field.field_name,
"value": value,
}, # 'pk' is included for backwards compatibility
)
def resolve_related_fields(self):
related_fields = super().resolve_related_fields()
for from_field, to_field in related_fields:
if (
to_field
and to_field.model != self.remote_field.model._meta.concrete_model
):
raise exceptions.FieldError(
"'%s.%s' refers to field '%s' which is not local to model "
"'%s'."
% (
self.model._meta.label,
self.name,
to_field.name,
self.remote_field.model._meta.concrete_model._meta.label,
)
)
return related_fields
def get_attname(self):
return "%s_id" % self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
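    # Usage sketch (hypothetical Book model with a ForeignKey named "author"):
    # the attname is "author_id", so the raw target value is available without
    # touching the related table, while the plain attribute fetches the
    # related instance:
    #     book.author_id   # raw column value, no extra query
    #     book.author      # Author instance, loaded on first access
    # db_column only renames the database column; the attname stays "author_id".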
def get_default(self):
"""Return the to_field if the default value is an object."""
field_default = super().get_default()
if isinstance(field_default, self.remote_field.model):
return getattr(field_default, self.target_field.attname)
return field_default
def get_db_prep_save(self, value, connection):
if value is None or (
value == ""
and (
not self.target_field.empty_strings_allowed
or connection.features.interprets_empty_strings_as_nulls
)
):
return None
else:
return self.target_field.get_db_prep_save(value, connection=connection)
def get_db_prep_value(self, value, connection, prepared=False):
return self.target_field.get_db_prep_value(value, connection, prepared)
def get_prep_value(self, value):
return self.target_field.get_prep_value(value)
def contribute_to_related_class(self, cls, related):
super().contribute_to_related_class(cls, related)
if self.remote_field.field_name is None:
self.remote_field.field_name = cls._meta.pk.name
def formfield(self, *, using=None, **kwargs):
if isinstance(self.remote_field.model, str):
raise ValueError(
"Cannot create form field for %r yet, because "
"its related model %r has not been loaded yet"
% (self.name, self.remote_field.model)
)
return super().formfield(
**{
"form_class": forms.ModelChoiceField,
"queryset": self.remote_field.model._default_manager.using(using),
"to_field_name": self.remote_field.field_name,
**kwargs,
"blank": self.blank,
}
)
def db_check(self, connection):
return None
def db_type(self, connection):
return self.target_field.rel_db_type(connection=connection)
def db_parameters(self, connection):
target_db_parameters = self.target_field.db_parameters(connection)
return {
"type": self.db_type(connection),
"check": self.db_check(connection),
"collation": target_db_parameters.get("collation"),
}
def convert_empty_strings(self, value, expression, connection):
if (not value) and isinstance(value, str):
return None
return value
def get_db_converters(self, connection):
converters = super().get_db_converters(connection)
if connection.features.interprets_empty_strings_as_nulls:
converters += [self.convert_empty_strings]
return converters
def get_col(self, alias, output_field=None):
if output_field is None:
output_field = self.target_field
while isinstance(output_field, ForeignKey):
output_field = output_field.target_field
if output_field is self:
raise ValueError("Cannot resolve output_field.")
return super().get_col(alias, output_field)
class OneToOneField(ForeignKey):
"""
A OneToOneField is essentially the same as a ForeignKey, with the exception
that it always carries a "unique" constraint with it and the reverse
relation always returns the object pointed to (since there will only ever
be one), rather than returning a list.
"""
# Field flags
many_to_many = False
many_to_one = False
one_to_many = False
one_to_one = True
related_accessor_class = ReverseOneToOneDescriptor
forward_related_accessor_class = ForwardOneToOneDescriptor
rel_class = OneToOneRel
description = _("One-to-one relationship")
def __init__(self, to, on_delete, to_field=None, **kwargs):
kwargs["unique"] = True
super().__init__(to, on_delete, to_field=to_field, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if "unique" in kwargs:
del kwargs["unique"]
return name, path, args, kwargs
def formfield(self, **kwargs):
if self.remote_field.parent_link:
return None
return super().formfield(**kwargs)
def save_form_data(self, instance, data):
if isinstance(data, self.remote_field.model):
setattr(instance, self.name, data)
else:
setattr(instance, self.attname, data)
# Remote field object must be cleared otherwise Model.save()
# will reassign attname using the related object pk.
if data is None:
setattr(instance, self.name, data)
def _check_unique(self, **kwargs):
# Override ForeignKey since check isn't applicable here.
return []
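# Usage sketch (hypothetical models, not executed here): unlike a ForeignKey,
# the reverse side of a OneToOneField is a single object, not a manager:
#     class Place(models.Model): ...
#     class Restaurant(models.Model):
#         place = models.OneToOneField(Place, on_delete=models.CASCADE)
#     place.restaurant   # a Restaurant instance (raises Restaurant.DoesNotExist
#                        # if absent), not a queryset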
def create_many_to_many_intermediary_model(field, klass):
from django.db import models
def set_managed(model, related, through):
through._meta.managed = model._meta.managed or related._meta.managed
to_model = resolve_relation(klass, field.remote_field.model)
name = "%s_%s" % (klass._meta.object_name, field.name)
lazy_related_operation(set_managed, klass, to_model, name)
to = make_model_tuple(to_model)[1]
from_ = klass._meta.model_name
if to == from_:
to = "to_%s" % to
from_ = "from_%s" % from_
meta = type(
"Meta",
(),
{
"db_table": field._get_m2m_db_table(klass._meta),
"auto_created": klass,
"app_label": klass._meta.app_label,
"db_tablespace": klass._meta.db_tablespace,
"unique_together": (from_, to),
"verbose_name": _("%(from)s-%(to)s relationship")
% {"from": from_, "to": to},
"verbose_name_plural": _("%(from)s-%(to)s relationships")
% {"from": from_, "to": to},
"apps": field.model._meta.apps,
},
)
# Construct and return the new class.
return type(
name,
(models.Model,),
{
"Meta": meta,
"__module__": klass.__module__,
from_: models.ForeignKey(
klass,
related_name="%s+" % name,
db_tablespace=field.db_tablespace,
db_constraint=field.remote_field.db_constraint,
on_delete=CASCADE,
),
to: models.ForeignKey(
to_model,
related_name="%s+" % name,
db_tablespace=field.db_tablespace,
db_constraint=field.remote_field.db_constraint,
on_delete=CASCADE,
),
},
)
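# Usage sketch (hypothetical models): for
#     class Pizza(models.Model):
#         toppings = models.ManyToManyField(Topping)
# this factory builds a hidden "Pizza_toppings" model with two CASCADE
# foreign keys and a unique_together constraint. It is reachable as
#     Pizza.toppings.through
# and, by default, maps to the "<app_label>_pizza_toppings" table.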
class ManyToManyField(RelatedField):
"""
Provide a many-to-many relation by using an intermediary model that
holds two ForeignKey fields pointed at the two sides of the relation.
Unless a ``through`` model was provided, ManyToManyField will use the
create_many_to_many_intermediary_model factory to automatically generate
the intermediary model.
"""
# Field flags
many_to_many = True
many_to_one = False
one_to_many = False
one_to_one = False
rel_class = ManyToManyRel
description = _("Many-to-many relationship")
def __init__(
self,
to,
related_name=None,
related_query_name=None,
limit_choices_to=None,
symmetrical=None,
through=None,
through_fields=None,
db_constraint=True,
db_table=None,
swappable=True,
**kwargs,
):
try:
to._meta
except AttributeError:
if not isinstance(to, str):
raise TypeError(
"%s(%r) is invalid. First parameter to ManyToManyField "
"must be either a model, a model name, or the string %r"
% (
self.__class__.__name__,
to,
RECURSIVE_RELATIONSHIP_CONSTANT,
)
)
if symmetrical is None:
symmetrical = to == RECURSIVE_RELATIONSHIP_CONSTANT
if through is not None and db_table is not None:
raise ValueError(
"Cannot specify a db_table if an intermediary model is used."
)
kwargs["rel"] = self.rel_class(
self,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
symmetrical=symmetrical,
through=through,
through_fields=through_fields,
db_constraint=db_constraint,
)
self.has_null_arg = "null" in kwargs
super().__init__(
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
**kwargs,
)
self.db_table = db_table
self.swappable = swappable
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_unique(**kwargs),
*self._check_relationship_model(**kwargs),
*self._check_ignored_options(**kwargs),
*self._check_table_uniqueness(**kwargs),
]
def _check_unique(self, **kwargs):
if self.unique:
return [
checks.Error(
"ManyToManyFields cannot be unique.",
obj=self,
id="fields.E330",
)
]
return []
def _check_ignored_options(self, **kwargs):
warnings = []
if self.has_null_arg:
warnings.append(
checks.Warning(
"null has no effect on ManyToManyField.",
obj=self,
id="fields.W340",
)
)
if self._validators:
warnings.append(
checks.Warning(
"ManyToManyField does not support validators.",
obj=self,
id="fields.W341",
)
)
if self.remote_field.symmetrical and self._related_name:
warnings.append(
checks.Warning(
"related_name has no effect on ManyToManyField "
'with a symmetrical relationship, e.g. to "self".',
obj=self,
id="fields.W345",
)
)
if self.db_comment:
warnings.append(
checks.Warning(
"db_comment has no effect on ManyToManyField.",
obj=self,
id="fields.W346",
)
)
return warnings
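    # Usage sketch (hypothetical model): each warning above maps to an
    # argument that is silently ignored on ManyToManyField, e.g.
    #     tags = models.ManyToManyField(Tag, null=True)         # fields.W340
    #     tags = models.ManyToManyField(Tag, validators=[...])  # fields.W341
    #     friends = models.ManyToManyField("self", related_name="buddies")
    #                                                           # fields.W345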
def _check_relationship_model(self, from_model=None, **kwargs):
if hasattr(self.remote_field.through, "_meta"):
qualified_model_name = "%s.%s" % (
self.remote_field.through._meta.app_label,
self.remote_field.through.__name__,
)
else:
qualified_model_name = self.remote_field.through
errors = []
if self.remote_field.through not in self.opts.apps.get_models(
include_auto_created=True
):
# The relationship model is not installed.
errors.append(
checks.Error(
"Field specifies a many-to-many relation through model "
"'%s', which has not been installed." % qualified_model_name,
obj=self,
id="fields.E331",
)
)
else:
            assert from_model is not None, (
                "ManyToManyField with intermediate "
                "tables cannot be checked if you don't pass the model "
                "the field is attached to."
            )
# Set some useful local variables
to_model = resolve_relation(from_model, self.remote_field.model)
from_model_name = from_model._meta.object_name
if isinstance(to_model, str):
to_model_name = to_model
else:
to_model_name = to_model._meta.object_name
relationship_model_name = self.remote_field.through._meta.object_name
self_referential = from_model == to_model
# Count foreign keys in intermediate model
if self_referential:
seen_self = sum(
from_model == getattr(field.remote_field, "model", None)
for field in self.remote_field.through._meta.fields
)
if seen_self > 2 and not self.remote_field.through_fields:
errors.append(
checks.Error(
"The model is used as an intermediate model by "
"'%s', but it has more than two foreign keys "
"to '%s', which is ambiguous. You must specify "
"which two foreign keys Django should use via the "
"through_fields keyword argument."
% (self, from_model_name),
hint=(
"Use through_fields to specify which two foreign keys "
"Django should use."
),
obj=self.remote_field.through,
id="fields.E333",
)
)
else:
# Count foreign keys in relationship model
seen_from = sum(
from_model == getattr(field.remote_field, "model", None)
for field in self.remote_field.through._meta.fields
)
seen_to = sum(
to_model == getattr(field.remote_field, "model", None)
for field in self.remote_field.through._meta.fields
)
if seen_from > 1 and not self.remote_field.through_fields:
errors.append(
checks.Error(
(
"The model is used as an intermediate model by "
"'%s', but it has more than one foreign key "
"from '%s', which is ambiguous. You must specify "
"which foreign key Django should use via the "
"through_fields keyword argument."
)
% (self, from_model_name),
hint=(
"If you want to create a recursive relationship, "
'use ManyToManyField("%s", through="%s").'
)
% (
RECURSIVE_RELATIONSHIP_CONSTANT,
relationship_model_name,
),
obj=self,
id="fields.E334",
)
)
if seen_to > 1 and not self.remote_field.through_fields:
errors.append(
checks.Error(
"The model is used as an intermediate model by "
"'%s', but it has more than one foreign key "
"to '%s', which is ambiguous. You must specify "
"which foreign key Django should use via the "
"through_fields keyword argument." % (self, to_model_name),
hint=(
"If you want to create a recursive relationship, "
'use ManyToManyField("%s", through="%s").'
)
% (
RECURSIVE_RELATIONSHIP_CONSTANT,
relationship_model_name,
),
obj=self,
id="fields.E335",
)
)
if seen_from == 0 or seen_to == 0:
errors.append(
checks.Error(
"The model is used as an intermediate model by "
"'%s', but it does not have a foreign key to '%s' or '%s'."
% (self, from_model_name, to_model_name),
obj=self.remote_field.through,
id="fields.E336",
)
)
# Validate `through_fields`.
if self.remote_field.through_fields is not None:
# Validate that we're given an iterable of at least two items
# and that none of them is "falsy".
if not (
len(self.remote_field.through_fields) >= 2
and self.remote_field.through_fields[0]
and self.remote_field.through_fields[1]
):
errors.append(
checks.Error(
"Field specifies 'through_fields' but does not provide "
"the names of the two link fields that should be used "
"for the relation through model '%s'." % qualified_model_name,
hint=(
"Make sure you specify 'through_fields' as "
"through_fields=('field1', 'field2')"
),
obj=self,
id="fields.E337",
)
)
# Validate the given through fields -- they should be actual
# fields on the through model, and also be foreign keys to the
# expected models.
else:
                assert from_model is not None, (
                    "ManyToManyField with intermediate "
                    "tables cannot be checked if you don't pass the model "
                    "the field is attached to."
                )
source, through, target = (
from_model,
self.remote_field.through,
self.remote_field.model,
)
source_field_name, target_field_name = self.remote_field.through_fields[
:2
]
for field_name, related_model in (
(source_field_name, source),
(target_field_name, target),
):
possible_field_names = []
for f in through._meta.fields:
if (
hasattr(f, "remote_field")
and getattr(f.remote_field, "model", None) == related_model
):
possible_field_names.append(f.name)
if possible_field_names:
hint = (
"Did you mean one of the following foreign keys to '%s': "
"%s?"
% (
related_model._meta.object_name,
", ".join(possible_field_names),
)
)
else:
hint = None
try:
field = through._meta.get_field(field_name)
except exceptions.FieldDoesNotExist:
errors.append(
checks.Error(
"The intermediary model '%s' has no field '%s'."
% (qualified_model_name, field_name),
hint=hint,
obj=self,
id="fields.E338",
)
)
else:
if not (
hasattr(field, "remote_field")
and getattr(field.remote_field, "model", None)
== related_model
):
errors.append(
checks.Error(
"'%s.%s' is not a foreign key to '%s'."
% (
through._meta.object_name,
field_name,
related_model._meta.object_name,
),
hint=hint,
obj=self,
id="fields.E339",
)
)
return errors
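    # Usage sketch (mirrors the through_fields example from the Django docs;
    # models are hypothetical): with two foreign keys from Membership to
    # Person, the relation is ambiguous (fields.E334) until through_fields
    # picks the pair:
    #
    #     class Group(models.Model):
    #         members = models.ManyToManyField(
    #             Person,
    #             through="Membership",
    #             through_fields=("group", "person"),
    #         )
    #
    #     class Membership(models.Model):
    #         group = models.ForeignKey(Group, on_delete=models.CASCADE)
    #         person = models.ForeignKey(Person, on_delete=models.CASCADE)
    #         inviter = models.ForeignKey(
    #             Person, on_delete=models.CASCADE, related_name="invited"
    #         )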
def _check_table_uniqueness(self, **kwargs):
if (
isinstance(self.remote_field.through, str)
or not self.remote_field.through._meta.managed
):
return []
registered_tables = {
model._meta.db_table: model
for model in self.opts.apps.get_models(include_auto_created=True)
if model != self.remote_field.through and model._meta.managed
}
m2m_db_table = self.m2m_db_table()
model = registered_tables.get(m2m_db_table)
# The second condition allows multiple m2m relations on a model if
# some point to a through model that proxies another through model.
if (
model
and model._meta.concrete_model
!= self.remote_field.through._meta.concrete_model
):
if model._meta.auto_created:
def _get_field_name(model):
for field in model._meta.auto_created._meta.many_to_many:
if field.remote_field.through is model:
return field.name
opts = model._meta.auto_created._meta
clashing_obj = "%s.%s" % (opts.label, _get_field_name(model))
else:
clashing_obj = model._meta.label
if settings.DATABASE_ROUTERS:
error_class, error_id = checks.Warning, "fields.W344"
error_hint = (
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of %r is correctly routed to a separate "
"database." % clashing_obj
)
else:
error_class, error_id = checks.Error, "fields.E340"
error_hint = None
return [
error_class(
"The field's intermediary table '%s' clashes with the "
"table name of '%s'." % (m2m_db_table, clashing_obj),
obj=self,
hint=error_hint,
id=error_id,
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
# Handle the simpler arguments.
if self.db_table is not None:
kwargs["db_table"] = self.db_table
if self.remote_field.db_constraint is not True:
kwargs["db_constraint"] = self.remote_field.db_constraint
# Lowercase model names as they should be treated as case-insensitive.
if isinstance(self.remote_field.model, str):
if "." in self.remote_field.model:
app_label, model_name = self.remote_field.model.split(".")
kwargs["to"] = "%s.%s" % (app_label, model_name.lower())
else:
kwargs["to"] = self.remote_field.model.lower()
else:
kwargs["to"] = self.remote_field.model._meta.label_lower
if getattr(self.remote_field, "through", None) is not None:
if isinstance(self.remote_field.through, str):
kwargs["through"] = self.remote_field.through
elif not self.remote_field.through._meta.auto_created:
kwargs["through"] = self.remote_field.through._meta.label
# If swappable is True, then see if we're actually pointing to the target
# of a swap.
swappable_setting = self.swappable_setting
if swappable_setting is not None:
# If it's already a settings reference, error.
if hasattr(kwargs["to"], "setting_name"):
if kwargs["to"].setting_name != swappable_setting:
raise ValueError(
"Cannot deconstruct a ManyToManyField pointing to a "
"model that is swapped in place of more than one model "
"(%s and %s)" % (kwargs["to"].setting_name, swappable_setting)
)
kwargs["to"] = SettingsReference(
kwargs["to"],
swappable_setting,
)
return name, path, args, kwargs
def _get_path_info(self, direct=False, filtered_relation=None):
"""Called by both direct and indirect m2m traversal."""
int_model = self.remote_field.through
linkfield1 = int_model._meta.get_field(self.m2m_field_name())
linkfield2 = int_model._meta.get_field(self.m2m_reverse_field_name())
if direct:
join1infos = linkfield1.reverse_path_infos
if filtered_relation:
join2infos = linkfield2.get_path_info(filtered_relation)
else:
join2infos = linkfield2.path_infos
else:
join1infos = linkfield2.reverse_path_infos
if filtered_relation:
join2infos = linkfield1.get_path_info(filtered_relation)
else:
join2infos = linkfield1.path_infos
# Get join infos between the last model of join 1 and the first model
# of join 2. Assume the only reason these may differ is due to model
# inheritance.
join1_final = join1infos[-1].to_opts
join2_initial = join2infos[0].from_opts
if join1_final is join2_initial:
intermediate_infos = []
elif issubclass(join1_final.model, join2_initial.model):
intermediate_infos = join1_final.get_path_to_parent(join2_initial.model)
else:
intermediate_infos = join2_initial.get_path_from_parent(join1_final.model)
return [*join1infos, *intermediate_infos, *join2infos]
def get_path_info(self, filtered_relation=None):
return self._get_path_info(direct=True, filtered_relation=filtered_relation)
@cached_property
def path_infos(self):
return self.get_path_info()
def get_reverse_path_info(self, filtered_relation=None):
return self._get_path_info(direct=False, filtered_relation=filtered_relation)
@cached_property
def reverse_path_infos(self):
return self.get_reverse_path_info()
def _get_m2m_db_table(self, opts):
"""
Function that can be curried to provide the m2m table name for this
relation.
"""
if self.remote_field.through is not None:
return self.remote_field.through._meta.db_table
elif self.db_table:
return self.db_table
else:
m2m_table_name = "%s_%s" % (utils.strip_quotes(opts.db_table), self.name)
return utils.truncate_name(m2m_table_name, connection.ops.max_name_length())
def _get_m2m_attr(self, related, attr):
"""
Function that can be curried to provide the source accessor or DB
column name for the m2m table.
"""
cache_attr = "_m2m_%s_cache" % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
if self.remote_field.through_fields is not None:
link_field_name = self.remote_field.through_fields[0]
else:
link_field_name = None
for f in self.remote_field.through._meta.fields:
if (
f.is_relation
and f.remote_field.model == related.related_model
and (link_field_name is None or link_field_name == f.name)
):
setattr(self, cache_attr, getattr(f, attr))
return getattr(self, cache_attr)
def _get_m2m_reverse_attr(self, related, attr):
"""
Function that can be curried to provide the related accessor or DB
column name for the m2m table.
"""
cache_attr = "_m2m_reverse_%s_cache" % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
found = False
if self.remote_field.through_fields is not None:
link_field_name = self.remote_field.through_fields[1]
else:
link_field_name = None
for f in self.remote_field.through._meta.fields:
if f.is_relation and f.remote_field.model == related.model:
if link_field_name is None and related.related_model == related.model:
# If this is an m2m-intermediate to self,
# the first foreign key you find will be
# the source column. Keep searching for
# the second foreign key.
if found:
setattr(self, cache_attr, getattr(f, attr))
break
else:
found = True
elif link_field_name is None or link_field_name == f.name:
setattr(self, cache_attr, getattr(f, attr))
break
return getattr(self, cache_attr)
def contribute_to_class(self, cls, name, **kwargs):
# To support multiple relations to self, it's useful to have a non-None
# related name on symmetrical relations for internal reasons. The
# concept doesn't make a lot of sense externally ("you want me to
# specify *what* on my non-reversible relation?!"), so we set it up
# automatically. The funky name reduces the chance of an accidental
# clash.
if self.remote_field.symmetrical and (
self.remote_field.model == RECURSIVE_RELATIONSHIP_CONSTANT
or self.remote_field.model == cls._meta.object_name
):
self.remote_field.related_name = "%s_rel_+" % name
elif self.remote_field.is_hidden():
# If the backwards relation is disabled, replace the original
# related_name with one generated from the m2m field name. Django
# still uses backwards relations internally and we need to avoid
# clashes between multiple m2m fields with related_name == '+'.
self.remote_field.related_name = "_%s_%s_%s_+" % (
cls._meta.app_label,
cls.__name__.lower(),
name,
)
super().contribute_to_class(cls, name, **kwargs)
        # The intermediate m2m model is not auto created if:
        #  1) There is a manually specified intermediate, or
        #  2) The class owning the m2m field is abstract, or
        #  3) The class owning the m2m field has been swapped out.
if not cls._meta.abstract:
if self.remote_field.through:
def resolve_through_model(_, model, field):
field.remote_field.through = model
lazy_related_operation(
resolve_through_model, cls, self.remote_field.through, field=self
)
elif not cls._meta.swapped:
self.remote_field.through = create_many_to_many_intermediary_model(
self, cls
)
# Add the descriptor for the m2m relation.
setattr(cls, self.name, ManyToManyDescriptor(self.remote_field, reverse=False))
# Set up the accessor for the m2m table name for the relation.
self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta)
def contribute_to_related_class(self, cls, related):
# Internal M2Ms (i.e., those with a related name ending with '+')
# and swapped models don't get a related descriptor.
if (
not self.remote_field.is_hidden()
and not related.related_model._meta.swapped
):
setattr(
cls,
related.get_accessor_name(),
ManyToManyDescriptor(self.remote_field, reverse=True),
)
# Set up the accessors for the column names on the m2m table.
self.m2m_column_name = partial(self._get_m2m_attr, related, "column")
self.m2m_reverse_name = partial(self._get_m2m_reverse_attr, related, "column")
self.m2m_field_name = partial(self._get_m2m_attr, related, "name")
self.m2m_reverse_field_name = partial(
self._get_m2m_reverse_attr, related, "name"
)
get_m2m_rel = partial(self._get_m2m_attr, related, "remote_field")
self.m2m_target_field_name = lambda: get_m2m_rel().field_name
get_m2m_reverse_rel = partial(
self._get_m2m_reverse_attr, related, "remote_field"
)
self.m2m_reverse_target_field_name = lambda: get_m2m_reverse_rel().field_name
def set_attributes_from_rel(self):
pass
def value_from_object(self, obj):
return [] if obj.pk is None else list(getattr(obj, self.attname).all())
def save_form_data(self, instance, data):
getattr(instance, self.attname).set(data)
def formfield(self, *, using=None, **kwargs):
defaults = {
"form_class": forms.ModelMultipleChoiceField,
"queryset": self.remote_field.model._default_manager.using(using),
**kwargs,
}
# If initial is passed in, it's a list of related objects, but the
# MultipleChoiceField takes a list of IDs.
if defaults.get("initial") is not None:
initial = defaults["initial"]
if callable(initial):
initial = initial()
defaults["initial"] = [i.pk for i in initial]
return super().formfield(**defaults)
def db_check(self, connection):
return None
def db_type(self, connection):
# A ManyToManyField is not represented by a single column,
# so return None.
return None
def db_parameters(self, connection):
return {"type": None, "check": None}
import json
import warnings
from django import forms
from django.core import checks, exceptions
from django.db import NotSupportedError, connections, router
from django.db.models import expressions, lookups
from django.db.models.constants import LOOKUP_SEP
from django.db.models.fields import TextField
from django.db.models.lookups import (
FieldGetDbPrepValueMixin,
PostgresOperatorLookup,
Transform,
)
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.translation import gettext_lazy as _
from . import Field
from .mixins import CheckFieldDefaultMixin
__all__ = ["JSONField"]
class JSONField(CheckFieldDefaultMixin, Field):
empty_strings_allowed = False
description = _("A JSON object")
default_error_messages = {
"invalid": _("Value must be valid JSON."),
}
_default_hint = ("dict", "{}")
def __init__(
self,
verbose_name=None,
name=None,
encoder=None,
decoder=None,
**kwargs,
):
if encoder and not callable(encoder):
raise ValueError("The encoder parameter must be a callable object.")
if decoder and not callable(decoder):
raise ValueError("The decoder parameter must be a callable object.")
self.encoder = encoder
self.decoder = decoder
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super().check(**kwargs)
databases = kwargs.get("databases") or []
errors.extend(self._check_supported(databases))
return errors
def _check_supported(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if (
self.model._meta.required_db_vendor
and self.model._meta.required_db_vendor != connection.vendor
):
continue
if not (
"supports_json_field" in self.model._meta.required_db_features
or connection.features.supports_json_field
):
errors.append(
checks.Error(
"%s does not support JSONFields." % connection.display_name,
obj=self.model,
id="fields.E180",
)
)
return errors
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.encoder is not None:
kwargs["encoder"] = self.encoder
if self.decoder is not None:
kwargs["decoder"] = self.decoder
return name, path, args, kwargs
def from_db_value(self, value, expression, connection):
if value is None:
return value
# Some backends (SQLite at least) extract non-string values in their
# SQL datatypes.
if isinstance(expression, KeyTransform) and not isinstance(value, str):
return value
try:
return json.loads(value, cls=self.decoder)
except json.JSONDecodeError:
return value
def get_internal_type(self):
return "JSONField"
def get_db_prep_value(self, value, connection, prepared=False):
# RemovedInDjango51Warning: When the deprecation ends, replace with:
# if (
# isinstance(value, expressions.Value)
# and isinstance(value.output_field, JSONField)
# ):
# value = value.value
# elif hasattr(value, "as_sql"): ...
if isinstance(value, expressions.Value):
if isinstance(value.value, str) and not isinstance(
value.output_field, JSONField
):
try:
value = json.loads(value.value, cls=self.decoder)
except json.JSONDecodeError:
value = value.value
else:
warnings.warn(
"Providing an encoded JSON string via Value() is deprecated. "
f"Use Value({value!r}, output_field=JSONField()) instead.",
category=RemovedInDjango51Warning,
)
elif isinstance(value.output_field, JSONField):
value = value.value
else:
return value
elif hasattr(value, "as_sql"):
return value
return connection.ops.adapt_json_value(value, self.encoder)
def get_db_prep_save(self, value, connection):
if value is None:
return value
return self.get_db_prep_value(value, connection)
def get_transform(self, name):
transform = super().get_transform(name)
if transform:
return transform
return KeyTransformFactory(name)
def validate(self, value, model_instance):
super().validate(value, model_instance)
try:
json.dumps(value, cls=self.encoder)
except TypeError:
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def value_to_string(self, obj):
return self.value_from_object(obj)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.JSONField,
"encoder": self.encoder,
"decoder": self.decoder,
**kwargs,
}
)
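# Usage sketch (hypothetical Event model, not executed here): a custom
# encoder is used when serializing values for the database and the decoder
# when loading them back:
#
#     from django.core.serializers.json import DjangoJSONEncoder
#
#     class Event(models.Model):
#         payload = models.JSONField(encoder=DjangoJSONEncoder, default=dict)
#
#     Event.objects.create(payload={"tags": ["a", "b"], "count": 3})
#     Event.objects.filter(payload__tags__0="a")  # key/index transforms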
def compile_json_path(key_transforms, include_root=True):
path = ["$"] if include_root else []
for key_transform in key_transforms:
try:
num = int(key_transform)
except ValueError: # non-integer
path.append(".")
path.append(json.dumps(key_transform))
else:
path.append("[%s]" % num)
return "".join(path)
class DataContains(FieldGetDbPrepValueMixin, PostgresOperatorLookup):
lookup_name = "contains"
postgres_operator = "@>"
def as_sql(self, compiler, connection):
if not connection.features.supports_json_field_contains:
raise NotSupportedError(
"contains lookup is not supported on this database backend."
)
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = tuple(lhs_params) + tuple(rhs_params)
return "JSON_CONTAINS(%s, %s)" % (lhs, rhs), params
class ContainedBy(FieldGetDbPrepValueMixin, PostgresOperatorLookup):
lookup_name = "contained_by"
postgres_operator = "<@"
def as_sql(self, compiler, connection):
if not connection.features.supports_json_field_contains:
raise NotSupportedError(
"contained_by lookup is not supported on this database backend."
)
lhs, lhs_params = self.process_lhs(compiler, connection)
rhs, rhs_params = self.process_rhs(compiler, connection)
params = tuple(rhs_params) + tuple(lhs_params)
return "JSON_CONTAINS(%s, %s)" % (rhs, lhs), params
class HasKeyLookup(PostgresOperatorLookup):
logical_operator = None
def compile_json_path_final_key(self, key_transform):
# Compile the final key without interpreting ints as array elements.
return ".%s" % json.dumps(key_transform)
def as_sql(self, compiler, connection, template=None):
# Process JSON path from the left-hand side.
if isinstance(self.lhs, KeyTransform):
lhs, lhs_params, lhs_key_transforms = self.lhs.preprocess_lhs(
compiler, connection
)
lhs_json_path = compile_json_path(lhs_key_transforms)
else:
lhs, lhs_params = self.process_lhs(compiler, connection)
lhs_json_path = "$"
sql = template % lhs
# Process JSON path from the right-hand side.
rhs = self.rhs
rhs_params = []
if not isinstance(rhs, (list, tuple)):
rhs = [rhs]
for key in rhs:
if isinstance(key, KeyTransform):
*_, rhs_key_transforms = key.preprocess_lhs(compiler, connection)
else:
rhs_key_transforms = [key]
*rhs_key_transforms, final_key = rhs_key_transforms
rhs_json_path = compile_json_path(rhs_key_transforms, include_root=False)
rhs_json_path += self.compile_json_path_final_key(final_key)
rhs_params.append(lhs_json_path + rhs_json_path)
# Add condition for each key.
if self.logical_operator:
sql = "(%s)" % self.logical_operator.join([sql] * len(rhs_params))
return sql, tuple(lhs_params) + tuple(rhs_params)
def as_mysql(self, compiler, connection):
return self.as_sql(
compiler, connection, template="JSON_CONTAINS_PATH(%s, 'one', %%s)"
)
def as_oracle(self, compiler, connection):
sql, params = self.as_sql(
compiler, connection, template="JSON_EXISTS(%s, '%%s')"
)
# Add paths directly into SQL because path expressions cannot be passed
# as bind variables on Oracle.
return sql % tuple(params), []
def as_postgresql(self, compiler, connection):
if isinstance(self.rhs, KeyTransform):
*_, rhs_key_transforms = self.rhs.preprocess_lhs(compiler, connection)
for key in rhs_key_transforms[:-1]:
self.lhs = KeyTransform(key, self.lhs)
self.rhs = rhs_key_transforms[-1]
return super().as_postgresql(compiler, connection)
def as_sqlite(self, compiler, connection):
return self.as_sql(
compiler, connection, template="JSON_TYPE(%s, %%s) IS NOT NULL"
)
class HasKey(HasKeyLookup):
lookup_name = "has_key"
postgres_operator = "?"
prepare_rhs = False
class HasKeys(HasKeyLookup):
lookup_name = "has_keys"
postgres_operator = "?&"
logical_operator = " AND "
def get_prep_lookup(self):
return [str(item) for item in self.rhs]
class HasAnyKeys(HasKeys):
lookup_name = "has_any_keys"
postgres_operator = "?|"
logical_operator = " OR "
class HasKeyOrArrayIndex(HasKey):
def compile_json_path_final_key(self, key_transform):
return compile_json_path([key_transform], include_root=False)
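# Usage sketch (hypothetical Event model with a JSONField "payload"):
#     Event.objects.filter(payload__has_key="tags")
#     Event.objects.filter(payload__has_keys=["tags", "when"])   # AND
#     Event.objects.filter(payload__has_any_keys=["tags", "x"])  # OR
# On PostgreSQL these compile to the ?, ?& and ?| operators; elsewhere the
# templates above (JSON_CONTAINS_PATH, JSON_EXISTS, JSON_TYPE) are used.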
class CaseInsensitiveMixin:
"""
Mixin to allow case-insensitive comparison of JSON values on MySQL.
MySQL handles strings used in JSON context using the utf8mb4_bin collation.
Because utf8mb4_bin is a binary collation, comparison of JSON values is
case-sensitive.
"""
def process_lhs(self, compiler, connection):
lhs, lhs_params = super().process_lhs(compiler, connection)
if connection.vendor == "mysql":
return "LOWER(%s)" % lhs, lhs_params
return lhs, lhs_params
def process_rhs(self, compiler, connection):
rhs, rhs_params = super().process_rhs(compiler, connection)
if connection.vendor == "mysql":
return "LOWER(%s)" % rhs, rhs_params
return rhs, rhs_params
class JSONExact(lookups.Exact):
can_use_none_as_rhs = True
def process_rhs(self, compiler, connection):
rhs, rhs_params = super().process_rhs(compiler, connection)
# Treat None lookup values as null.
if rhs == "%s" and rhs_params == [None]:
rhs_params = ["null"]
if connection.vendor == "mysql":
func = ["JSON_EXTRACT(%s, '$')"] * len(rhs_params)
rhs %= tuple(func)
return rhs, rhs_params
class JSONIContains(CaseInsensitiveMixin, lookups.IContains):
pass
JSONField.register_lookup(DataContains)
JSONField.register_lookup(ContainedBy)
JSONField.register_lookup(HasKey)
JSONField.register_lookup(HasKeys)
JSONField.register_lookup(HasAnyKeys)
JSONField.register_lookup(JSONExact)
JSONField.register_lookup(JSONIContains)
class KeyTransform(Transform):
postgres_operator = "->"
postgres_nested_operator = "#>"
def __init__(self, key_name, *args, **kwargs):
super().__init__(*args, **kwargs)
self.key_name = str(key_name)
def preprocess_lhs(self, compiler, connection):
key_transforms = [self.key_name]
previous = self.lhs
while isinstance(previous, KeyTransform):
key_transforms.insert(0, previous.key_name)
previous = previous.lhs
lhs, params = compiler.compile(previous)
if connection.vendor == "oracle":
# Escape string-formatting.
key_transforms = [key.replace("%", "%%") for key in key_transforms]
return lhs, params, key_transforms
def as_mysql(self, compiler, connection):
lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
json_path = compile_json_path(key_transforms)
return "JSON_EXTRACT(%s, %%s)" % lhs, tuple(params) + (json_path,)
def as_oracle(self, compiler, connection):
lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
json_path = compile_json_path(key_transforms)
return (
"COALESCE(JSON_QUERY(%s, '%s'), JSON_VALUE(%s, '%s'))"
% ((lhs, json_path) * 2)
), tuple(params) * 2
def as_postgresql(self, compiler, connection):
lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
if len(key_transforms) > 1:
sql = "(%s %s %%s)" % (lhs, self.postgres_nested_operator)
return sql, tuple(params) + (key_transforms,)
try:
lookup = int(self.key_name)
except ValueError:
lookup = self.key_name
return "(%s %s %%s)" % (lhs, self.postgres_operator), tuple(params) + (lookup,)
def as_sqlite(self, compiler, connection):
lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
json_path = compile_json_path(key_transforms)
datatype_values = ",".join(
[repr(datatype) for datatype in connection.ops.jsonfield_datatype_values]
)
return (
"(CASE WHEN JSON_TYPE(%s, %%s) IN (%s) "
"THEN JSON_TYPE(%s, %%s) ELSE JSON_EXTRACT(%s, %%s) END)"
) % (lhs, datatype_values, lhs, lhs), (tuple(params) + (json_path,)) * 3
class KeyTextTransform(KeyTransform):
postgres_operator = "->>"
postgres_nested_operator = "#>>"
output_field = TextField()
def as_mysql(self, compiler, connection):
if connection.mysql_is_mariadb:
# MariaDB doesn't support -> and ->> operators (see MDEV-13594).
sql, params = super().as_mysql(compiler, connection)
return "JSON_UNQUOTE(%s)" % sql, params
else:
lhs, params, key_transforms = self.preprocess_lhs(compiler, connection)
json_path = compile_json_path(key_transforms)
return "(%s ->> %%s)" % lhs, tuple(params) + (json_path,)
@classmethod
def from_lookup(cls, lookup):
transform, *keys = lookup.split(LOOKUP_SEP)
if not keys:
raise ValueError("Lookup must contain key or index transforms.")
for key in keys:
transform = cls(key, transform)
return transform
KT = KeyTextTransform.from_lookup
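# Usage sketch (hypothetical Dog model with a JSONField "data"): KT (defined
# above) extracts a key as text, so text lookups and ordering apply directly:
#     Dog.objects.annotate(breed=KT("data__breed")).filter(
#         breed__icontains="collie"
#     )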
class KeyTransformTextLookupMixin:
"""
Mixin for combining with a lookup expecting a text lhs from a JSONField
key lookup. On PostgreSQL, make use of the ->> operator instead of casting
key values to text and performing the lookup on the resulting
representation.
"""
def __init__(self, key_transform, *args, **kwargs):
if not isinstance(key_transform, KeyTransform):
raise TypeError(
"Transform should be an instance of KeyTransform in order to "
"use this lookup."
)
key_text_transform = KeyTextTransform(
key_transform.key_name,
*key_transform.source_expressions,
**key_transform.extra,
)
super().__init__(key_text_transform, *args, **kwargs)
class KeyTransformIsNull(lookups.IsNull):
# key__isnull=False is the same as has_key='key'
def as_oracle(self, compiler, connection):
sql, params = HasKeyOrArrayIndex(
self.lhs.lhs,
self.lhs.key_name,
).as_oracle(compiler, connection)
if not self.rhs:
return sql, params
# Column doesn't have a key or IS NULL.
lhs, lhs_params, _ = self.lhs.preprocess_lhs(compiler, connection)
return "(NOT %s OR %s IS NULL)" % (sql, lhs), tuple(params) + tuple(lhs_params)
def as_sqlite(self, compiler, connection):
template = "JSON_TYPE(%s, %%s) IS NULL"
if not self.rhs:
template = "JSON_TYPE(%s, %%s) IS NOT NULL"
return HasKeyOrArrayIndex(self.lhs.lhs, self.lhs.key_name).as_sql(
compiler,
connection,
template=template,
)
class KeyTransformIn(lookups.In):
def resolve_expression_parameter(self, compiler, connection, sql, param):
sql, params = super().resolve_expression_parameter(
compiler,
connection,
sql,
param,
)
if (
not hasattr(param, "as_sql")
and not connection.features.has_native_json_field
):
if connection.vendor == "oracle":
value = json.loads(param)
sql = "%s(JSON_OBJECT('value' VALUE %%s FORMAT JSON), '$.value')"
if isinstance(value, (list, dict)):
sql %= "JSON_QUERY"
else:
sql %= "JSON_VALUE"
elif connection.vendor == "mysql" or (
connection.vendor == "sqlite"
and params[0] not in connection.ops.jsonfield_datatype_values
):
sql = "JSON_EXTRACT(%s, '$')"
if connection.vendor == "mysql" and connection.mysql_is_mariadb:
sql = "JSON_UNQUOTE(%s)" % sql
return sql, params
class KeyTransformExact(JSONExact):
def process_rhs(self, compiler, connection):
if isinstance(self.rhs, KeyTransform):
return super(lookups.Exact, self).process_rhs(compiler, connection)
rhs, rhs_params = super().process_rhs(compiler, connection)
if connection.vendor == "oracle":
func = []
sql = "%s(JSON_OBJECT('value' VALUE %%s FORMAT JSON), '$.value')"
for value in rhs_params:
value = json.loads(value)
if isinstance(value, (list, dict)):
func.append(sql % "JSON_QUERY")
else:
func.append(sql % "JSON_VALUE")
rhs %= tuple(func)
elif connection.vendor == "sqlite":
func = []
for value in rhs_params:
if value in connection.ops.jsonfield_datatype_values:
func.append("%s")
else:
func.append("JSON_EXTRACT(%s, '$')")
rhs %= tuple(func)
return rhs, rhs_params
def as_oracle(self, compiler, connection):
rhs, rhs_params = super().process_rhs(compiler, connection)
if rhs_params == ["null"]:
# Field has key and it's NULL.
has_key_expr = HasKeyOrArrayIndex(self.lhs.lhs, self.lhs.key_name)
has_key_sql, has_key_params = has_key_expr.as_oracle(compiler, connection)
is_null_expr = self.lhs.get_lookup("isnull")(self.lhs, True)
is_null_sql, is_null_params = is_null_expr.as_sql(compiler, connection)
return (
"%s AND %s" % (has_key_sql, is_null_sql),
tuple(has_key_params) + tuple(is_null_params),
)
return super().as_sql(compiler, connection)
class KeyTransformIExact(
CaseInsensitiveMixin, KeyTransformTextLookupMixin, lookups.IExact
):
pass
class KeyTransformIContains(
CaseInsensitiveMixin, KeyTransformTextLookupMixin, lookups.IContains
):
pass
class KeyTransformStartsWith(KeyTransformTextLookupMixin, lookups.StartsWith):
pass
class KeyTransformIStartsWith(
CaseInsensitiveMixin, KeyTransformTextLookupMixin, lookups.IStartsWith
):
pass
class KeyTransformEndsWith(KeyTransformTextLookupMixin, lookups.EndsWith):
pass
class KeyTransformIEndsWith(
CaseInsensitiveMixin, KeyTransformTextLookupMixin, lookups.IEndsWith
):
pass
class KeyTransformRegex(KeyTransformTextLookupMixin, lookups.Regex):
pass
class KeyTransformIRegex(
CaseInsensitiveMixin, KeyTransformTextLookupMixin, lookups.IRegex
):
pass
class KeyTransformNumericLookupMixin:
def process_rhs(self, compiler, connection):
rhs, rhs_params = super().process_rhs(compiler, connection)
if not connection.features.has_native_json_field:
rhs_params = [json.loads(value) for value in rhs_params]
return rhs, rhs_params
class KeyTransformLt(KeyTransformNumericLookupMixin, lookups.LessThan):
pass
class KeyTransformLte(KeyTransformNumericLookupMixin, lookups.LessThanOrEqual):
pass
class KeyTransformGt(KeyTransformNumericLookupMixin, lookups.GreaterThan):
pass
class KeyTransformGte(KeyTransformNumericLookupMixin, lookups.GreaterThanOrEqual):
pass
KeyTransform.register_lookup(KeyTransformIn)
KeyTransform.register_lookup(KeyTransformExact)
KeyTransform.register_lookup(KeyTransformIExact)
KeyTransform.register_lookup(KeyTransformIsNull)
KeyTransform.register_lookup(KeyTransformIContains)
KeyTransform.register_lookup(KeyTransformStartsWith)
KeyTransform.register_lookup(KeyTransformIStartsWith)
KeyTransform.register_lookup(KeyTransformEndsWith)
KeyTransform.register_lookup(KeyTransformIEndsWith)
KeyTransform.register_lookup(KeyTransformRegex)
KeyTransform.register_lookup(KeyTransformIRegex)
KeyTransform.register_lookup(KeyTransformLt)
KeyTransform.register_lookup(KeyTransformLte)
KeyTransform.register_lookup(KeyTransformGt)
KeyTransform.register_lookup(KeyTransformGte)
class KeyTransformFactory:
def __init__(self, key_name):
self.key_name = key_name
def __call__(self, *args, **kwargs):
return KeyTransform(self.key_name, *args, **kwargs)
from django.db import NotSupportedError
from django.db.models.expressions import Func, Value
from django.db.models.fields import CharField, IntegerField, TextField
from django.db.models.functions import Cast, Coalesce
from django.db.models.lookups import Transform
class MySQLSHA2Mixin:
    def as_mysql(self, compiler, connection, **extra_context):
        return super().as_sql(
            compiler,
            connection,
            template="SHA2(%%(expressions)s, %s)" % self.function[3:],
            **extra_context,
        )
class OracleHashMixin:
def as_oracle(self, compiler, connection, **extra_context):
return super().as_sql(
compiler,
connection,
template=(
"LOWER(RAWTOHEX(STANDARD_HASH(UTL_I18N.STRING_TO_RAW("
"%(expressions)s, 'AL32UTF8'), '%(function)s')))"
),
**extra_context,
)
class PostgreSQLSHAMixin:
    def as_postgresql(self, compiler, connection, **extra_context):
        return super().as_sql(
            compiler,
            connection,
            template="ENCODE(DIGEST(%(expressions)s, '%(function)s'), 'hex')",
            function=self.function.lower(),
            **extra_context,
        )
class Chr(Transform):
function = "CHR"
lookup_name = "chr"
def as_mysql(self, compiler, connection, **extra_context):
return super().as_sql(
compiler,
connection,
function="CHAR",
template="%(function)s(%(expressions)s USING utf16)",
**extra_context,
)
def as_oracle(self, compiler, connection, **extra_context):
return super().as_sql(
compiler,
connection,
template="%(function)s(%(expressions)s USING NCHAR_CS)",
**extra_context,
)
def as_sqlite(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="CHAR", **extra_context)
class ConcatPair(Func):
"""
Concatenate two arguments together. This is used by `Concat` because not
all backend databases support more than two arguments.
"""
function = "CONCAT"
def as_sqlite(self, compiler, connection, **extra_context):
coalesced = self.coalesce()
return super(ConcatPair, coalesced).as_sql(
compiler,
connection,
template="%(expressions)s",
arg_joiner=" || ",
**extra_context,
)
def as_postgresql(self, compiler, connection, **extra_context):
copy = self.copy()
copy.set_source_expressions(
[
Cast(expression, TextField())
for expression in copy.get_source_expressions()
]
)
return super(ConcatPair, copy).as_sql(
compiler,
connection,
**extra_context,
)
def as_mysql(self, compiler, connection, **extra_context):
# Use CONCAT_WS with an empty separator so that NULLs are ignored.
return super().as_sql(
compiler,
connection,
function="CONCAT_WS",
template="%(function)s('', %(expressions)s)",
**extra_context,
)
def coalesce(self):
# null on either side results in null for expression, wrap with coalesce
c = self.copy()
c.set_source_expressions(
[
Coalesce(expression, Value(""))
for expression in c.get_source_expressions()
]
)
return c
class Concat(Func):
"""
Concatenate text fields together. Backends that result in an entire
null expression when any arguments are null will wrap each argument in
coalesce functions to ensure a non-null result.
"""
function = None
template = "%(expressions)s"
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError("Concat must take at least two expressions")
paired = self._paired(expressions)
super().__init__(paired, **extra)
def _paired(self, expressions):
# wrap pairs of expressions in successive concat functions
# exp = [a, b, c, d]
        # -> ConcatPair(a, ConcatPair(b, ConcatPair(c, d)))
if len(expressions) == 2:
return ConcatPair(*expressions)
return ConcatPair(expressions[0], self._paired(expressions[1:]))
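# Usage sketch (hypothetical Author model): Concat nests ConcatPair
# right-to-left, so three expressions become
# ConcatPair("first_name", ConcatPair(Value(" "), "last_name")):
#     from django.db.models import Value
#     from django.db.models.functions import Concat
#     Author.objects.annotate(
#         screen_name=Concat("first_name", Value(" "), "last_name")
#     )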
class Left(Func):
function = "LEFT"
arity = 2
output_field = CharField()
def __init__(self, expression, length, **extra):
"""
expression: the name of a field, or an expression returning a string
length: the number of characters to return from the start of the string
"""
if not hasattr(length, "resolve_expression"):
if length < 1:
raise ValueError("'length' must be greater than 0.")
super().__init__(expression, length, **extra)
def get_substr(self):
return Substr(self.source_expressions[0], Value(1), self.source_expressions[1])
def as_oracle(self, compiler, connection, **extra_context):
return self.get_substr().as_oracle(compiler, connection, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
return self.get_substr().as_sqlite(compiler, connection, **extra_context)
class Length(Transform):
"""Return the number of characters in the expression."""
function = "LENGTH"
lookup_name = "length"
output_field = IntegerField()
def as_mysql(self, compiler, connection, **extra_context):
return super().as_sql(
compiler, connection, function="CHAR_LENGTH", **extra_context
)
class Lower(Transform):
function = "LOWER"
lookup_name = "lower"
class LPad(Func):
function = "LPAD"
output_field = CharField()
def __init__(self, expression, length, fill_text=Value(" "), **extra):
if (
not hasattr(length, "resolve_expression")
and length is not None
and length < 0
):
raise ValueError("'length' must be greater or equal to 0.")
super().__init__(expression, length, fill_text, **extra)
class LTrim(Transform):
function = "LTRIM"
lookup_name = "ltrim"
class MD5(OracleHashMixin, Transform):
function = "MD5"
lookup_name = "md5"
class Ord(Transform):
function = "ASCII"
lookup_name = "ord"
output_field = IntegerField()
def as_mysql(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="ORD", **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="UNICODE", **extra_context)
class Repeat(Func):
function = "REPEAT"
output_field = CharField()
def __init__(self, expression, number, **extra):
if (
not hasattr(number, "resolve_expression")
and number is not None
and number < 0
):
raise ValueError("'number' must be greater or equal to 0.")
super().__init__(expression, number, **extra)
def as_oracle(self, compiler, connection, **extra_context):
expression, number = self.source_expressions
length = None if number is None else Length(expression) * number
rpad = RPad(expression, length, expression)
return rpad.as_sql(compiler, connection, **extra_context)
class Replace(Func):
function = "REPLACE"
def __init__(self, expression, text, replacement=Value(""), **extra):
super().__init__(expression, text, replacement, **extra)
class Reverse(Transform):
function = "REVERSE"
lookup_name = "reverse"
def as_oracle(self, compiler, connection, **extra_context):
# REVERSE in Oracle is undocumented and doesn't support multi-byte
# strings. Use a special subquery instead.
return super().as_sql(
compiler,
connection,
template=(
"(SELECT LISTAGG(s) WITHIN GROUP (ORDER BY n DESC) FROM "
"(SELECT LEVEL n, SUBSTR(%(expressions)s, LEVEL, 1) s "
"FROM DUAL CONNECT BY LEVEL <= LENGTH(%(expressions)s)) "
"GROUP BY %(expressions)s)"
),
**extra_context,
)
class Right(Left):
function = "RIGHT"
def get_substr(self):
return Substr(
self.source_expressions[0], self.source_expressions[1] * Value(-1)
)
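# Usage sketch (hypothetical Author model): on backends without native
# LEFT/RIGHT (Oracle, SQLite), these rewrite to Substr; e.g. Right("name", 3)
# becomes Substr("name", -3):
#     from django.db.models.functions import Left, Right
#     Author.objects.annotate(initial=Left("name", 1), tail=Right("name", 3))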
class RPad(LPad):
function = "RPAD"
class RTrim(Transform):
function = "RTRIM"
lookup_name = "rtrim"
class SHA1(OracleHashMixin, PostgreSQLSHAMixin, Transform):
function = "SHA1"
lookup_name = "sha1"
class SHA224(MySQLSHA2Mixin, PostgreSQLSHAMixin, Transform):
function = "SHA224"
lookup_name = "sha224"
def as_oracle(self, compiler, connection, **extra_context):
raise NotSupportedError("SHA224 is not supported on Oracle.")
class SHA256(MySQLSHA2Mixin, OracleHashMixin, PostgreSQLSHAMixin, Transform):
function = "SHA256"
lookup_name = "sha256"
class SHA384(MySQLSHA2Mixin, OracleHashMixin, PostgreSQLSHAMixin, Transform):
function = "SHA384"
lookup_name = "sha384"
class SHA512(MySQLSHA2Mixin, OracleHashMixin, PostgreSQLSHAMixin, Transform):
function = "SHA512"
lookup_name = "sha512"
class StrIndex(Func):
"""
Return a positive integer corresponding to the 1-indexed position of the
first occurrence of a substring inside another string, or 0 if the
substring is not found.
"""
function = "INSTR"
arity = 2
output_field = IntegerField()
def as_postgresql(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="STRPOS", **extra_context)
class Substr(Func):
function = "SUBSTRING"
output_field = CharField()
def __init__(self, expression, pos, length=None, **extra):
"""
expression: the name of a field, or an expression returning a string
pos: an integer > 0, or an expression returning an integer
length: an optional number of characters to return
"""
if not hasattr(pos, "resolve_expression"):
if pos < 1:
raise ValueError("'pos' must be greater than 0")
expressions = [expression, pos]
if length is not None:
expressions.append(length)
super().__init__(*expressions, **extra)
def as_sqlite(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="SUBSTR", **extra_context)
def as_oracle(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="SUBSTR", **extra_context)
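# Usage sketch (hypothetical Author model): pos is 1-indexed and length is
# optional, so this returns characters 2 through 4 of the name:
#     from django.db.models.functions import Substr
#     Author.objects.annotate(fragment=Substr("name", 2, 3))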
class Trim(Transform):
function = "TRIM"
lookup_name = "trim"
class Upper(Transform):
function = "UPPER"
lookup_name = "upper"
"""Database functions that do comparisons or type conversions."""
from django.db import NotSupportedError
from django.db.models.expressions import Func, Value
from django.db.models.fields import TextField
from django.db.models.fields.json import JSONField
from django.utils.regex_helper import _lazy_re_compile
class Cast(Func):
"""Coerce an expression to a new field type."""
function = "CAST"
template = "%(function)s(%(expressions)s AS %(db_type)s)"
def __init__(self, expression, output_field):
super().__init__(expression, output_field=output_field)
def as_sql(self, compiler, connection, **extra_context):
extra_context["db_type"] = self.output_field.cast_db_type(connection)
return super().as_sql(compiler, connection, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
db_type = self.output_field.db_type(connection)
if db_type in {"datetime", "time"}:
# Use strftime as datetime/time don't keep fractional seconds.
template = "strftime(%%s, %(expressions)s)"
sql, params = super().as_sql(
compiler, connection, template=template, **extra_context
)
format_string = "%H:%M:%f" if db_type == "time" else "%Y-%m-%d %H:%M:%f"
params.insert(0, format_string)
return sql, params
elif db_type == "date":
template = "date(%(expressions)s)"
return super().as_sql(
compiler, connection, template=template, **extra_context
)
return self.as_sql(compiler, connection, **extra_context)
def as_mysql(self, compiler, connection, **extra_context):
template = None
output_type = self.output_field.get_internal_type()
# MySQL doesn't support explicit cast to float.
if output_type == "FloatField":
template = "(%(expressions)s + 0.0)"
# MariaDB doesn't support explicit cast to JSON.
elif output_type == "JSONField" and connection.mysql_is_mariadb:
template = "JSON_EXTRACT(%(expressions)s, '$')"
return self.as_sql(compiler, connection, template=template, **extra_context)
def as_postgresql(self, compiler, connection, **extra_context):
# CAST would be valid too, but the :: shortcut syntax is more readable.
# 'expressions' is wrapped in parentheses in case it's a complex
# expression.
return self.as_sql(
compiler,
connection,
template="(%(expressions)s)::%(db_type)s",
**extra_context,
)
def as_oracle(self, compiler, connection, **extra_context):
if self.output_field.get_internal_type() == "JSONField":
# Oracle doesn't support explicit cast to JSON.
template = "JSON_QUERY(%(expressions)s, '$')"
return super().as_sql(
compiler, connection, template=template, **extra_context
)
return self.as_sql(compiler, connection, **extra_context)
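# Usage sketch (hypothetical Book model): Cast renders as
# CAST(... AS <db_type>) with the per-backend exceptions above:
#     from django.db.models import FloatField
#     from django.db.models.functions import Cast
#     Book.objects.annotate(price_f=Cast("price", output_field=FloatField()))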
class Coalesce(Func):
"""Return, from left to right, the first non-null expression."""
function = "COALESCE"
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError("Coalesce must take at least two expressions")
super().__init__(*expressions, **extra)
@property
def empty_result_set_value(self):
for expression in self.get_source_expressions():
result = expression.empty_result_set_value
if result is NotImplemented or result is not None:
return result
return None
def as_oracle(self, compiler, connection, **extra_context):
# Oracle prohibits mixing TextField (NCLOB) and CharField (NVARCHAR2),
# so convert all fields to NCLOB when that type is expected.
if self.output_field.get_internal_type() == "TextField":
clone = self.copy()
clone.set_source_expressions(
[
Func(expression, function="TO_NCLOB")
for expression in self.get_source_expressions()
]
)
return super(Coalesce, clone).as_sql(compiler, connection, **extra_context)
return self.as_sql(compiler, connection, **extra_context)
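# Usage sketch for Coalesce, which requires at least two expressions
# ("Author" and its nullable "nickname" field are hypothetical):
#
#   >>> from django.db.models import Value
#   >>> Author.objects.annotate(screen_name=Coalesce("nickname", Value("anonymous")))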
class Collate(Func):
function = "COLLATE"
template = "%(expressions)s %(function)s %(collation)s"
# Inspired from
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
collation_re = _lazy_re_compile(r"^[\w\-]+$")
def __init__(self, expression, collation):
if not (collation and self.collation_re.match(collation)):
raise ValueError("Invalid collation name: %r." % collation)
self.collation = collation
super().__init__(expression)
def as_sql(self, compiler, connection, **extra_context):
extra_context.setdefault("collation", connection.ops.quote_name(self.collation))
return super().as_sql(compiler, connection, **extra_context)
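# Usage sketch for Collate; collation names are backend-specific, and
# "nocase" is an assumption that holds on SQLite ("Author" is hypothetical):
#
#   >>> Author.objects.filter(name=Collate(Value("john"), "nocase"))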
class Greatest(Func):
"""
Return the maximum expression.
If any expression is null the return value is database-specific:
On PostgreSQL, the maximum not-null expression is returned.
On MySQL, Oracle, and SQLite, if any expression is null, null is returned.
"""
function = "GREATEST"
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError("Greatest must take at least two expressions")
super().__init__(*expressions, **extra)
def as_sqlite(self, compiler, connection, **extra_context):
"""Use the MAX function on SQLite."""
return super().as_sqlite(compiler, connection, function="MAX", **extra_context)
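# Usage sketch for Greatest ("Comment" with a related "blog" is a
# hypothetical model); note the backend-specific null handling above:
#
#   >>> Comment.objects.annotate(last_updated=Greatest("modified", "blog__modified"))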
class JSONObject(Func):
function = "JSON_OBJECT"
output_field = JSONField()
def __init__(self, **fields):
expressions = []
for key, value in fields.items():
expressions.extend((Value(key), value))
super().__init__(*expressions)
def as_sql(self, compiler, connection, **extra_context):
if not connection.features.has_json_object_function:
raise NotSupportedError(
"JSONObject() is not supported on this database backend."
)
return super().as_sql(compiler, connection, **extra_context)
def as_postgresql(self, compiler, connection, **extra_context):
copy = self.copy()
copy.set_source_expressions(
[
Cast(expression, TextField()) if index % 2 == 0 else expression
for index, expression in enumerate(copy.get_source_expressions())
]
)
return super(JSONObject, copy).as_sql(
compiler,
connection,
function="JSONB_BUILD_OBJECT",
**extra_context,
)
def as_oracle(self, compiler, connection, **extra_context):
class ArgJoiner:
def join(self, args):
args = [" VALUE ".join(arg) for arg in zip(args[::2], args[1::2])]
return ", ".join(args)
return self.as_sql(
compiler,
connection,
arg_joiner=ArgJoiner(),
template="%(function)s(%(expressions)s RETURNING CLOB)",
**extra_context,
)
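# Usage sketch for JSONObject; keyword names become the JSON keys, and
# plain-string values are resolved as field references ("Author"/"name"/
# "age" are hypothetical). Backends lacking a JSON_OBJECT function raise
# NotSupportedError, per as_sql() above:
#
#   >>> from django.db.models import F
#   >>> Author.objects.annotate(info=JSONObject(name="name", age_x2=F("age") * 2))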
class Least(Func):
"""
Return the minimum expression.
If any expression is null the return value is database-specific:
On PostgreSQL, return the minimum not-null expression.
On MySQL, Oracle, and SQLite, if any expression is null, return null.
"""
function = "LEAST"
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError("Least must take at least two expressions")
super().__init__(*expressions, **extra)
def as_sqlite(self, compiler, connection, **extra_context):
"""Use the MIN function on SQLite."""
return super().as_sqlite(compiler, connection, function="MIN", **extra_context)
class NullIf(Func):
function = "NULLIF"
arity = 2
def as_oracle(self, compiler, connection, **extra_context):
expression1 = self.get_source_expressions()[0]
if isinstance(expression1, Value) and expression1.value is None:
raise ValueError("Oracle does not allow Value(None) for expression1.")
return super().as_sql(compiler, connection, **extra_context)
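# Usage sketch for NullIf: the annotation is NULL when both arguments are
# equal, otherwise the first argument ("Author" is hypothetical):
#
#   >>> Author.objects.annotate(changed_name=NullIf("name", "nickname"))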
|
e2fe6b191de7108ed735490cf5bf03925f53435ac4025015199db2fb0c18eb9b | """
Create SQL statements for QuerySets.
The code in here encapsulates all of the SQL construction so that QuerySets
themselves do not have to (and could be backed by things other than SQL
databases). The abstraction barrier only works one way: this module has to know
all about the internals of models in order to get the information it needs.
"""
import copy
import difflib
import functools
import sys
from collections import Counter, namedtuple
from collections.abc import Iterator, Mapping
from itertools import chain, count, product
from string import ascii_uppercase
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db import DEFAULT_DB_ALIAS, NotSupportedError, connections
from django.db.models.aggregates import Count
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import (
BaseExpression,
Col,
Exists,
F,
OuterRef,
Ref,
ResolvedOuterRef,
Value,
)
from django.db.models.fields import Field
from django.db.models.fields.related_lookups import MultiColSource
from django.db.models.lookups import Lookup
from django.db.models.query_utils import (
Q,
check_rel_lookup_compatibility,
refs_expression,
)
from django.db.models.sql.constants import INNER, LOUTER, ORDER_DIR, SINGLE
from django.db.models.sql.datastructures import BaseTable, Empty, Join, MultiJoin
from django.db.models.sql.where import AND, OR, ExtraWhere, NothingNode, WhereNode
from django.utils.functional import cached_property
from django.utils.regex_helper import _lazy_re_compile
from django.utils.tree import Node
__all__ = ["Query", "RawQuery"]
# Quotation marks ('"`[]), whitespace characters, semicolons, or inline
# SQL comments are forbidden in column aliases.
FORBIDDEN_ALIAS_PATTERN = _lazy_re_compile(r"['`\"\]\[;\s]|--|/\*|\*/")
# Inspired from
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
EXPLAIN_OPTIONS_PATTERN = _lazy_re_compile(r"[\w\-]+")
def get_field_names_from_opts(opts):
if opts is None:
return set()
return set(
chain.from_iterable(
(f.name, f.attname) if f.concrete else (f.name,) for f in opts.get_fields()
)
)
def get_children_from_q(q):
for child in q.children:
if isinstance(child, Node):
yield from get_children_from_q(child)
else:
yield child
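# Sketch of how get_children_from_q() flattens nested Q objects (the field
# names are hypothetical):
#
#   >>> list(get_children_from_q(Q(Q(a=1) | Q(b=2), c=3)))
#   [('a', 1), ('b', 2), ('c', 3)]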
JoinInfo = namedtuple(
"JoinInfo",
("final_field", "targets", "opts", "joins", "path", "transform_function"),
)
class RawQuery:
"""A single raw SQL query."""
def __init__(self, sql, using, params=()):
self.params = params
self.sql = sql
self.using = using
self.cursor = None
# Mirror some properties of a normal query so that
# the compiler can be used to process results.
self.low_mark, self.high_mark = 0, None # Used for offset/limit
self.extra_select = {}
self.annotation_select = {}
def chain(self, using):
return self.clone(using)
def clone(self, using):
return RawQuery(self.sql, using, params=self.params)
def get_columns(self):
if self.cursor is None:
self._execute_query()
converter = connections[self.using].introspection.identifier_converter
return [converter(column_meta[0]) for column_meta in self.cursor.description]
def __iter__(self):
# Always execute a new query for a new iterator.
# This could be optimized with a cache at the expense of RAM.
self._execute_query()
if not connections[self.using].features.can_use_chunked_reads:
# If the database can't use chunked reads we need to make sure we
# evaluate the entire query up front.
result = list(self.cursor)
else:
result = self.cursor
return iter(result)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
@property
def params_type(self):
if self.params is None:
return None
return dict if isinstance(self.params, Mapping) else tuple
def __str__(self):
if self.params_type is None:
return self.sql
return self.sql % self.params_type(self.params)
def _execute_query(self):
connection = connections[self.using]
# Adapt parameters to the database, as much as possible considering
# that the target type isn't known. See #17755.
params_type = self.params_type
adapter = connection.ops.adapt_unknown_value
if params_type is tuple:
params = tuple(adapter(val) for val in self.params)
elif params_type is dict:
params = {key: adapter(val) for key, val in self.params.items()}
elif params_type is None:
params = None
else:
raise RuntimeError("Unexpected params type: %s" % params_type)
self.cursor = connection.cursor()
self.cursor.execute(self.sql, params)
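# Sketch of RawQuery parameter handling: __str__() interpolates params
# according to params_type (tuple for sequences, dict for mappings);
# "tbl" is a hypothetical table name:
#
#   >>> str(RawQuery("SELECT * FROM tbl WHERE id = %s", "default", params=(1,)))
#   'SELECT * FROM tbl WHERE id = 1'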
ExplainInfo = namedtuple("ExplainInfo", ("format", "options"))
class Query(BaseExpression):
"""A single SQL query."""
alias_prefix = "T"
empty_result_set_value = None
subq_aliases = frozenset([alias_prefix])
compiler = "SQLCompiler"
base_table_class = BaseTable
join_class = Join
default_cols = True
default_ordering = True
standard_ordering = True
filter_is_sticky = False
subquery = False
# SQL-related attributes.
# Select and related select clauses are expressions to use in the SELECT
# clause of the query. The select is used for cases where we want to set up
# the select clause to contain other than default fields (values(),
# subqueries...). Note that annotations go to annotations dictionary.
select = ()
# The group_by attribute can have one of the following forms:
# - None: no group by at all in the query
# - A tuple of expressions: group by (at least) those expressions.
# String refs are also allowed for now.
# - True: group by all select fields of the model
# See compiler.get_group_by() for details.
group_by = None
order_by = ()
low_mark = 0 # Used for offset/limit.
high_mark = None # Used for offset/limit.
distinct = False
distinct_fields = ()
select_for_update = False
select_for_update_nowait = False
select_for_update_skip_locked = False
select_for_update_of = ()
select_for_no_key_update = False
select_related = False
has_select_fields = False
    # Arbitrary limit for select_related to prevent infinite recursion.
max_depth = 5
# Holds the selects defined by a call to values() or values_list()
# excluding annotation_select and extra_select.
values_select = ()
# SQL annotation-related attributes.
annotation_select_mask = None
_annotation_select_cache = None
# Set combination attributes.
combinator = None
combinator_all = False
combined_queries = ()
# These are for extensions. The contents are more or less appended verbatim
# to the appropriate clause.
extra_select_mask = None
_extra_select_cache = None
extra_tables = ()
extra_order_by = ()
# A tuple that is a set of model field names and either True, if these are
# the fields to defer, or False if these are the only fields to load.
deferred_loading = (frozenset(), True)
explain_info = None
def __init__(self, model, alias_cols=True):
self.model = model
self.alias_refcount = {}
# alias_map is the most important data structure regarding joins.
# It's used for recording which joins exist in the query and what
# types they are. The key is the alias of the joined table (possibly
# the table name) and the value is a Join-like object (see
# sql.datastructures.Join for more information).
self.alias_map = {}
# Whether to provide alias to columns during reference resolving.
self.alias_cols = alias_cols
# Sometimes the query contains references to aliases in outer queries (as
# a result of split_exclude). Correct alias quoting needs to know these
# aliases too.
# Map external tables to whether they are aliased.
self.external_aliases = {}
self.table_map = {} # Maps table names to list of aliases.
self.used_aliases = set()
self.where = WhereNode()
# Maps alias -> Annotation Expression.
self.annotations = {}
# These are for extensions. The contents are more or less appended
# verbatim to the appropriate clause.
self.extra = {} # Maps col_alias -> (col_sql, params).
self._filtered_relations = {}
@property
def output_field(self):
if len(self.select) == 1:
select = self.select[0]
return getattr(select, "target", None) or select.field
elif len(self.annotation_select) == 1:
return next(iter(self.annotation_select.values())).output_field
@cached_property
def base_table(self):
for alias in self.alias_map:
return alias
def __str__(self):
"""
Return the query as a string of SQL with the parameter values
substituted in (use sql_with_params() to see the unsubstituted string).
Parameter values won't necessarily be quoted correctly, since that is
done by the database interface at execution time.
"""
sql, params = self.sql_with_params()
return sql % params
def sql_with_params(self):
"""
Return the query as an SQL string and the parameters that will be
substituted into the query.
"""
return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
def __deepcopy__(self, memo):
"""Limit the amount of work when a Query is deepcopied."""
result = self.clone()
memo[id(self)] = result
return result
def get_compiler(self, using=None, connection=None, elide_empty=True):
if using is None and connection is None:
raise ValueError("Need either using or connection")
if using:
connection = connections[using]
return connection.ops.compiler(self.compiler)(
self, connection, using, elide_empty
)
def get_meta(self):
"""
Return the Options instance (the model._meta) from which to start
processing. Normally, this is self.model._meta, but it can be changed
by subclasses.
"""
if self.model:
return self.model._meta
def clone(self):
"""
Return a copy of the current Query. A lightweight alternative to
deepcopy().
"""
obj = Empty()
obj.__class__ = self.__class__
# Copy references to everything.
obj.__dict__ = self.__dict__.copy()
# Clone attributes that can't use shallow copy.
obj.alias_refcount = self.alias_refcount.copy()
obj.alias_map = self.alias_map.copy()
obj.external_aliases = self.external_aliases.copy()
obj.table_map = self.table_map.copy()
obj.where = self.where.clone()
obj.annotations = self.annotations.copy()
if self.annotation_select_mask is not None:
obj.annotation_select_mask = self.annotation_select_mask.copy()
if self.combined_queries:
obj.combined_queries = tuple(
[query.clone() for query in self.combined_queries]
)
# _annotation_select_cache cannot be copied, as doing so breaks the
# (necessary) state in which both annotations and
# _annotation_select_cache point to the same underlying objects.
# It will get re-populated in the cloned queryset the next time it's
# used.
obj._annotation_select_cache = None
obj.extra = self.extra.copy()
if self.extra_select_mask is not None:
obj.extra_select_mask = self.extra_select_mask.copy()
if self._extra_select_cache is not None:
obj._extra_select_cache = self._extra_select_cache.copy()
if self.select_related is not False:
# Use deepcopy because select_related stores fields in nested
# dicts.
obj.select_related = copy.deepcopy(obj.select_related)
if "subq_aliases" in self.__dict__:
obj.subq_aliases = self.subq_aliases.copy()
obj.used_aliases = self.used_aliases.copy()
obj._filtered_relations = self._filtered_relations.copy()
# Clear the cached_property, if it exists.
obj.__dict__.pop("base_table", None)
return obj
def chain(self, klass=None):
"""
Return a copy of the current Query that's ready for another operation.
The klass argument changes the type of the Query, e.g. UpdateQuery.
"""
obj = self.clone()
if klass and obj.__class__ != klass:
obj.__class__ = klass
if not obj.filter_is_sticky:
obj.used_aliases = set()
obj.filter_is_sticky = False
if hasattr(obj, "_setup_query"):
obj._setup_query()
return obj
def relabeled_clone(self, change_map):
clone = self.clone()
clone.change_aliases(change_map)
return clone
def _get_col(self, target, field, alias):
if not self.alias_cols:
alias = None
return target.get_col(alias, field)
def get_aggregation(self, using, aggregate_exprs):
"""
Return the dictionary with the values of the existing aggregations.
"""
if not aggregate_exprs:
return {}
aggregates = {}
for alias, aggregate_expr in aggregate_exprs.items():
self.check_alias(alias)
aggregate = aggregate_expr.resolve_expression(
self, allow_joins=True, reuse=None, summarize=True
)
if not aggregate.contains_aggregate:
raise TypeError("%s is not an aggregate expression" % alias)
aggregates[alias] = aggregate
# Existing usage of aggregation can be determined by the presence of
# selected aggregates but also by filters against aliased aggregates.
_, having, qualify = self.where.split_having_qualify()
has_existing_aggregation = (
any(
getattr(annotation, "contains_aggregate", True)
for annotation in self.annotations.values()
)
or having
)
# Decide if we need to use a subquery.
#
# Existing aggregations would cause incorrect results as
# get_aggregation() must produce just one result and thus must not use
# GROUP BY.
#
# If the query has limit or distinct, or uses set operations, then
# those operations must be done in a subquery so that the query
# aggregates on the limit and/or distinct results instead of applying
# the distinct and limit after the aggregation.
if (
isinstance(self.group_by, tuple)
or self.is_sliced
or has_existing_aggregation
or qualify
or self.distinct
or self.combinator
):
from django.db.models.sql.subqueries import AggregateQuery
inner_query = self.clone()
inner_query.subquery = True
outer_query = AggregateQuery(self.model, inner_query)
inner_query.select_for_update = False
inner_query.select_related = False
inner_query.set_annotation_mask(self.annotation_select)
            # Queries with distinct_fields need ordering, and when a limit is
            # applied we must take the slice from the ordered query. Otherwise
            # no ordering is needed.
inner_query.clear_ordering(force=False)
if not inner_query.distinct:
# If the inner query uses default select and it has some
# aggregate annotations, then we must make sure the inner
# query is grouped by the main model's primary key. However,
# clearing the select clause can alter results if distinct is
# used.
if inner_query.default_cols and has_existing_aggregation:
inner_query.group_by = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
inner_query.default_cols = False
if not qualify:
# Mask existing annotations that are not referenced by
# aggregates to be pushed to the outer query unless
# filtering against window functions is involved as it
# requires complex realising.
annotation_mask = set()
for aggregate in aggregates.values():
annotation_mask |= aggregate.get_refs()
inner_query.set_annotation_mask(annotation_mask)
# Add aggregates to the outer AggregateQuery. This requires making
# sure all columns referenced by the aggregates are selected in the
# inner query. It is achieved by retrieving all column references
# by the aggregates, explicitly selecting them in the inner query,
# and making sure the aggregates are repointed to them.
col_refs = {}
for alias, aggregate in aggregates.items():
replacements = {}
for col in self._gen_cols([aggregate], resolve_refs=False):
if not (col_ref := col_refs.get(col)):
index = len(col_refs) + 1
col_alias = f"__col{index}"
col_ref = Ref(col_alias, col)
col_refs[col] = col_ref
inner_query.annotations[col_alias] = col
inner_query.append_annotation_mask([col_alias])
replacements[col] = col_ref
outer_query.annotations[alias] = aggregate.replace_expressions(
replacements
)
if (
inner_query.select == ()
and not inner_query.default_cols
and not inner_query.annotation_select_mask
):
# In case of Model.objects[0:3].count(), there would be no
# field selected in the inner query, yet we must use a subquery.
# So, make sure at least one field is selected.
inner_query.select = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
else:
outer_query = self
self.select = ()
self.default_cols = False
self.extra = {}
if self.annotations:
# Inline reference to existing annotations and mask them as
# they are unnecessary given only the summarized aggregations
# are requested.
replacements = {
Ref(alias, annotation): annotation
for alias, annotation in self.annotations.items()
}
self.annotations = {
alias: aggregate.replace_expressions(replacements)
for alias, aggregate in aggregates.items()
}
else:
self.annotations = aggregates
self.set_annotation_mask(aggregates)
empty_set_result = [
expression.empty_result_set_value
for expression in outer_query.annotation_select.values()
]
elide_empty = not any(result is NotImplemented for result in empty_set_result)
outer_query.clear_ordering(force=True)
outer_query.clear_limits()
outer_query.select_for_update = False
outer_query.select_related = False
compiler = outer_query.get_compiler(using, elide_empty=elide_empty)
result = compiler.execute_sql(SINGLE)
if result is None:
result = empty_set_result
else:
converters = compiler.get_converters(outer_query.annotation_select.values())
result = next(compiler.apply_converters((result,), converters))
return dict(zip(outer_query.annotation_select, result))
def get_count(self, using):
"""
Perform a COUNT() query using the current filter constraints.
"""
obj = self.clone()
return obj.get_aggregation(using, {"__count": Count("*")})["__count"]
def has_filters(self):
return self.where
def exists(self, limit=True):
q = self.clone()
if not (q.distinct and q.is_sliced):
if q.group_by is True:
q.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
q.set_group_by(allow_aliases=False)
q.clear_select_clause()
if q.combined_queries and q.combinator == "union":
q.combined_queries = tuple(
combined_query.exists(limit=False)
for combined_query in q.combined_queries
)
q.clear_ordering(force=True)
if limit:
q.set_limits(high=1)
q.add_annotation(Value(1), "a")
return q
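    # Sketch of the rewrite performed by exists(): the select list is
    # replaced with the constant annotation "1 AS a" and a LIMIT is applied,
    # so the resulting SQL is roughly "SELECT 1 AS a FROM ... WHERE ...
    # LIMIT 1" instead of fetching full rows.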
def has_results(self, using):
q = self.exists(using)
compiler = q.get_compiler(using=using)
return compiler.has_results()
def explain(self, using, format=None, **options):
q = self.clone()
for option_name in options:
if (
not EXPLAIN_OPTIONS_PATTERN.fullmatch(option_name)
or "--" in option_name
):
raise ValueError(f"Invalid option name: {option_name!r}.")
q.explain_info = ExplainInfo(format, options)
compiler = q.get_compiler(using=using)
return "\n".join(compiler.explain_query())
def combine(self, rhs, connector):
"""
Merge the 'rhs' query into the current one (with any 'rhs' effects
being applied *after* (that is, "to the right of") anything in the
current query. 'rhs' is not modified during a call to this function.
The 'connector' parameter describes how to connect filters from the
'rhs' query.
"""
if self.model != rhs.model:
raise TypeError("Cannot combine queries on two different base models.")
if self.is_sliced:
raise TypeError("Cannot combine queries once a slice has been taken.")
if self.distinct != rhs.distinct:
raise TypeError("Cannot combine a unique query with a non-unique query.")
if self.distinct_fields != rhs.distinct_fields:
raise TypeError("Cannot combine queries with different distinct fields.")
        # If lhs and rhs share the same alias prefix, it is possible to have
# conflicting alias changes like T4 -> T5, T5 -> T6, which might end up
# as T4 -> T6 while combining two querysets. To prevent this, change an
# alias prefix of the rhs and update current aliases accordingly,
# except if the alias is the base table since it must be present in the
# query on both sides.
initial_alias = self.get_initial_alias()
rhs.bump_prefix(self, exclude={initial_alias})
# Work out how to relabel the rhs aliases, if necessary.
change_map = {}
conjunction = connector == AND
# Determine which existing joins can be reused. When combining the
# query with AND we must recreate all joins for m2m filters. When
# combining with OR we can reuse joins. The reason is that in AND
# case a single row can't fulfill a condition like:
# revrel__col=1 & revrel__col=2
# But, there might be two different related rows matching this
# condition. In OR case a single True is enough, so single row is
# enough, too.
#
# Note that we will be creating duplicate joins for non-m2m joins in
# the AND case. The results will be correct but this creates too many
# joins. This is something that could be fixed later on.
reuse = set() if conjunction else set(self.alias_map)
joinpromoter = JoinPromoter(connector, 2, False)
joinpromoter.add_votes(
j for j in self.alias_map if self.alias_map[j].join_type == INNER
)
rhs_votes = set()
# Now, add the joins from rhs query into the new query (skipping base
# table).
rhs_tables = list(rhs.alias_map)[1:]
for alias in rhs_tables:
join = rhs.alias_map[alias]
# If the left side of the join was already relabeled, use the
# updated alias.
join = join.relabeled_clone(change_map)
new_alias = self.join(join, reuse=reuse)
if join.join_type == INNER:
rhs_votes.add(new_alias)
# We can't reuse the same join again in the query. If we have two
# distinct joins for the same connection in rhs query, then the
# combined query must have two joins, too.
reuse.discard(new_alias)
if alias != new_alias:
change_map[alias] = new_alias
if not rhs.alias_refcount[alias]:
# The alias was unused in the rhs query. Unref it so that it
# will be unused in the new query, too. We have to add and
# unref the alias so that join promotion has information of
# the join type for the unused alias.
self.unref_alias(new_alias)
joinpromoter.add_votes(rhs_votes)
joinpromoter.update_join_types(self)
        # Combine subquery aliases so that alias relabelling properly handles
        # subqueries when combining the where and select clauses.
self.subq_aliases |= rhs.subq_aliases
# Now relabel a copy of the rhs where-clause and add it to the current
# one.
w = rhs.where.clone()
w.relabel_aliases(change_map)
self.where.add(w, connector)
# Selection columns and extra extensions are those provided by 'rhs'.
if rhs.select:
self.set_select([col.relabeled_clone(change_map) for col in rhs.select])
else:
self.select = ()
if connector == OR:
# It would be nice to be able to handle this, but the queries don't
# really make sense (or return consistent value sets). Not worth
# the extra complexity when you can write a real query instead.
if self.extra and rhs.extra:
raise ValueError(
"When merging querysets using 'or', you cannot have "
"extra(select=...) on both sides."
)
self.extra.update(rhs.extra)
extra_select_mask = set()
if self.extra_select_mask is not None:
extra_select_mask.update(self.extra_select_mask)
if rhs.extra_select_mask is not None:
extra_select_mask.update(rhs.extra_select_mask)
if extra_select_mask:
self.set_extra_mask(extra_select_mask)
self.extra_tables += rhs.extra_tables
# Ordering uses the 'rhs' ordering, unless it has none, in which case
# the current ordering is used.
self.order_by = rhs.order_by or self.order_by
self.extra_order_by = rhs.extra_order_by or self.extra_order_by
def _get_defer_select_mask(self, opts, mask, select_mask=None):
if select_mask is None:
select_mask = {}
select_mask[opts.pk] = {}
# All concrete fields that are not part of the defer mask must be
        # loaded. If a relational field is encountered, it gets added to the
        # mask so it can be considered if `select_related` is involved, and
        # the cycle continues by recursively calling this function.
for field in opts.concrete_fields:
field_mask = mask.pop(field.name, None)
if field_mask is None:
select_mask.setdefault(field, {})
elif field_mask:
if not field.is_relation:
raise FieldError(next(iter(field_mask)))
field_select_mask = select_mask.setdefault(field, {})
related_model = field.remote_field.model._meta.concrete_model
self._get_defer_select_mask(
related_model._meta, field_mask, field_select_mask
)
# Remaining defer entries must be references to reverse relationships.
# The following code is expected to raise FieldError if it encounters
# a malformed defer entry.
for field_name, field_mask in mask.items():
if filtered_relation := self._filtered_relations.get(field_name):
relation = opts.get_field(filtered_relation.relation_name)
field_select_mask = select_mask.setdefault((field_name, relation), {})
field = relation.field
else:
field = opts.get_field(field_name).field
field_select_mask = select_mask.setdefault(field, {})
related_model = field.model._meta.concrete_model
self._get_defer_select_mask(
related_model._meta, field_mask, field_select_mask
)
return select_mask
def _get_only_select_mask(self, opts, mask, select_mask=None):
if select_mask is None:
select_mask = {}
select_mask[opts.pk] = {}
# Only include fields mentioned in the mask.
for field_name, field_mask in mask.items():
field = opts.get_field(field_name)
field_select_mask = select_mask.setdefault(field, {})
if field_mask:
if not field.is_relation:
raise FieldError(next(iter(field_mask)))
related_model = field.remote_field.model._meta.concrete_model
self._get_only_select_mask(
related_model._meta, field_mask, field_select_mask
)
return select_mask
def get_select_mask(self):
"""
Convert the self.deferred_loading data structure to an alternate data
        structure describing the fields that *will* be loaded. This is used to
compute the columns to select from the database and also by the
QuerySet class to work out which fields are being initialized on each
model. Models that have all their fields included aren't mentioned in
the result, only those that have field restrictions in place.
"""
field_names, defer = self.deferred_loading
if not field_names:
return {}
mask = {}
for field_name in field_names:
part_mask = mask
for part in field_name.split(LOOKUP_SEP):
part_mask = part_mask.setdefault(part, {})
opts = self.get_meta()
if defer:
return self._get_defer_select_mask(opts, mask)
return self._get_only_select_mask(opts, mask)
def table_alias(self, table_name, create=False, filtered_relation=None):
"""
Return a table alias for the given table_name and whether this is a
new alias or not.
If 'create' is true, a new alias is always created. Otherwise, the
most recently created alias for the table (if one exists) is reused.
"""
alias_list = self.table_map.get(table_name)
if not create and alias_list:
alias = alias_list[0]
self.alias_refcount[alias] += 1
return alias, False
# Create a new alias for this table.
if alias_list:
alias = "%s%d" % (self.alias_prefix, len(self.alias_map) + 1)
alias_list.append(alias)
else:
# The first occurrence of a table uses the table name directly.
alias = (
filtered_relation.alias if filtered_relation is not None else table_name
)
self.table_map[table_name] = [alias]
self.alias_refcount[alias] = 1
return alias, True
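    # Sketch of the alias naming scheme above: the first occurrence of a
    # table uses the table name itself as the alias; subsequent occurrences
    # get generated aliases built from alias_prefix and the current size of
    # alias_map, e.g. "author", then "T3", "T4", ... for repeated joins.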
def ref_alias(self, alias):
"""Increases the reference count for this alias."""
self.alias_refcount[alias] += 1
def unref_alias(self, alias, amount=1):
"""Decreases the reference count for this alias."""
self.alias_refcount[alias] -= amount
def promote_joins(self, aliases):
"""
        Recursively promote the join type of the given aliases and their
        children to an outer join. A join is only promoted if it is nullable
        or the parent join is an outer join.
The children promotion is done to avoid join chains that contain a LOUTER
        b INNER c. So, if we currently have a INNER b INNER c and a->b is promoted,
then we must also promote b->c automatically, or otherwise the promotion
of a->b doesn't actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type is None:
# This is the base table (first FROM entry) - this table
# isn't really joined at all in the query, so we should not
# alter its join type.
continue
# Only the first alias (skipped above) should have None join_type
assert self.alias_map[alias].join_type is not None
parent_alias = self.alias_map[alias].parent_alias
parent_louter = (
parent_alias and self.alias_map[parent_alias].join_type == LOUTER
)
already_louter = self.alias_map[alias].join_type == LOUTER
if (self.alias_map[alias].nullable or parent_louter) and not already_louter:
self.alias_map[alias] = self.alias_map[alias].promote()
# Join type of 'alias' changed, so re-examine all aliases that
# refer to this one.
aliases.extend(
join
for join in self.alias_map
if self.alias_map[join].parent_alias == alias
and join not in aliases
)
def demote_joins(self, aliases):
"""
Change join type from LOUTER to INNER for all joins in aliases.
Similarly to promote_joins(), this method must ensure no join chains
containing first an outer, then an inner join are generated. If we
are demoting b->c join in chain a LOUTER b LOUTER c then we must
demote a->b automatically, or otherwise the demotion of b->c doesn't
        actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type == LOUTER:
self.alias_map[alias] = self.alias_map[alias].demote()
parent_alias = self.alias_map[alias].parent_alias
if self.alias_map[parent_alias].join_type == INNER:
aliases.append(parent_alias)
def reset_refcounts(self, to_counts):
"""
Reset reference counts for aliases so that they match the value passed
in `to_counts`.
"""
for alias, cur_refcount in self.alias_refcount.copy().items():
unref_amount = cur_refcount - to_counts.get(alias, 0)
self.unref_alias(alias, unref_amount)
def change_aliases(self, change_map):
"""
Change the aliases in change_map (which maps old-alias -> new-alias),
relabelling any references to them in select columns and the where
clause.
"""
# If keys and values of change_map were to intersect, an alias might be
# updated twice (e.g. T4 -> T5, T5 -> T6, so also T4 -> T6) depending
# on their order in change_map.
assert set(change_map).isdisjoint(change_map.values())
# 1. Update references in "select" (normal columns plus aliases),
# "group by" and "where".
self.where.relabel_aliases(change_map)
if isinstance(self.group_by, tuple):
self.group_by = tuple(
[col.relabeled_clone(change_map) for col in self.group_by]
)
self.select = tuple([col.relabeled_clone(change_map) for col in self.select])
self.annotations = self.annotations and {
key: col.relabeled_clone(change_map)
for key, col in self.annotations.items()
}
# 2. Rename the alias in the internal table/alias datastructures.
for old_alias, new_alias in change_map.items():
if old_alias not in self.alias_map:
continue
alias_data = self.alias_map[old_alias].relabeled_clone(change_map)
self.alias_map[new_alias] = alias_data
self.alias_refcount[new_alias] = self.alias_refcount[old_alias]
del self.alias_refcount[old_alias]
del self.alias_map[old_alias]
table_aliases = self.table_map[alias_data.table_name]
for pos, alias in enumerate(table_aliases):
if alias == old_alias:
table_aliases[pos] = new_alias
break
self.external_aliases = {
# Table is aliased or it's being changed and thus is aliased.
change_map.get(alias, alias): (aliased or alias in change_map)
for alias, aliased in self.external_aliases.items()
}
def bump_prefix(self, other_query, exclude=None):
"""
Change the alias prefix to the next letter in the alphabet in a way
that the other query's aliases and this query's aliases will not
conflict. Even tables that previously had no alias will get an alias
after this call. To prevent changing aliases use the exclude parameter.
"""
def prefix_gen():
"""
Generate a sequence of characters in alphabetical order:
-> 'A', 'B', 'C', ...
When the alphabet is finished, the sequence will continue with the
Cartesian product:
-> 'AA', 'AB', 'AC', ...
"""
alphabet = ascii_uppercase
prefix = chr(ord(self.alias_prefix) + 1)
yield prefix
for n in count(1):
seq = alphabet[alphabet.index(prefix) :] if prefix else alphabet
for s in product(seq, repeat=n):
yield "".join(s)
prefix = None
if self.alias_prefix != other_query.alias_prefix:
# No clashes between self and outer query should be possible.
return
# Explicitly avoid infinite loop. The constant divider is based on how
# much depth recursive subquery references add to the stack. This value
# might need to be adjusted when adding or removing function calls from
# the code path in charge of performing these operations.
local_recursion_limit = sys.getrecursionlimit() // 16
for pos, prefix in enumerate(prefix_gen()):
if prefix not in self.subq_aliases:
self.alias_prefix = prefix
break
if pos > local_recursion_limit:
raise RecursionError(
"Maximum recursion depth exceeded: too many subqueries."
)
self.subq_aliases = self.subq_aliases.union([self.alias_prefix])
other_query.subq_aliases = other_query.subq_aliases.union(self.subq_aliases)
if exclude is None:
exclude = {}
self.change_aliases(
{
alias: "%s%d" % (self.alias_prefix, pos)
for pos, alias in enumerate(self.alias_map)
if alias not in exclude
}
)
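    # Sketch of prefix_gen()'s output when alias_prefix is "T": it yields
    # "U", then "U" through "Z", then the two-letter Cartesian products
    # "AA", "AB", ... so nested subqueries end up with distinct prefixes.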
def get_initial_alias(self):
"""
Return the first alias for this query, after increasing its reference
count.
"""
if self.alias_map:
alias = self.base_table
self.ref_alias(alias)
elif self.model:
alias = self.join(self.base_table_class(self.get_meta().db_table, None))
else:
alias = None
return alias
def count_active_tables(self):
"""
Return the number of tables in this query with a non-zero reference
        count. After execution, the reference counts are zeroed, so tables
        added by the compiler will not be seen by this method.
"""
return len([1 for count in self.alias_refcount.values() if count])
def join(self, join, reuse=None, reuse_with_filtered_relation=False):
"""
Return an alias for the 'join', either reusing an existing alias for
that join or creating a new one. 'join' is either a base_table_class or
join_class.
The 'reuse' parameter can be either None which means all joins are
reusable, or it can be a set containing the aliases that can be reused.
The 'reuse_with_filtered_relation' parameter is used when computing
FilteredRelation instances.
A join is always created as LOUTER if the lhs alias is LOUTER to make
sure chains like t1 LOUTER t2 INNER t3 aren't generated. All new
joins are created as LOUTER if the join is nullable.
"""
if reuse_with_filtered_relation and reuse:
reuse_aliases = [
a for a, j in self.alias_map.items() if a in reuse and j.equals(join)
]
else:
reuse_aliases = [
a
for a, j in self.alias_map.items()
if (reuse is None or a in reuse) and j == join
]
if reuse_aliases:
if join.table_alias in reuse_aliases:
reuse_alias = join.table_alias
else:
# Reuse the most recent alias of the joined table
# (a many-to-many relation may be joined multiple times).
reuse_alias = reuse_aliases[-1]
self.ref_alias(reuse_alias)
return reuse_alias
# No reuse is possible, so we need a new alias.
alias, _ = self.table_alias(
join.table_name, create=True, filtered_relation=join.filtered_relation
)
if join.join_type:
if self.alias_map[join.parent_alias].join_type == LOUTER or join.nullable:
join_type = LOUTER
else:
join_type = INNER
join.join_type = join_type
join.table_alias = alias
self.alias_map[alias] = join
return alias
def join_parent_model(self, opts, model, alias, seen):
"""
Make sure the given 'model' is joined in the query. If 'model' isn't
a parent of 'opts' or if it is None this method is a no-op.
The 'alias' is the root alias for starting the join, 'seen' is a dict
of model -> alias of existing joins. It must also contain a mapping
of None -> some alias. This will be returned in the no-op case.
"""
if model in seen:
return seen[model]
chain = opts.get_base_chain(model)
if not chain:
return alias
curr_opts = opts
for int_model in chain:
if int_model in seen:
curr_opts = int_model._meta
alias = seen[int_model]
continue
            # Proxy models have elements in their base chain with no parents;
            # assign the new options object and skip to the next base in that
            # case.
if not curr_opts.parents[int_model]:
curr_opts = int_model._meta
continue
link_field = curr_opts.get_ancestor_link(int_model)
join_info = self.setup_joins([link_field.name], curr_opts, alias)
curr_opts = int_model._meta
alias = seen[int_model] = join_info.joins[-1]
return alias or seen[None]
def check_alias(self, alias):
if FORBIDDEN_ALIAS_PATTERN.search(alias):
raise ValueError(
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
def add_annotation(self, annotation, alias, select=True):
"""Add a single annotation expression to the Query."""
self.check_alias(alias)
annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None)
if select:
self.append_annotation_mask([alias])
else:
self.set_annotation_mask(set(self.annotation_select).difference({alias}))
self.annotations[alias] = annotation
def resolve_expression(self, query, *args, **kwargs):
clone = self.clone()
# Subqueries need to use a different set of aliases than the outer query.
clone.bump_prefix(query)
clone.subquery = True
clone.where.resolve_expression(query, *args, **kwargs)
# Resolve combined queries.
if clone.combinator:
clone.combined_queries = tuple(
[
combined_query.resolve_expression(query, *args, **kwargs)
for combined_query in clone.combined_queries
]
)
for key, value in clone.annotations.items():
resolved = value.resolve_expression(query, *args, **kwargs)
if hasattr(resolved, "external_aliases"):
resolved.external_aliases.update(clone.external_aliases)
clone.annotations[key] = resolved
# Outer query's aliases are considered external.
for alias, table in query.alias_map.items():
clone.external_aliases[alias] = (
isinstance(table, Join)
and table.join_field.related_model._meta.db_table != alias
) or (
isinstance(table, BaseTable) and table.table_name != table.table_alias
)
return clone
def get_external_cols(self):
exprs = chain(self.annotations.values(), self.where.children)
return [
col
for col in self._gen_cols(exprs, include_external=True)
if col.alias in self.external_aliases
]
def get_group_by_cols(self, wrapper=None):
# If wrapper is referenced by an alias for an explicit GROUP BY through
        # values(), a reference to this expression (and not self) must be
# returned to ensure external column references are not grouped against
# as well.
external_cols = self.get_external_cols()
if any(col.possibly_multivalued for col in external_cols):
return [wrapper or self]
return external_cols
def as_sql(self, compiler, connection):
# Some backends (e.g. Oracle) raise an error when a subquery contains
# unnecessary ORDER BY clause.
if (
self.subquery
and not connection.features.ignores_unnecessary_order_by_in_subqueries
):
self.clear_ordering(force=False)
for query in self.combined_queries:
query.clear_ordering(force=False)
sql, params = self.get_compiler(connection=connection).as_sql()
if self.subquery:
sql = "(%s)" % sql
return sql, params
def resolve_lookup_value(self, value, can_reuse, allow_joins):
if hasattr(value, "resolve_expression"):
value = value.resolve_expression(
self,
reuse=can_reuse,
allow_joins=allow_joins,
)
elif isinstance(value, (list, tuple)):
# The items of the iterable may be expressions and therefore need
# to be resolved independently.
values = (
self.resolve_lookup_value(sub_value, can_reuse, allow_joins)
for sub_value in value
)
type_ = type(value)
if hasattr(type_, "_make"): # namedtuple
return type_(*values)
return type_(values)
return value
def solve_lookup_type(self, lookup, summarize=False):
"""
Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains').
"""
lookup_splitted = lookup.split(LOOKUP_SEP)
if self.annotations:
annotation, expression_lookups = refs_expression(
lookup_splitted, self.annotations
)
if annotation:
expression = self.annotations[annotation]
if summarize:
expression = Ref(annotation, expression)
return expression_lookups, (), expression
_, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta())
field_parts = lookup_splitted[0 : len(lookup_splitted) - len(lookup_parts)]
if len(lookup_parts) > 1 and not field_parts:
raise FieldError(
'Invalid lookup "%s" for model %s".'
% (lookup, self.get_meta().model.__name__)
)
return lookup_parts, field_parts, False
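    # Sketch of how solve_lookup_type() splits a lookup string (a model
    # with a related "foobar" and an "id" field is hypothetical):
    #
    #   >>> query.solve_lookup_type("foobar__id__icontains")
    #   (['icontains'], ['foobar', 'id'], False)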
def check_query_object_type(self, value, opts, field):
"""
Check whether the object passed while querying is of the correct type.
If not, raise a ValueError specifying the wrong object.
"""
if hasattr(value, "_meta"):
if not check_rel_lookup_compatibility(value._meta.model, opts, field):
raise ValueError(
'Cannot query "%s": Must be "%s" instance.'
% (value, opts.object_name)
)
def check_related_objects(self, field, value, opts):
"""Check the type of object passed to query relations."""
if field.is_relation:
# Check that the field and the queryset use the same model in a
# query like .filter(author=Author.objects.all()). For example, the
# opts would be Author's (from the author field) and value.model
# would be Author.objects.all() queryset's .model (Author also).
# The field is the related field on the lhs side.
if (
isinstance(value, Query)
and not value.has_select_fields
and not check_rel_lookup_compatibility(value.model, opts, field)
):
raise ValueError(
'Cannot use QuerySet for "%s": Use a QuerySet for "%s".'
% (value.model._meta.object_name, opts.object_name)
)
elif hasattr(value, "_meta"):
self.check_query_object_type(value, opts, field)
elif hasattr(value, "__iter__"):
for v in value:
self.check_query_object_type(v, opts, field)
def check_filterable(self, expression):
"""Raise an error if expression cannot be used in a WHERE clause."""
if hasattr(expression, "resolve_expression") and not getattr(
expression, "filterable", True
):
raise NotSupportedError(
expression.__class__.__name__ + " is disallowed in the filter "
"clause."
)
if hasattr(expression, "get_source_expressions"):
for expr in expression.get_source_expressions():
self.check_filterable(expr)
def build_lookup(self, lookups, lhs, rhs):
"""
Try to extract transforms and lookup from given lhs.
The lhs value is something that works like SQLExpression.
The rhs value is what the lookup is going to compare against.
        The 'lookups' argument is a list of names to extract using get_lookup()
and get_transform().
"""
# __exact is the default lookup if one isn't given.
*transforms, lookup_name = lookups or ["exact"]
for name in transforms:
lhs = self.try_transform(lhs, name)
# First try get_lookup() so that the lookup takes precedence if the lhs
# supports both transform and lookup for the name.
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
# A lookup wasn't found. Try to interpret the name as a transform
# and do an Exact lookup against it.
lhs = self.try_transform(lhs, lookup_name)
lookup_name = "exact"
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
return
lookup = lookup_class(lhs, rhs)
# Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
# uses of None as a query value unless the lookup supports it.
if lookup.rhs is None and not lookup.can_use_none_as_rhs:
if lookup_name not in ("exact", "iexact"):
raise ValueError("Cannot use None as a query value")
return lhs.get_lookup("isnull")(lhs, True)
# For Oracle '' is equivalent to null. The check must be done at this
# stage because join promotion can't be done in the compiler. Using
# DEFAULT_DB_ALIAS isn't nice but it's the best that can be done here.
# A similar thing is done in is_nullable(), too.
if (
lookup_name == "exact"
and lookup.rhs == ""
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
):
return lhs.get_lookup("isnull")(lhs, True)
return lookup
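    # Sketch of the None handling above: a bare "field=None" (implicit
    # "__exact") is rewritten to an isnull lookup, so filter(author=None)
    # compiles to "author_id" IS NULL rather than an "=" comparison against
    # SQL NULL.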
def try_transform(self, lhs, name):
"""
Helper method for build_lookup(). Try to fetch and initialize
        a transform for the name parameter from lhs.
"""
transform_class = lhs.get_transform(name)
if transform_class:
return transform_class(lhs)
else:
output_field = lhs.output_field.__class__
suggested_lookups = difflib.get_close_matches(
name, output_field.get_lookups()
)
if suggested_lookups:
suggestion = ", perhaps you meant %s?" % " or ".join(suggested_lookups)
else:
suggestion = "."
raise FieldError(
"Unsupported lookup '%s' for %s or join on the field not "
"permitted%s" % (name, output_field.__name__, suggestion)
)
def build_filter(
self,
filter_expr,
branch_negated=False,
current_negated=False,
can_reuse=None,
allow_joins=True,
split_subq=True,
reuse_with_filtered_relation=False,
check_filterable=True,
summarize=False,
):
"""
Build a WhereNode for a single filter clause but don't add it
to this Query. Query.add_q() will then add this filter to the where
Node.
The 'branch_negated' tells us if the current branch contains any
negations. This will be used to determine if subqueries are needed.
The 'current_negated' is used to determine if the current filter is
negated or not and this will be used to determine if IS NULL filtering
is needed.
The difference between current_negated and branch_negated is that
branch_negated is set on first negation, but current_negated is
flipped for each negation.
        Note that add_filter will not do any negating itself; that is done
        higher up in the code by add_q().
The 'can_reuse' is a set of reusable joins for multijoins.
If 'reuse_with_filtered_relation' is True, then only joins in can_reuse
will be reused.
The method will create a filter clause that can be added to the current
query. However, if the filter isn't added to the query then the caller
is responsible for unreffing the joins used.
"""
if isinstance(filter_expr, dict):
raise FieldError("Cannot parse keyword query as dict")
if isinstance(filter_expr, Q):
return self._add_q(
filter_expr,
branch_negated=branch_negated,
current_negated=current_negated,
used_aliases=can_reuse,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
summarize=summarize,
)
if hasattr(filter_expr, "resolve_expression"):
if not getattr(filter_expr, "conditional", False):
raise TypeError("Cannot filter against a non-conditional expression.")
condition = filter_expr.resolve_expression(
self, allow_joins=allow_joins, summarize=summarize
)
if not isinstance(condition, Lookup):
condition = self.build_lookup(["exact"], condition, True)
return WhereNode([condition], connector=AND), []
arg, value = filter_expr
if not arg:
raise FieldError("Cannot parse keyword query %r" % arg)
lookups, parts, reffed_expression = self.solve_lookup_type(arg, summarize)
if check_filterable:
self.check_filterable(reffed_expression)
if not allow_joins and len(parts) > 1:
raise FieldError("Joined field references are not permitted in this query")
pre_joins = self.alias_refcount.copy()
value = self.resolve_lookup_value(value, can_reuse, allow_joins)
used_joins = {
k for k, v in self.alias_refcount.items() if v > pre_joins.get(k, 0)
}
if check_filterable:
self.check_filterable(value)
if reffed_expression:
condition = self.build_lookup(lookups, reffed_expression, value)
return WhereNode([condition], connector=AND), []
opts = self.get_meta()
alias = self.get_initial_alias()
allow_many = not branch_negated or not split_subq
try:
join_info = self.setup_joins(
parts,
opts,
alias,
can_reuse=can_reuse,
allow_many=allow_many,
reuse_with_filtered_relation=reuse_with_filtered_relation,
)
# Prevent iterator from being consumed by check_related_objects()
if isinstance(value, Iterator):
value = list(value)
self.check_related_objects(join_info.final_field, value, join_info.opts)
# split_exclude() needs to know which joins were generated for the
# lookup parts
self._lookup_joins = join_info.joins
except MultiJoin as e:
return self.split_exclude(filter_expr, can_reuse, e.names_with_path)
# Update used_joins before trimming since they are reused to determine
# which joins could be later promoted to INNER.
used_joins.update(join_info.joins)
targets, alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if can_reuse is not None:
can_reuse.update(join_list)
if join_info.final_field.is_relation:
if len(targets) == 1:
col = self._get_col(targets[0], join_info.final_field, alias)
else:
col = MultiColSource(
alias, targets, join_info.targets, join_info.final_field
)
else:
col = self._get_col(targets[0], join_info.final_field, alias)
condition = self.build_lookup(lookups, col, value)
lookup_type = condition.lookup_name
clause = WhereNode([condition], connector=AND)
require_outer = (
lookup_type == "isnull" and condition.rhs is True and not current_negated
)
if (
current_negated
and (lookup_type != "isnull" or condition.rhs is False)
and condition.rhs is not None
):
require_outer = True
if lookup_type != "isnull":
# The condition added here will be SQL like this:
# NOT (col IS NOT NULL), where the first NOT is added in
# upper layers of code. The reason for addition is that if col
# is null, then col != someval will result in SQL "unknown"
# which isn't the same as in Python. The Python None handling
# is wanted, and it can be gotten by
# (col IS NULL OR col != someval)
# <=>
# NOT (col IS NOT NULL AND col = someval).
if (
self.is_nullable(targets[0])
or self.alias_map[join_list[-1]].join_type == LOUTER
):
lookup_class = targets[0].get_lookup("isnull")
col = self._get_col(targets[0], join_info.targets[0], alias)
clause.add(lookup_class(col, False), AND)
# If someval is a nullable column, someval IS NOT NULL is
# added.
if isinstance(value, Col) and self.is_nullable(value.target):
lookup_class = value.target.get_lookup("isnull")
clause.add(lookup_class(value, False), AND)
return clause, used_joins if not require_outer else ()
def add_filter(self, filter_lhs, filter_rhs):
self.add_q(Q((filter_lhs, filter_rhs)))
def add_q(self, q_object):
"""
A preprocessor for the internal _add_q(). Responsible for doing final
join promotion.
"""
# For join promotion this case is doing an AND for the added q_object
# and existing conditions. So, any existing inner join forces the join
# type to remain inner. Existing outer joins can however be demoted.
# (Consider case where rel_a is LOUTER and rel_a__col=1 is added - if
# rel_a doesn't produce any rows, then the whole condition must fail.
# So, demotion is OK.
existing_inner = {
a for a in self.alias_map if self.alias_map[a].join_type == INNER
}
clause, _ = self._add_q(q_object, self.used_aliases)
if clause:
self.where.add(clause, AND)
self.demote_joins(existing_inner)
def build_where(self, filter_expr):
return self.build_filter(filter_expr, allow_joins=False)[0]
def clear_where(self):
self.where = WhereNode()
def _add_q(
self,
q_object,
used_aliases,
branch_negated=False,
current_negated=False,
allow_joins=True,
split_subq=True,
check_filterable=True,
summarize=False,
):
"""Add a Q-object to the current filter."""
connector = q_object.connector
current_negated ^= q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = WhereNode(connector=connector, negated=q_object.negated)
joinpromoter = JoinPromoter(
q_object.connector, len(q_object.children), current_negated
)
for child in q_object.children:
child_clause, needed_inner = self.build_filter(
child,
can_reuse=used_aliases,
branch_negated=branch_negated,
current_negated=current_negated,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
summarize=summarize,
)
joinpromoter.add_votes(needed_inner)
if child_clause:
target_clause.add(child_clause, connector)
needed_inner = joinpromoter.update_join_types(self)
return target_clause, needed_inner
def build_filtered_relation_q(
self, q_object, reuse, branch_negated=False, current_negated=False
):
"""Add a FilteredRelation object to the current filter."""
connector = q_object.connector
current_negated ^= q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = WhereNode(connector=connector, negated=q_object.negated)
for child in q_object.children:
if isinstance(child, Node):
child_clause = self.build_filtered_relation_q(
child,
reuse=reuse,
branch_negated=branch_negated,
current_negated=current_negated,
)
else:
child_clause, _ = self.build_filter(
child,
can_reuse=reuse,
branch_negated=branch_negated,
current_negated=current_negated,
allow_joins=True,
split_subq=False,
reuse_with_filtered_relation=True,
)
target_clause.add(child_clause, connector)
return target_clause
def add_filtered_relation(self, filtered_relation, alias):
filtered_relation.alias = alias
lookups = dict(get_children_from_q(filtered_relation.condition))
relation_lookup_parts, relation_field_parts, _ = self.solve_lookup_type(
filtered_relation.relation_name
)
if relation_lookup_parts:
raise ValueError(
"FilteredRelation's relation_name cannot contain lookups "
"(got %r)." % filtered_relation.relation_name
)
for lookup in chain(lookups):
lookup_parts, lookup_field_parts, _ = self.solve_lookup_type(lookup)
shift = 2 if not lookup_parts else 1
lookup_field_path = lookup_field_parts[:-shift]
for idx, lookup_field_part in enumerate(lookup_field_path):
if len(relation_field_parts) > idx:
if relation_field_parts[idx] != lookup_field_part:
raise ValueError(
"FilteredRelation's condition doesn't support "
"relations outside the %r (got %r)."
% (filtered_relation.relation_name, lookup)
)
else:
raise ValueError(
"FilteredRelation's condition doesn't support nested "
"relations deeper than the relation_name (got %r for "
"%r)." % (lookup, filtered_relation.relation_name)
)
self._filtered_relations[filtered_relation.alias] = filtered_relation
def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False):
"""
        Walk the list of names and turn them into PathInfo tuples. A single
name in 'names' can generate multiple PathInfos (m2m, for example).
'names' is the path of names to travel, 'opts' is the model Options we
start the name resolving from, 'allow_many' is as for setup_joins().
If fail_on_missing is set to True, then a name that can't be resolved
will generate a FieldError.
Return a list of PathInfo tuples. In addition return the final field
(the last used join field) and target (which is a field guaranteed to
contain the same value as the final field). Finally, return those names
that weren't found (which are likely transforms and the final lookup).
"""
path, names_with_path = [], []
for pos, name in enumerate(names):
cur_names_with_path = (name, [])
if name == "pk":
name = opts.pk.name
field = None
filtered_relation = None
try:
if opts is None:
raise FieldDoesNotExist
field = opts.get_field(name)
except FieldDoesNotExist:
if name in self.annotation_select:
field = self.annotation_select[name].output_field
elif name in self._filtered_relations and pos == 0:
filtered_relation = self._filtered_relations[name]
if LOOKUP_SEP in filtered_relation.relation_name:
parts = filtered_relation.relation_name.split(LOOKUP_SEP)
filtered_relation_path, field, _, _ = self.names_to_path(
parts,
opts,
allow_many,
fail_on_missing,
)
path.extend(filtered_relation_path[:-1])
else:
field = opts.get_field(filtered_relation.relation_name)
if field is not None:
# Fields that contain one-to-many relations with a generic
# model (like a GenericForeignKey) cannot generate reverse
# relations and therefore cannot be used for reverse querying.
if field.is_relation and not field.related_model:
raise FieldError(
"Field %r does not generate an automatic reverse "
"relation and therefore cannot be used for reverse "
"querying. If it is a GenericForeignKey, consider "
"adding a GenericRelation." % name
)
try:
model = field.model._meta.concrete_model
except AttributeError:
# QuerySet.annotate() may introduce fields that aren't
# attached to a model.
model = None
else:
# We didn't find the current field, so move position back
# one step.
pos -= 1
if pos == -1 or fail_on_missing:
available = sorted(
[
*get_field_names_from_opts(opts),
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword '%s' into field. "
"Choices are: %s" % (name, ", ".join(available))
)
break
# Check if we need any joins for concrete inheritance cases (the
# field lives in parent, but we are currently in one of its
# children)
if opts is not None and model is not opts.model:
path_to_parent = opts.get_path_to_parent(model)
if path_to_parent:
path.extend(path_to_parent)
cur_names_with_path[1].extend(path_to_parent)
opts = path_to_parent[-1].to_opts
if hasattr(field, "path_infos"):
if filtered_relation:
pathinfos = field.get_path_info(filtered_relation)
else:
pathinfos = field.path_infos
if not allow_many:
for inner_pos, p in enumerate(pathinfos):
if p.m2m:
cur_names_with_path[1].extend(pathinfos[0 : inner_pos + 1])
names_with_path.append(cur_names_with_path)
raise MultiJoin(pos + 1, names_with_path)
last = pathinfos[-1]
path.extend(pathinfos)
final_field = last.join_field
opts = last.to_opts
targets = last.target_fields
cur_names_with_path[1].extend(pathinfos)
names_with_path.append(cur_names_with_path)
else:
# Local non-relational field.
final_field = field
targets = (field,)
if fail_on_missing and pos + 1 != len(names):
raise FieldError(
"Cannot resolve keyword %r into field. Join on '%s'"
" not permitted." % (names[pos + 1], name)
)
break
return path, final_field, targets, names[pos + 1 :]
def setup_joins(
self,
names,
opts,
alias,
can_reuse=None,
allow_many=True,
reuse_with_filtered_relation=False,
):
"""
Compute the necessary table joins for the passage through the fields
given in 'names'. 'opts' is the Options class for the current model
(which gives the table we are starting from), 'alias' is the alias for
the table to start the joining from.
        The 'can_reuse' defines the reverse foreign key joins we can reuse. It
        can be None, in which case all joins are reusable, or a set of aliases
        that can be reused. Note that non-reverse foreign keys are always
        reusable when using setup_joins().
        The 'reuse_with_filtered_relation' flag can be used to force reuse of
        the aliases in 'can_reuse' even for joins that carry a filtered
        relation.
If 'allow_many' is False, then any reverse foreign key seen will
generate a MultiJoin exception.
Return the final field involved in the joins, the target field (used
for any 'where' constraint), the final 'opts' value, the joins, the
field path traveled to generate the joins, and a transform function
that takes a field and alias and is equivalent to `field.get_col(alias)`
in the simple case but wraps field transforms if they were included in
names.
        The target field is the field containing the concrete value. The final
        field can be something different: for example, a foreign key pointing
        to that value. The final field is needed in some value conversions
        (e.g. converting 'obj' in fk__id=obj to a pk value using the foreign
        key field).
"""
joins = [alias]
# The transform can't be applied yet, as joins must be trimmed later.
# To avoid making every caller of this method look up transforms
# directly, compute transforms here and create a partial that converts
# fields to the appropriate wrapped version.
def final_transformer(field, alias):
if not self.alias_cols:
alias = None
return field.get_col(alias)
# Try resolving all the names as fields first. If there's an error,
# treat trailing names as lookups until a field can be resolved.
last_field_exception = None
for pivot in range(len(names), 0, -1):
try:
path, final_field, targets, rest = self.names_to_path(
names[:pivot],
opts,
allow_many,
fail_on_missing=True,
)
except FieldError as exc:
if pivot == 1:
# The first item cannot be a lookup, so it's safe
# to raise the field error here.
raise
else:
last_field_exception = exc
else:
# The transforms are the remaining items that couldn't be
# resolved into fields.
transforms = names[pivot:]
break
for name in transforms:
def transform(field, alias, *, name, previous):
try:
wrapped = previous(field, alias)
return self.try_transform(wrapped, name)
except FieldError:
# FieldError is raised if the transform doesn't exist.
if isinstance(final_field, Field) and last_field_exception:
raise last_field_exception
else:
raise
final_transformer = functools.partial(
transform, name=name, previous=final_transformer
)
final_transformer.has_transforms = True
# Then, add the path to the query's joins. Note that we can't trim
# joins at this stage - we will need the information about join type
# of the trimmed joins.
for join in path:
if join.filtered_relation:
filtered_relation = join.filtered_relation.clone()
table_alias = filtered_relation.alias
else:
filtered_relation = None
table_alias = None
opts = join.to_opts
if join.direct:
nullable = self.is_nullable(join.join_field)
else:
nullable = True
connection = self.join_class(
opts.db_table,
alias,
table_alias,
INNER,
join.join_field,
nullable,
filtered_relation=filtered_relation,
)
reuse = can_reuse if join.m2m or reuse_with_filtered_relation else None
alias = self.join(
connection,
reuse=reuse,
reuse_with_filtered_relation=reuse_with_filtered_relation,
)
joins.append(alias)
if filtered_relation:
filtered_relation.path = joins[:]
return JoinInfo(final_field, targets, opts, joins, path, final_transformer)
def trim_joins(self, targets, joins, path):
"""
The 'target' parameter is the final field being joined to, 'joins'
is the full list of join aliases. The 'path' contain the PathInfos
used to create the joins.
Return the final target field and table alias and the new active
joins.
Always trim any direct join if the target column is already in the
previous table. Can't trim reverse joins as it's unknown if there's
anything on the other side of the join.
"""
joins = joins[:]
for pos, info in enumerate(reversed(path)):
if len(joins) == 1 or not info.direct:
break
if info.filtered_relation:
break
join_targets = {t.column for t in info.join_field.foreign_related_fields}
cur_targets = {t.column for t in targets}
if not cur_targets.issubset(join_targets):
break
targets_dict = {
r[1].column: r[0]
for r in info.join_field.related_fields
if r[1].column in cur_targets
}
targets = tuple(targets_dict[t.column] for t in targets)
self.unref_alias(joins.pop())
return targets, joins[-1], joins
@classmethod
def _gen_cols(cls, exprs, include_external=False, resolve_refs=True):
for expr in exprs:
if isinstance(expr, Col):
yield expr
elif include_external and callable(
getattr(expr, "get_external_cols", None)
):
yield from expr.get_external_cols()
elif hasattr(expr, "get_source_expressions"):
if not resolve_refs and isinstance(expr, Ref):
continue
yield from cls._gen_cols(
expr.get_source_expressions(),
include_external=include_external,
resolve_refs=resolve_refs,
)
@classmethod
def _gen_col_aliases(cls, exprs):
yield from (expr.alias for expr in cls._gen_cols(exprs))
def resolve_ref(self, name, allow_joins=True, reuse=None, summarize=False):
annotation = self.annotations.get(name)
if annotation is not None:
if not allow_joins:
for alias in self._gen_col_aliases([annotation]):
if isinstance(self.alias_map[alias], Join):
raise FieldError(
"Joined field references are not permitted in this query"
)
if summarize:
# Summarize currently means we are doing an aggregate() query
# which is executed as a wrapped subquery if any of the
# aggregate() elements reference an existing annotation. In
# that case we need to return a Ref to the subquery's annotation.
if name not in self.annotation_select:
raise FieldError(
"Cannot aggregate over the '%s' alias. Use annotate() "
"to promote it." % name
)
return Ref(name, self.annotation_select[name])
else:
return annotation
else:
field_list = name.split(LOOKUP_SEP)
annotation = self.annotations.get(field_list[0])
if annotation is not None:
for transform in field_list[1:]:
annotation = self.try_transform(annotation, transform)
return annotation
join_info = self.setup_joins(
field_list, self.get_meta(), self.get_initial_alias(), can_reuse=reuse
)
targets, final_alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if not allow_joins and len(join_list) > 1:
raise FieldError(
"Joined field references are not permitted in this query"
)
if len(targets) > 1:
raise FieldError(
"Referencing multicolumn fields with F() objects isn't supported"
)
# Verify that the last lookup in name is a field or a transform:
# transform_function() raises FieldError if not.
transform = join_info.transform_function(targets[0], final_alias)
if reuse is not None:
reuse.update(join_list)
return transform
def split_exclude(self, filter_expr, can_reuse, names_with_path):
"""
When doing an exclude against any kind of N-to-many relation, we need
to use a subquery. This method constructs the nested query, given the
original exclude filter (filter_expr) and the portion up to the first
N-to-many relation field.
        For example, if the original filter is ~Q(child__name='foo'), filter_expr
is ('child__name', 'foo') and can_reuse is a set of joins usable for
filters in the original query.
We will turn this into equivalent of:
WHERE NOT EXISTS(
SELECT 1
FROM child
WHERE name = 'foo' AND child.parent_id = parent.id
LIMIT 1
)
"""
# Generate the inner query.
query = self.__class__(self.model)
query._filtered_relations = self._filtered_relations
filter_lhs, filter_rhs = filter_expr
if isinstance(filter_rhs, OuterRef):
filter_rhs = OuterRef(filter_rhs)
elif isinstance(filter_rhs, F):
filter_rhs = OuterRef(filter_rhs.name)
query.add_filter(filter_lhs, filter_rhs)
query.clear_ordering(force=True)
# Try to have as simple as possible subquery -> trim leading joins from
# the subquery.
trimmed_prefix, contains_louter = query.trim_start(names_with_path)
col = query.select[0]
select_field = col.target
alias = col.alias
if alias in can_reuse:
pk = select_field.model._meta.pk
# Need to add a restriction so that outer query's filters are in effect for
# the subquery, too.
query.bump_prefix(self)
lookup_class = select_field.get_lookup("exact")
# Note that the query.select[0].alias is different from alias
# due to bump_prefix above.
lookup = lookup_class(pk.get_col(query.select[0].alias), pk.get_col(alias))
query.where.add(lookup, AND)
query.external_aliases[alias] = True
lookup_class = select_field.get_lookup("exact")
lookup = lookup_class(col, ResolvedOuterRef(trimmed_prefix))
query.where.add(lookup, AND)
condition, needed_inner = self.build_filter(Exists(query))
if contains_louter:
or_null_condition, _ = self.build_filter(
("%s__isnull" % trimmed_prefix, True),
current_negated=True,
branch_negated=True,
can_reuse=can_reuse,
)
condition.add(or_null_condition, OR)
# Note that the end result will be:
# (outercol NOT IN innerq AND outercol IS NOT NULL) OR outercol IS NULL.
# This might look crazy but due to how IN works, this seems to be
# correct. If the IS NOT NULL check is removed then outercol NOT
# IN will return UNKNOWN. If the IS NULL check is removed, then if
# outercol IS NULL we will not match the row.
return condition, needed_inner
def set_empty(self):
self.where.add(NothingNode(), AND)
for query in self.combined_queries:
query.set_empty()
def is_empty(self):
return any(isinstance(c, NothingNode) for c in self.where.children)
def set_limits(self, low=None, high=None):
"""
Adjust the limits on the rows retrieved. Use low/high to set these,
as it makes it more Pythonic to read and write. When the SQL query is
created, convert them to the appropriate offset and limit values.
Apply any limits passed in here to the existing constraints. Add low
to the current low value and clamp both to any existing high value.
"""
if high is not None:
if self.high_mark is not None:
self.high_mark = min(self.high_mark, self.low_mark + high)
else:
self.high_mark = self.low_mark + high
if low is not None:
if self.high_mark is not None:
self.low_mark = min(self.high_mark, self.low_mark + low)
else:
self.low_mark = self.low_mark + low
if self.low_mark == self.high_mark:
self.set_empty()
def clear_limits(self):
"""Clear any existing limits."""
self.low_mark, self.high_mark = 0, None
@property
def is_sliced(self):
return self.low_mark != 0 or self.high_mark is not None
def has_limit_one(self):
return self.high_mark is not None and (self.high_mark - self.low_mark) == 1
def can_filter(self):
"""
Return True if adding filters to this instance is still possible.
Typically, this means no limits or offsets have been put on the results.
"""
return not self.is_sliced
def clear_select_clause(self):
"""Remove all fields from SELECT clause."""
self.select = ()
self.default_cols = False
self.select_related = False
self.set_extra_mask(())
self.set_annotation_mask(())
def clear_select_fields(self):
"""
Clear the list of fields to select (but not extra_select columns).
Some queryset types completely replace any existing list of select
columns.
"""
self.select = ()
self.values_select = ()
def add_select_col(self, col, name):
self.select += (col,)
self.values_select += (name,)
def set_select(self, cols):
self.default_cols = False
self.select = tuple(cols)
def add_distinct_fields(self, *field_names):
"""
Add and resolve the given fields to the query's "distinct on" clause.
"""
self.distinct_fields = field_names
self.distinct = True
def add_fields(self, field_names, allow_m2m=True):
"""
Add the given (model) fields to the select set. Add the field names in
the order specified.
"""
alias = self.get_initial_alias()
opts = self.get_meta()
try:
cols = []
for name in field_names:
# Join promotion note - we must not remove any rows here, so
                # if there are no existing joins, use outer join.
join_info = self.setup_joins(
name.split(LOOKUP_SEP), opts, alias, allow_many=allow_m2m
)
targets, final_alias, joins = self.trim_joins(
join_info.targets,
join_info.joins,
join_info.path,
)
for target in targets:
cols.append(join_info.transform_function(target, final_alias))
if cols:
self.set_select(cols)
except MultiJoin:
raise FieldError("Invalid field name: '%s'" % name)
except FieldError:
if LOOKUP_SEP in name:
# For lookups spanning over relationships, show the error
# from the model on which the lookup failed.
raise
elif name in self.annotations:
raise FieldError(
"Cannot select the '%s' alias. Use annotate() to promote "
"it." % name
)
else:
names = sorted(
[
*get_field_names_from_opts(opts),
*self.extra,
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword %r into field. "
"Choices are: %s" % (name, ", ".join(names))
)
def add_ordering(self, *ordering):
"""
Add items from the 'ordering' sequence to the query's "order by"
clause. These items are either field names (not column names) --
possibly with a direction prefix ('-' or '?') -- or OrderBy
expressions.
If 'ordering' is empty, clear all ordering from the query.
"""
errors = []
for item in ordering:
if isinstance(item, str):
if item == "?":
continue
if item.startswith("-"):
item = item[1:]
if item in self.annotations:
continue
if self.extra and item in self.extra:
continue
                # names_to_path() validates the lookup. A descriptive
                # FieldError will be raised if it's not.
self.names_to_path(item.split(LOOKUP_SEP), self.model._meta)
elif not hasattr(item, "resolve_expression"):
errors.append(item)
if getattr(item, "contains_aggregate", False):
raise FieldError(
"Using an aggregate in order_by() without also including "
"it in annotate() is not allowed: %s" % item
)
if errors:
raise FieldError("Invalid order_by arguments: %s" % errors)
if ordering:
self.order_by += ordering
else:
self.default_ordering = False
def clear_ordering(self, force=False, clear_default=True):
"""
Remove any ordering settings if the current query allows it without
side effects, set 'force' to True to clear the ordering regardless.
If 'clear_default' is True, there will be no ordering in the resulting
query (not even the model's default).
"""
if not force and (
self.is_sliced or self.distinct_fields or self.select_for_update
):
return
self.order_by = ()
self.extra_order_by = ()
if clear_default:
self.default_ordering = False
def set_group_by(self, allow_aliases=True):
"""
Expand the GROUP BY clause required by the query.
This will usually be the set of all non-aggregate fields in the
return data. If the database backend supports grouping by the
primary key, and the query would be equivalent, the optimization
will be made automatically.
"""
if allow_aliases:
# Column names from JOINs to check collisions with aliases.
column_names = set()
seen_models = set()
for join in list(self.alias_map.values())[1:]: # Skip base table.
model = join.join_field.related_model
if model not in seen_models:
column_names.update(
{field.column for field in model._meta.local_concrete_fields}
)
seen_models.add(model)
if self.values_select:
                # If grouping by aliases is allowed, give the selected values
                # aliases by moving them to annotations.
group_by_annotations = {}
values_select = {}
for alias, expr in zip(self.values_select, self.select):
if isinstance(expr, Col):
values_select[alias] = expr
else:
group_by_annotations[alias] = expr
self.annotations = {**group_by_annotations, **self.annotations}
self.append_annotation_mask(group_by_annotations)
self.select = tuple(values_select.values())
self.values_select = tuple(values_select)
group_by = list(self.select)
for alias, annotation in self.annotation_select.items():
if not (group_by_cols := annotation.get_group_by_cols()):
continue
if (
allow_aliases
and alias not in column_names
and not annotation.contains_aggregate
):
group_by.append(Ref(alias, annotation))
else:
group_by.extend(group_by_cols)
self.group_by = tuple(group_by)
def add_select_related(self, fields):
"""
Set up the select_related data structure so that we only select
certain related models (as opposed to all models, when
self.select_related=True).
"""
if isinstance(self.select_related, bool):
field_dict = {}
else:
field_dict = self.select_related
for field in fields:
d = field_dict
for part in field.split(LOOKUP_SEP):
d = d.setdefault(part, {})
self.select_related = field_dict
def add_extra(self, select, select_params, where, params, tables, order_by):
"""
Add data to the various extra_* attributes for user-created additions
to the query.
"""
if select:
# We need to pair any placeholder markers in the 'select'
# dictionary with their parameters in 'select_params' so that
# subsequent updates to the select dictionary also adjust the
# parameters appropriately.
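            # E.g. (a sketch) select={'double_id': 'id * %s'} with
            # select_params=[2] pairs the lone %s with 2, while an escaped
            # '%%s' is skipped by the "pos == 0 or preceding '%'" check below.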
select_pairs = {}
if select_params:
param_iter = iter(select_params)
else:
param_iter = iter([])
for name, entry in select.items():
self.check_alias(name)
entry = str(entry)
entry_params = []
pos = entry.find("%s")
while pos != -1:
if pos == 0 or entry[pos - 1] != "%":
entry_params.append(next(param_iter))
pos = entry.find("%s", pos + 2)
select_pairs[name] = (entry, entry_params)
self.extra.update(select_pairs)
if where or params:
self.where.add(ExtraWhere(where, params), AND)
if tables:
self.extra_tables += tuple(tables)
if order_by:
self.extra_order_by = order_by
def clear_deferred_loading(self):
"""Remove any fields from the deferred loading set."""
self.deferred_loading = (frozenset(), True)
def add_deferred_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
exclude from loading from the database when automatic column selection
is done. Add the new field names to any existing field names that
are deferred (or removed from any existing field names that are marked
as the only ones for immediate loading).
"""
# Fields on related models are stored in the literal double-underscore
        # format, so that we can use a set data structure. We do the foo__bar
# splitting and handling when computing the SQL column names (as part of
# get_columns()).
existing, defer = self.deferred_loading
if defer:
# Add to existing deferred names.
self.deferred_loading = existing.union(field_names), True
else:
# Remove names from the set of any existing "immediate load" names.
if new_existing := existing.difference(field_names):
self.deferred_loading = new_existing, False
else:
self.clear_deferred_loading()
if new_only := set(field_names).difference(existing):
self.deferred_loading = new_only, True
def add_immediate_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
retrieve when the SQL is executed ("immediate loading" fields). The
field names replace any existing immediate loading field names. If
there are field names already specified for deferred loading, remove
those names from the new field_names before storing the new names
for immediate loading. (That is, immediate loading overrides any
existing immediate values, but respects existing deferrals.)
"""
existing, defer = self.deferred_loading
field_names = set(field_names)
if "pk" in field_names:
field_names.remove("pk")
field_names.add(self.get_meta().pk.name)
if defer:
# Remove any existing deferred names from the current set before
# setting the new names.
self.deferred_loading = field_names.difference(existing), False
else:
# Replace any existing "immediate load" field names.
self.deferred_loading = frozenset(field_names), False
def set_annotation_mask(self, names):
"""Set the mask of annotations that will be returned by the SELECT."""
if names is None:
self.annotation_select_mask = None
else:
self.annotation_select_mask = set(names)
self._annotation_select_cache = None
def append_annotation_mask(self, names):
if self.annotation_select_mask is not None:
self.set_annotation_mask(self.annotation_select_mask.union(names))
def set_extra_mask(self, names):
"""
Set the mask of extra select items that will be returned by SELECT.
Don't remove them from the Query since they might be used later.
"""
if names is None:
self.extra_select_mask = None
else:
self.extra_select_mask = set(names)
self._extra_select_cache = None
def set_values(self, fields):
self.select_related = False
self.clear_deferred_loading()
self.clear_select_fields()
self.has_select_fields = True
if fields:
field_names = []
extra_names = []
annotation_names = []
if not self.extra and not self.annotations:
# Shortcut - if there are no extra or annotations, then
# the values() clause must be just field names.
field_names = list(fields)
else:
self.default_cols = False
for f in fields:
if f in self.extra_select:
extra_names.append(f)
elif f in self.annotation_select:
annotation_names.append(f)
else:
field_names.append(f)
self.set_extra_mask(extra_names)
self.set_annotation_mask(annotation_names)
selected = frozenset(field_names + extra_names + annotation_names)
else:
field_names = [f.attname for f in self.model._meta.concrete_fields]
selected = frozenset(field_names)
# Selected annotations must be known before setting the GROUP BY
# clause.
if self.group_by is True:
self.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
self.set_group_by(allow_aliases=False)
self.clear_select_fields()
elif self.group_by:
# Resolve GROUP BY annotation references if they are not part of
# the selected fields anymore.
group_by = []
for expr in self.group_by:
if isinstance(expr, Ref) and expr.refs not in selected:
expr = self.annotations[expr.refs]
group_by.append(expr)
self.group_by = tuple(group_by)
self.values_select = tuple(field_names)
self.add_fields(field_names, True)
@property
def annotation_select(self):
"""
Return the dictionary of aggregate columns that are not masked and
should be used in the SELECT clause. Cache this result for performance.
"""
if self._annotation_select_cache is not None:
return self._annotation_select_cache
elif not self.annotations:
return {}
elif self.annotation_select_mask is not None:
self._annotation_select_cache = {
k: v
for k, v in self.annotations.items()
if k in self.annotation_select_mask
}
return self._annotation_select_cache
else:
return self.annotations
@property
def extra_select(self):
if self._extra_select_cache is not None:
return self._extra_select_cache
if not self.extra:
return {}
elif self.extra_select_mask is not None:
self._extra_select_cache = {
k: v for k, v in self.extra.items() if k in self.extra_select_mask
}
return self._extra_select_cache
else:
return self.extra
def trim_start(self, names_with_path):
"""
Trim joins from the start of the join path. The candidates for trim
are the PathInfos in names_with_path structure that are m2m joins.
Also set the select column so the start matches the join.
This method is meant to be used for generating the subquery joins &
cols in split_exclude().
Return a lookup usable for doing outerq.filter(lookup=self) and a
boolean indicating if the joins in the prefix contain a LEFT OUTER join.
_"""
all_paths = []
for _, paths in names_with_path:
all_paths.extend(paths)
contains_louter = False
# Trim and operate only on tables that were generated for
# the lookup part of the query. That is, avoid trimming
# joins generated for F() expressions.
lookup_tables = [
t for t in self.alias_map if t in self._lookup_joins or t == self.base_table
]
for trimmed_paths, path in enumerate(all_paths):
if path.m2m:
break
if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type == LOUTER:
contains_louter = True
alias = lookup_tables[trimmed_paths]
self.unref_alias(alias)
        # The path.join_field is a Rel; let's get the other side's field.
join_field = path.join_field.field
# Build the filter prefix.
paths_in_prefix = trimmed_paths
trimmed_prefix = []
for name, path in names_with_path:
if paths_in_prefix - len(path) < 0:
break
trimmed_prefix.append(name)
paths_in_prefix -= len(path)
trimmed_prefix.append(join_field.foreign_related_fields[0].name)
trimmed_prefix = LOOKUP_SEP.join(trimmed_prefix)
        # Let's still see if we can trim the first join from the inner query
# (that is, self). We can't do this for:
# - LEFT JOINs because we would miss those rows that have nothing on
# the outer side,
# - INNER JOINs from filtered relations because we would miss their
# filters.
first_join = self.alias_map[lookup_tables[trimmed_paths + 1]]
if first_join.join_type != LOUTER and not first_join.filtered_relation:
select_fields = [r[0] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths + 1]
self.unref_alias(lookup_tables[trimmed_paths])
extra_restriction = join_field.get_extra_restriction(
None, lookup_tables[trimmed_paths + 1]
)
if extra_restriction:
self.where.add(extra_restriction, AND)
else:
# TODO: It might be possible to trim more joins from the start of the
# inner query if it happens to have a longer join chain containing the
            # values in select_fields. Let's punt this one for now.
select_fields = [r[1] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths]
# The found starting point is likely a join_class instead of a
# base_table_class reference. But the first entry in the query's FROM
# clause must not be a JOIN.
for table in self.alias_map:
if self.alias_refcount[table] > 0:
self.alias_map[table] = self.base_table_class(
self.alias_map[table].table_name,
table,
)
break
self.set_select([f.get_col(select_alias) for f in select_fields])
return trimmed_prefix, contains_louter
def is_nullable(self, field):
"""
Check if the given field should be treated as nullable.
Some backends treat '' as null and Django treats such fields as
nullable for those backends. In such situations field.null can be
False even if we should treat the field as nullable.
"""
# We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have
# (nor should it have) knowledge of which connection is going to be
# used. The proper fix would be to defer all decisions where
# is_nullable() is needed to the compiler stage, but that is not easy
# to do currently.
return field.null or (
field.empty_strings_allowed
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
)
def get_order_dir(field, default="ASC"):
"""
Return the field name and direction for an order specification. For
example, '-foo' is returned as ('foo', 'DESC').
The 'default' param is used to indicate which way no prefix (or a '+'
prefix) should sort. The '-' prefix always sorts the opposite way.
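    For example (illustrative):
    >>> get_order_dir("-created")
    ('created', 'DESC')
    >>> get_order_dir("created", default="DESC")
    ('created', 'DESC')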
"""
dirn = ORDER_DIR[default]
if field[0] == "-":
return field[1:], dirn[1]
return field, dirn[0]
class JoinPromoter:
"""
A class to abstract away join promotion problems for complex filter
conditions.
"""
def __init__(self, connector, num_children, negated):
self.connector = connector
self.negated = negated
if self.negated:
if connector == AND:
self.effective_connector = OR
else:
self.effective_connector = AND
else:
self.effective_connector = self.connector
self.num_children = num_children
# Maps of table alias to how many times it is seen as required for
# inner and/or outer joins.
self.votes = Counter()
def __repr__(self):
return (
f"{self.__class__.__qualname__}(connector={self.connector!r}, "
f"num_children={self.num_children!r}, negated={self.negated!r})"
)
def add_votes(self, votes):
"""
        Add a single vote per item to self.votes. The parameter can be any
        iterable.
"""
self.votes.update(votes)
def update_join_types(self, query):
"""
Change join types so that the generated query is as efficient as
possible, but still correct. So, change as many joins as possible
to INNER, but don't make OUTER joins INNER if that could remove
results from the query.
"""
to_promote = set()
to_demote = set()
# The effective_connector is used so that NOT (a AND b) is treated
# similarly to (a OR b) for join promotion.
for table, votes in self.votes.items():
# We must use outer joins in OR case when the join isn't contained
# in all of the joins. Otherwise the INNER JOIN itself could remove
# valid results. Consider the case where a model with rel_a and
# rel_b relations is queried with rel_a__col=1 | rel_b__col=2. Now,
            # if the rel_a join doesn't produce any results (for example, a
            # reverse foreign key with no matching rows, or a null value in a
            # direct foreign key), and
# there is a matching row in rel_b with col=2, then an INNER join
# to rel_a would remove a valid match from the query. So, we need
# to promote any existing INNER to LOUTER (it is possible this
# promotion in turn will be demoted later on).
if self.effective_connector == OR and votes < self.num_children:
to_promote.add(table)
# If connector is AND and there is a filter that can match only
# when there is a joinable row, then use INNER. For example, in
# rel_a__col=1 & rel_b__col=2, if either of the rels produce NULL
# as join output, then the col=1 or col=2 can't match (as
# NULL=anything is always false).
# For the OR case, if all children voted for a join to be inner,
# then we can use INNER for the join. For example:
            # (rel_a__col__icontains='Alex' | rel_a__col__icontains='Russell')
# then if rel_a doesn't produce any rows, the whole condition
# can't match. Hence we can safely use INNER join.
if self.effective_connector == AND or (
self.effective_connector == OR and votes == self.num_children
):
to_demote.add(table)
# Finally, what happens in cases where we have:
# (rel_a__col=1|rel_b__col=2) & rel_a__col__gte=0
# Now, we first generate the OR clause, and promote joins for it
# in the first if branch above. Both rel_a and rel_b are promoted
# to LOUTER joins. After that we do the AND case. The OR case
# voted no inner joins but the rel_a__col__gte=0 votes inner join
# for rel_a. We demote it back to INNER join (in AND case a single
# vote is enough). The demotion is OK, if rel_a doesn't produce
# rows, then the rel_a__col__gte=0 clause can't be true, and thus
# the whole clause must be false. So, it is safe to use INNER
# join.
# Note that in this example we could just as well have the __gte
# clause and the OR clause swapped. Or we could replace the __gte
# clause with an OR clause containing rel_a__col=1|rel_a__col=2,
# and again we could safely demote to INNER.
query.promote_joins(to_promote)
query.demote_joins(to_demote)
return to_demote
import collections
import json
import re
from functools import partial
from itertools import chain
from django.core.exceptions import EmptyResultSet, FieldError, FullResultSet
from django.db import DatabaseError, NotSupportedError
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import F, OrderBy, RawSQL, Ref, Value
from django.db.models.functions import Cast, Random
from django.db.models.lookups import Lookup
from django.db.models.query_utils import select_related_descend
from django.db.models.sql.constants import (
CURSOR,
GET_ITERATOR_CHUNK_SIZE,
MULTI,
NO_RESULTS,
ORDER_DIR,
SINGLE,
)
from django.db.models.sql.query import Query, get_order_dir
from django.db.models.sql.where import AND
from django.db.transaction import TransactionManagementError
from django.utils.functional import cached_property
from django.utils.hashable import make_hashable
from django.utils.regex_helper import _lazy_re_compile
class SQLCompiler:
    # A multiline ordering SQL clause may appear from RawSQL.
ordering_parts = _lazy_re_compile(
r"^(.*)\s(?:ASC|DESC).*",
re.MULTILINE | re.DOTALL,
)
def __init__(self, query, connection, using, elide_empty=True):
self.query = query
self.connection = connection
self.using = using
# Some queries, e.g. coalesced aggregation, need to be executed even if
# they would return an empty result set.
self.elide_empty = elide_empty
self.quote_cache = {"*": "*"}
        # The select, klass_info, and annotations are needed by QuerySet.iterator();
# these are set as a side-effect of executing the query. Note that we calculate
# separately a list of extra select columns needed for grammatical correctness
# of the query, but these columns are not included in self.select.
self.select = None
self.annotation_col_map = None
self.klass_info = None
self._meta_ordering = None
def __repr__(self):
return (
f"<{self.__class__.__qualname__} "
f"model={self.query.model.__qualname__} "
f"connection={self.connection!r} using={self.using!r}>"
)
def setup_query(self, with_col_aliases=False):
if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map):
self.query.get_initial_alias()
self.select, self.klass_info, self.annotation_col_map = self.get_select(
with_col_aliases=with_col_aliases,
)
self.col_count = len(self.select)
def pre_sql_setup(self, with_col_aliases=False):
"""
Do any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
"""
self.setup_query(with_col_aliases=with_col_aliases)
order_by = self.get_order_by()
self.where, self.having, self.qualify = self.query.where.split_having_qualify(
must_group_by=self.query.group_by is not None
)
extra_select = self.get_extra_select(order_by, self.select)
self.has_extra_select = bool(extra_select)
group_by = self.get_group_by(self.select + extra_select, order_by)
return extra_select, order_by, group_by
def get_group_by(self, select, order_by):
"""
Return a list of 2-tuples of form (sql, params).
The logic of what exactly the GROUP BY clause contains is hard
to describe in other words than "if it passes the test suite,
then it is correct".
"""
# Some examples:
# SomeModel.objects.annotate(Count('somecol'))
# GROUP BY: all fields of the model
#
# SomeModel.objects.values('name').annotate(Count('somecol'))
# GROUP BY: name
#
# SomeModel.objects.annotate(Count('somecol')).values('name')
# GROUP BY: all cols of the model
#
# SomeModel.objects.values('name', 'pk')
# .annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# In fact, the self.query.group_by is the minimal set to GROUP BY. It
        # can never be restricted to a smaller set, but additional columns in
# HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately
# the end result is that it is impossible to force the query to have
# a chosen GROUP BY clause - you can almost do this by using the form:
# .values(*wanted_cols).annotate(AnAggregate())
# but any later annotations, extra selects, values calls that
        # refer to some column outside of the wanted_cols, order_by, or even
# filter calls can alter the GROUP BY clause.
# The query.group_by is either None (no GROUP BY at all), True
# (group by select fields), or a list of expressions to be added
# to the group by.
if self.query.group_by is None:
return []
expressions = []
allows_group_by_refs = self.connection.features.allows_group_by_refs
if self.query.group_by is not True:
# If the group by is set to a list (by .values() call most likely),
# then we need to add everything in it to the GROUP BY clause.
# Backwards compatibility hack for setting query.group_by. Remove
# when we have public API way of forcing the GROUP BY clause.
# Converts string references to expressions.
for expr in self.query.group_by:
if not hasattr(expr, "as_sql"):
expr = self.query.resolve_ref(expr)
if not allows_group_by_refs and isinstance(expr, Ref):
expr = expr.source
expressions.append(expr)
# Note that even if the group_by is set, it is only the minimal
# set to group by. So, we need to add cols in select, order_by, and
        # having into the GROUP BY in any case.
ref_sources = {expr.source for expr in expressions if isinstance(expr, Ref)}
aliased_exprs = {}
for expr, _, alias in select:
# Skip members of the select clause that are already included
# by reference.
if expr in ref_sources:
continue
if alias:
aliased_exprs[expr] = alias
expressions.extend(expr.get_group_by_cols())
if not self._meta_ordering:
for expr, (sql, params, is_ref) in order_by:
# Skip references to the SELECT clause, as all expressions in
# the SELECT clause are already part of the GROUP BY.
if not is_ref:
expressions.extend(expr.get_group_by_cols())
having_group_by = self.having.get_group_by_cols() if self.having else ()
for expr in having_group_by:
expressions.append(expr)
result = []
seen = set()
expressions = self.collapse_group_by(expressions, having_group_by)
for expr in expressions:
if allows_group_by_refs and (alias := aliased_exprs.get(expr)):
expr = Ref(alias, expr)
try:
sql, params = self.compile(expr)
except (EmptyResultSet, FullResultSet):
continue
sql, params = expr.select_format(self, sql, params)
params_hash = make_hashable(params)
if (sql, params_hash) not in seen:
result.append((sql, params))
seen.add((sql, params_hash))
return result
def collapse_group_by(self, expressions, having):
# If the database supports group by functional dependence reduction,
# then the expressions can be reduced to the set of selected table
# primary keys as all other columns are functionally dependent on them.
if self.connection.features.allows_group_by_selected_pks:
# Filter out all expressions associated with a table's primary key
# present in the grouped columns. This is done by identifying all
# tables that have their primary key included in the grouped
# columns and removing non-primary key columns referring to them.
# Unmanaged models are excluded because they could be representing
# database views on which the optimization might not be allowed.
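            # E.g. (a sketch) grouping by book.id, book.title, author.name
            # collapses to book.id, author.name when book.id is a grouped
            # primary key: book.title shares its alias and is dropped.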
pks = {
expr
for expr in expressions
if (
hasattr(expr, "target")
and expr.target.primary_key
and self.connection.features.allows_group_by_selected_pks_on_model(
expr.target.model
)
)
}
aliases = {expr.alias for expr in pks}
expressions = [
expr
for expr in expressions
if expr in pks
or expr in having
or getattr(expr, "alias", None) not in aliases
]
return expressions
def get_select(self, with_col_aliases=False):
"""
Return three values:
- a list of 3-tuples of (expression, (sql, params), alias)
- a klass_info structure,
- a dictionary of annotations
The (sql, params) is what the expression will produce, and alias is the
"AS alias" for the column (possibly None).
The klass_info structure contains the following information:
- The base model of the query.
- Which columns for that model are present in the query (by
position of the select clause).
        - related_klass_infos: [f, klass_info] to descend into
        The annotations value is a dictionary of {'attname': column position}
        entries.
"""
select = []
klass_info = None
annotations = {}
select_idx = 0
for alias, (sql, params) in self.query.extra_select.items():
annotations[alias] = select_idx
select.append((RawSQL(sql, params), alias))
select_idx += 1
assert not (self.query.select and self.query.default_cols)
select_mask = self.query.get_select_mask()
if self.query.default_cols:
cols = self.get_default_columns(select_mask)
else:
# self.query.select is a special case. These columns never go to
# any model.
cols = self.query.select
if cols:
select_list = []
for col in cols:
select_list.append(select_idx)
select.append((col, None))
select_idx += 1
klass_info = {
"model": self.query.model,
"select_fields": select_list,
}
for alias, annotation in self.query.annotation_select.items():
annotations[alias] = select_idx
select.append((annotation, alias))
select_idx += 1
if self.query.select_related:
related_klass_infos = self.get_related_selections(select, select_mask)
klass_info["related_klass_infos"] = related_klass_infos
def get_select_from_parent(klass_info):
for ki in klass_info["related_klass_infos"]:
if ki["from_parent"]:
ki["select_fields"] = (
klass_info["select_fields"] + ki["select_fields"]
)
get_select_from_parent(ki)
get_select_from_parent(klass_info)
ret = []
col_idx = 1
for col, alias in select:
try:
sql, params = self.compile(col)
except EmptyResultSet:
empty_result_set_value = getattr(
col, "empty_result_set_value", NotImplemented
)
if empty_result_set_value is NotImplemented:
# Select a predicate that's always False.
sql, params = "0", ()
else:
sql, params = self.compile(Value(empty_result_set_value))
except FullResultSet:
sql, params = self.compile(Value(True))
else:
sql, params = col.select_format(self, sql, params)
if alias is None and with_col_aliases:
alias = f"col{col_idx}"
col_idx += 1
ret.append((col, (sql, params), alias))
return ret, klass_info, annotations
def _order_by_pairs(self):
if self.query.extra_order_by:
ordering = self.query.extra_order_by
elif not self.query.default_ordering:
ordering = self.query.order_by
elif self.query.order_by:
ordering = self.query.order_by
elif (meta := self.query.get_meta()) and meta.ordering:
ordering = meta.ordering
self._meta_ordering = ordering
else:
ordering = []
if self.query.standard_ordering:
default_order, _ = ORDER_DIR["ASC"]
else:
default_order, _ = ORDER_DIR["DESC"]
for field in ordering:
if hasattr(field, "resolve_expression"):
if isinstance(field, Value):
# output_field must be resolved for constants.
field = Cast(field, field.output_field)
if not isinstance(field, OrderBy):
field = field.asc()
if not self.query.standard_ordering:
field = field.copy()
field.reverse_ordering()
if isinstance(field.expression, F) and (
annotation := self.query.annotation_select.get(
field.expression.name
)
):
field.expression = Ref(field.expression.name, annotation)
yield field, isinstance(field.expression, Ref)
continue
if field == "?": # random
yield OrderBy(Random()), False
continue
col, order = get_order_dir(field, default_order)
descending = order == "DESC"
if col in self.query.annotation_select:
# Reference to expression in SELECT clause
yield (
OrderBy(
Ref(col, self.query.annotation_select[col]),
descending=descending,
),
True,
)
continue
if col in self.query.annotations:
# References to an expression which is masked out of the SELECT
# clause.
if self.query.combinator and self.select:
# Don't use the resolved annotation because other
                    # combined queries might define it differently.
expr = F(col)
else:
expr = self.query.annotations[col]
if isinstance(expr, Value):
# output_field must be resolved for constants.
expr = Cast(expr, expr.output_field)
yield OrderBy(expr, descending=descending), False
continue
if "." in field:
# This came in through an extra(order_by=...) addition. Pass it
# on verbatim.
table, col = col.split(".", 1)
yield (
OrderBy(
RawSQL(
"%s.%s" % (self.quote_name_unless_alias(table), col), []
),
descending=descending,
),
False,
)
continue
if self.query.extra and col in self.query.extra:
if col in self.query.extra_select:
yield (
OrderBy(
Ref(col, RawSQL(*self.query.extra[col])),
descending=descending,
),
True,
)
else:
yield (
OrderBy(RawSQL(*self.query.extra[col]), descending=descending),
False,
)
else:
if self.query.combinator and self.select:
# Don't use the first model's field because other
                    # combined queries might define it differently.
yield OrderBy(F(col), descending=descending), False
else:
# 'col' is of the form 'field' or 'field1__field2' or
# '-field1__field2__field', etc.
yield from self.find_ordering_name(
field,
self.query.get_meta(),
default_order=default_order,
)
def get_order_by(self):
"""
Return a list of 2-tuples of the form (expr, (sql, params, is_ref)) for
the ORDER BY clause.
The order_by clause can alter the select clause (for example it can add
aliases to clauses that do not yet have one, or it can add totally new
select clauses).
"""
result = []
seen = set()
for expr, is_ref in self._order_by_pairs():
resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None)
if not is_ref and self.query.combinator and self.select:
src = resolved.expression
expr_src = expr.expression
for sel_expr, _, col_alias in self.select:
if src == sel_expr:
# When values() is used the exact alias must be used to
# reference annotations.
if (
self.query.has_select_fields
and col_alias in self.query.annotation_select
and not (
isinstance(expr_src, F) and col_alias == expr_src.name
)
):
continue
resolved.set_source_expressions(
[Ref(col_alias if col_alias else src.target.column, src)]
)
break
else:
# Add column used in ORDER BY clause to the selected
# columns and to each combined query.
order_by_idx = len(self.query.select) + 1
col_alias = f"__orderbycol{order_by_idx}"
for q in self.query.combined_queries:
                        # If fields were explicitly selected through values(),
                        # combined queries cannot be augmented.
if q.has_select_fields:
raise DatabaseError(
"ORDER BY term does not match any column in "
"the result set."
)
q.add_annotation(expr_src, col_alias)
self.query.add_select_col(resolved, col_alias)
resolved.set_source_expressions([Ref(col_alias, src)])
sql, params = self.compile(resolved)
# Don't add the same column twice, but the order direction is
# not taken into account so we strip it. When this entire method
# is refactored into expressions, then we can check each part as we
# generate it.
without_ordering = self.ordering_parts.search(sql)[1]
params_hash = make_hashable(params)
if (without_ordering, params_hash) in seen:
continue
seen.add((without_ordering, params_hash))
result.append((resolved, (sql, params, is_ref)))
return result
def get_extra_select(self, order_by, select):
extra_select = []
if self.query.distinct and not self.query.distinct_fields:
select_sql = [t[1] for t in select]
for expr, (sql, params, is_ref) in order_by:
without_ordering = self.ordering_parts.search(sql)[1]
if not is_ref and (without_ordering, params) not in select_sql:
extra_select.append((expr, (without_ordering, params), None))
return extra_select
def quote_name_unless_alias(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
quoted strings specially (e.g. PostgreSQL).
"""
if name in self.quote_cache:
return self.quote_cache[name]
if (
(name in self.query.alias_map and name not in self.query.table_map)
or name in self.query.extra_select
or (
self.query.external_aliases.get(name)
and name not in self.query.table_map
)
):
self.quote_cache[name] = name
return name
r = self.connection.ops.quote_name(name)
self.quote_cache[name] = r
return r
def compile(self, node):
vendor_impl = getattr(node, "as_" + self.connection.vendor, None)
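        # Vendor dispatch sketch: on PostgreSQL (vendor 'postgresql') a node's
        # as_postgresql() is preferred over the generic as_sql() when defined.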
if vendor_impl:
sql, params = vendor_impl(self, self.connection)
else:
sql, params = node.as_sql(self, self.connection)
return sql, params
def get_combinator_sql(self, combinator, all):
features = self.connection.features
compilers = [
query.get_compiler(self.using, self.connection, self.elide_empty)
for query in self.query.combined_queries
]
if not features.supports_slicing_ordering_in_compound:
for compiler in compilers:
if compiler.query.is_sliced:
raise DatabaseError(
"LIMIT/OFFSET not allowed in subqueries of compound statements."
)
if compiler.get_order_by():
raise DatabaseError(
"ORDER BY not allowed in subqueries of compound statements."
)
elif self.query.is_sliced and combinator == "union":
limit = (self.query.low_mark, self.query.high_mark)
for compiler in compilers:
                # A sliced union cannot have its parts elided, as some of them
                # might be sliced as well; in the event where only a single
                # part produces a non-empty result set, it might be impossible
                # to generate valid SQL.
compiler.elide_empty = False
if not compiler.query.is_sliced:
compiler.query.set_limits(*limit)
parts = ()
for compiler in compilers:
try:
# If the columns list is limited, then all combined queries
# must have the same columns list. Set the selects defined on
# the query on all combined queries, if not already set.
if not compiler.query.values_select and self.query.values_select:
compiler.query = compiler.query.clone()
compiler.query.set_values(
(
*self.query.extra_select,
*self.query.values_select,
*self.query.annotation_select,
)
)
part_sql, part_args = compiler.as_sql(with_col_aliases=True)
if compiler.query.combinator:
# Wrap in a subquery if wrapping in parentheses isn't
# supported.
if not features.supports_parentheses_in_compound:
part_sql = "SELECT * FROM ({})".format(part_sql)
# Add parentheses when combining with compound query if not
# already added for all compound queries.
elif (
self.query.subquery
or not features.supports_slicing_ordering_in_compound
):
part_sql = "({})".format(part_sql)
elif (
self.query.subquery
and features.supports_slicing_ordering_in_compound
):
part_sql = "({})".format(part_sql)
parts += ((part_sql, part_args),)
except EmptyResultSet:
# Omit the empty queryset with UNION and with DIFFERENCE if the
# first queryset is nonempty.
if combinator == "union" or (combinator == "difference" and parts):
continue
raise
if not parts:
raise EmptyResultSet
combinator_sql = self.connection.ops.set_operators[combinator]
if all and combinator == "union":
combinator_sql += " ALL"
braces = "{}"
if not self.query.subquery and features.supports_slicing_ordering_in_compound:
braces = "({})"
sql_parts, args_parts = zip(
*((braces.format(sql), args) for sql, args in parts)
)
result = [" {} ".format(combinator_sql).join(sql_parts)]
params = []
for part in args_parts:
params.extend(part)
return result, params
def get_qualify_sql(self):
where_parts = []
if self.where:
where_parts.append(self.where)
if self.having:
where_parts.append(self.having)
inner_query = self.query.clone()
inner_query.subquery = True
inner_query.where = inner_query.where.__class__(where_parts)
# Augment the inner query with any window function references that
# might have been masked via values() and alias(). If any masked
        # aliases are added, they'll be masked again to avoid fetching
# the data in the `if qual_aliases` branch below.
select = {
expr: alias for expr, _, alias in self.get_select(with_col_aliases=True)[0]
}
select_aliases = set(select.values())
qual_aliases = set()
replacements = {}
def collect_replacements(expressions):
while expressions:
expr = expressions.pop()
if expr in replacements:
continue
elif select_alias := select.get(expr):
replacements[expr] = select_alias
elif isinstance(expr, Lookup):
expressions.extend(expr.get_source_expressions())
elif isinstance(expr, Ref):
if expr.refs not in select_aliases:
expressions.extend(expr.get_source_expressions())
else:
num_qual_alias = len(qual_aliases)
select_alias = f"qual{num_qual_alias}"
qual_aliases.add(select_alias)
inner_query.add_annotation(expr, select_alias)
replacements[expr] = select_alias
collect_replacements(list(self.qualify.leaves()))
self.qualify = self.qualify.replace_expressions(
{expr: Ref(alias, expr) for expr, alias in replacements.items()}
)
order_by = []
for order_by_expr, *_ in self.get_order_by():
collect_replacements(order_by_expr.get_source_expressions())
order_by.append(
order_by_expr.replace_expressions(
{expr: Ref(alias, expr) for expr, alias in replacements.items()}
)
)
inner_query_compiler = inner_query.get_compiler(
self.using, elide_empty=self.elide_empty
)
inner_sql, inner_params = inner_query_compiler.as_sql(
# The limits must be applied to the outer query to avoid pruning
# results too eagerly.
with_limits=False,
# Force unique aliasing of selected columns to avoid collisions
# and make rhs predicates referencing easier.
with_col_aliases=True,
)
qualify_sql, qualify_params = self.compile(self.qualify)
result = [
"SELECT * FROM (",
inner_sql,
")",
self.connection.ops.quote_name("qualify"),
"WHERE",
qualify_sql,
]
if qual_aliases:
# If some select aliases were unmasked for filtering purposes they
# must be masked back.
cols = [self.connection.ops.quote_name(alias) for alias in select.values()]
result = [
"SELECT",
", ".join(cols),
"FROM (",
*result,
")",
self.connection.ops.quote_name("qualify_mask"),
]
params = list(inner_params) + qualify_params
        # As the SQL spec is unclear on whether or not a derived table's
        # ordering must propagate, it has to be explicitly repeated on the
        # outermost query to ensure it's preserved.
if order_by:
ordering_sqls = []
for ordering in order_by:
ordering_sql, ordering_params = self.compile(ordering)
ordering_sqls.append(ordering_sql)
params.extend(ordering_params)
result.extend(["ORDER BY", ", ".join(ordering_sqls)])
return result, params
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
If 'with_limits' is False, any limit/offset information is not included
in the query.
"""
refcounts_before = self.query.alias_refcount.copy()
try:
combinator = self.query.combinator
extra_select, order_by, group_by = self.pre_sql_setup(
with_col_aliases=with_col_aliases or bool(combinator),
)
for_update_part = None
# Is a LIMIT/OFFSET clause needed?
with_limit_offset = with_limits and self.query.is_sliced
combinator = self.query.combinator
features = self.connection.features
if combinator:
if not getattr(features, "supports_select_{}".format(combinator)):
raise NotSupportedError(
"{} is not supported on this database backend.".format(
combinator
)
)
result, params = self.get_combinator_sql(
combinator, self.query.combinator_all
)
elif self.qualify:
result, params = self.get_qualify_sql()
order_by = None
else:
distinct_fields, distinct_params = self.get_distinct()
# This must come after 'select', 'ordering', and 'distinct'
# (see docstring of get_from_clause() for details).
from_, f_params = self.get_from_clause()
try:
where, w_params = (
self.compile(self.where) if self.where is not None else ("", [])
)
except EmptyResultSet:
if self.elide_empty:
raise
# Use a predicate that's always False.
where, w_params = "0 = 1", []
except FullResultSet:
where, w_params = "", []
try:
having, h_params = (
self.compile(self.having)
if self.having is not None
else ("", [])
)
except FullResultSet:
having, h_params = "", []
result = ["SELECT"]
params = []
if self.query.distinct:
distinct_result, distinct_params = self.connection.ops.distinct_sql(
distinct_fields,
distinct_params,
)
result += distinct_result
params += distinct_params
out_cols = []
for _, (s_sql, s_params), alias in self.select + extra_select:
if alias:
s_sql = "%s AS %s" % (
s_sql,
self.connection.ops.quote_name(alias),
)
params.extend(s_params)
out_cols.append(s_sql)
result += [", ".join(out_cols)]
if from_:
result += ["FROM", *from_]
elif self.connection.features.bare_select_suffix:
result += [self.connection.features.bare_select_suffix]
params.extend(f_params)
if self.query.select_for_update and features.has_select_for_update:
if (
self.connection.get_autocommit()
                        # Don't raise an exception when the database doesn't
                        # support transactions, as it's a no-op.
and features.supports_transactions
):
raise TransactionManagementError(
"select_for_update cannot be used outside of a transaction."
)
if (
with_limit_offset
and not features.supports_select_for_update_with_limit
):
raise NotSupportedError(
"LIMIT/OFFSET is not supported with "
"select_for_update on this database backend."
)
nowait = self.query.select_for_update_nowait
skip_locked = self.query.select_for_update_skip_locked
of = self.query.select_for_update_of
no_key = self.query.select_for_no_key_update
# If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the
# backend doesn't support it, raise NotSupportedError to
# prevent a possible deadlock.
if nowait and not features.has_select_for_update_nowait:
raise NotSupportedError(
"NOWAIT is not supported on this database backend."
)
elif skip_locked and not features.has_select_for_update_skip_locked:
raise NotSupportedError(
"SKIP LOCKED is not supported on this database backend."
)
elif of and not features.has_select_for_update_of:
raise NotSupportedError(
"FOR UPDATE OF is not supported on this database backend."
)
elif no_key and not features.has_select_for_no_key_update:
raise NotSupportedError(
"FOR NO KEY UPDATE is not supported on this "
"database backend."
)
for_update_part = self.connection.ops.for_update_sql(
nowait=nowait,
skip_locked=skip_locked,
of=self.get_select_for_update_of_arguments(),
no_key=no_key,
)
if for_update_part and features.for_update_after_from:
result.append(for_update_part)
if where:
result.append("WHERE %s" % where)
params.extend(w_params)
grouping = []
for g_sql, g_params in group_by:
grouping.append(g_sql)
params.extend(g_params)
if grouping:
if distinct_fields:
raise NotImplementedError(
"annotate() + distinct(fields) is not implemented."
)
order_by = order_by or self.connection.ops.force_no_ordering()
result.append("GROUP BY %s" % ", ".join(grouping))
if self._meta_ordering:
order_by = None
if having:
result.append("HAVING %s" % having)
params.extend(h_params)
if self.query.explain_info:
result.insert(
0,
self.connection.ops.explain_query_prefix(
self.query.explain_info.format,
**self.query.explain_info.options,
),
)
if order_by:
ordering = []
for _, (o_sql, o_params, _) in order_by:
ordering.append(o_sql)
params.extend(o_params)
order_by_sql = "ORDER BY %s" % ", ".join(ordering)
if combinator and features.requires_compound_order_by_subquery:
result = ["SELECT * FROM (", *result, ")", order_by_sql]
else:
result.append(order_by_sql)
if with_limit_offset:
result.append(
self.connection.ops.limit_offset_sql(
self.query.low_mark, self.query.high_mark
)
)
if for_update_part and not features.for_update_after_from:
result.append(for_update_part)
if self.query.subquery and extra_select:
# If the query is used as a subquery, the extra selects would
# result in more columns than the left-hand side expression is
# expecting. This can happen when a subquery uses a combination
# of order_by() and distinct(), forcing the ordering expressions
# to be selected as well. Wrap the query in another subquery
# to exclude extraneous selects.
sub_selects = []
sub_params = []
for index, (select, _, alias) in enumerate(self.select, start=1):
if alias:
sub_selects.append(
"%s.%s"
% (
self.connection.ops.quote_name("subquery"),
self.connection.ops.quote_name(alias),
)
)
else:
select_clone = select.relabeled_clone(
{select.alias: "subquery"}
)
subselect, subparams = select_clone.as_sql(
self, self.connection
)
sub_selects.append(subselect)
sub_params.extend(subparams)
return "SELECT %s FROM (%s) subquery" % (
", ".join(sub_selects),
" ".join(result),
), tuple(sub_params + params)
return " ".join(result), tuple(params)
finally:
# Finally do cleanup - get rid of the joins we created above.
self.query.reset_refcounts(refcounts_before)
def get_default_columns(
self, select_mask, start_alias=None, opts=None, from_parent=None
):
"""
Compute the default columns for selecting every field in the base
model. Will sometimes be called to pull in related models (e.g. via
select_related), in which case "opts" and "start_alias" will be given
to provide a starting point for the traversal.
        Return a list of column expressions suitable for inclusion in the
        SELECT clause.
"""
result = []
if opts is None:
if (opts := self.query.get_meta()) is None:
return result
start_alias = start_alias or self.query.get_initial_alias()
# The 'seen_models' is used to optimize checking the needed parent
# alias for a given field. This also includes None -> start_alias to
# be used by local fields.
seen_models = {None: start_alias}
for field in opts.concrete_fields:
model = field.model._meta.concrete_model
# A proxy model will have a different model and concrete_model. We
# will assign None if the field belongs to this model.
if model == opts.model:
model = None
if (
from_parent
and model is not None
and issubclass(
from_parent._meta.concrete_model, model._meta.concrete_model
)
):
# Avoid loading data for already loaded parents.
# We end up here in the case select_related() resolution
# proceeds from parent model to child model. In that case the
# parent model data is already present in the SELECT clause,
# and we want to avoid reloading the same data again.
continue
if select_mask and field not in select_mask:
continue
alias = self.query.join_parent_model(opts, model, start_alias, seen_models)
column = field.get_col(alias)
result.append(column)
return result
def get_distinct(self):
"""
Return a quoted list of fields to use in DISTINCT ON part of the query.
This method can alter the tables in the query, and thus it must be
called before get_from_clause().
"""
result = []
params = []
opts = self.query.get_meta()
for name in self.query.distinct_fields:
parts = name.split(LOOKUP_SEP)
_, targets, alias, joins, path, _, transform_function = self._setup_joins(
parts, opts, None
)
targets, alias, _ = self.query.trim_joins(targets, joins, path)
for target in targets:
if name in self.query.annotation_select:
result.append(self.connection.ops.quote_name(name))
else:
r, p = self.compile(transform_function(target, alias))
result.append(r)
params.append(p)
return result, params
def find_ordering_name(
self, name, opts, alias=None, default_order="ASC", already_seen=None
):
"""
Return the table alias (the name might be ambiguous, the alias will
not be) and column name for ordering by the given 'name' parameter.
The 'name' is of the form 'field1__field2__...__fieldN'.
"""
name, order = get_order_dir(name, default_order)
descending = order == "DESC"
pieces = name.split(LOOKUP_SEP)
(
field,
targets,
alias,
joins,
path,
opts,
transform_function,
) = self._setup_joins(pieces, opts, alias)
# If we get to this point and the field is a relation to another model,
# append the default ordering for that model unless it is the pk
# shortcut or the attribute name of the field that is specified or
# there are transforms to process.
if (
field.is_relation
and opts.ordering
and getattr(field, "attname", None) != pieces[-1]
and name != "pk"
and not getattr(transform_function, "has_transforms", False)
):
# Firstly, avoid infinite loops.
already_seen = already_seen or set()
join_tuple = tuple(
getattr(self.query.alias_map[j], "join_cols", None) for j in joins
)
if join_tuple in already_seen:
raise FieldError("Infinite loop caused by ordering.")
already_seen.add(join_tuple)
results = []
for item in opts.ordering:
if hasattr(item, "resolve_expression") and not isinstance(
item, OrderBy
):
item = item.desc() if descending else item.asc()
if isinstance(item, OrderBy):
results.append(
(item.prefix_references(f"{name}{LOOKUP_SEP}"), False)
)
continue
results.extend(
(expr.prefix_references(f"{name}{LOOKUP_SEP}"), is_ref)
for expr, is_ref in self.find_ordering_name(
item, opts, alias, order, already_seen
)
)
return results
targets, alias, _ = self.query.trim_joins(targets, joins, path)
return [
(OrderBy(transform_function(t, alias), descending=descending), False)
for t in targets
]
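    # Illustrative example: ordering by "author__name" resolves the joins and
    # returns roughly [(OrderBy(Col(author_alias, name_field)), False)], while
    # ordering by a relation with Meta.ordering recursively expands into that
    # model's default ordering.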
def _setup_joins(self, pieces, opts, alias):
"""
Helper method for get_order_by() and get_distinct().
        get_order_by() and get_distinct() must produce the same target
        columns for the same input, as their prefixes must match. Executing
        SQL where this is not true is an error.
"""
alias = alias or self.query.get_initial_alias()
field, targets, opts, joins, path, transform_function = self.query.setup_joins(
pieces, opts, alias
)
alias = joins[-1]
return field, targets, alias, joins, path, opts, transform_function
def get_from_clause(self):
"""
        Return a list of strings that are joined together to go after the
        "FROM" part of the query, as well as a list of any extra parameters
        that need to be included. Subclasses can override this to create a
        from-clause via a "select".
This should only be called after any SQL construction methods that
might change the tables that are needed. This means the select columns,
ordering, and distinct must be done first.
"""
result = []
params = []
for alias in tuple(self.query.alias_map):
if not self.query.alias_refcount[alias]:
continue
try:
from_clause = self.query.alias_map[alias]
except KeyError:
# Extra tables can end up in self.tables, but not in the
# alias_map if they aren't in a join. That's OK. We skip them.
continue
clause_sql, clause_params = self.compile(from_clause)
result.append(clause_sql)
params.extend(clause_params)
for t in self.query.extra_tables:
alias, _ = self.query.table_alias(t)
# Only add the alias if it's not already present (the table_alias()
# call increments the refcount, so an alias refcount of one means
# this is the only reference).
if (
alias not in self.query.alias_map
or self.query.alias_refcount[alias] == 1
):
result.append(", %s" % self.quote_name_unless_alias(alias))
return result, params
def get_related_selections(
self,
select,
select_mask,
opts=None,
root_alias=None,
cur_depth=1,
requested=None,
restricted=None,
):
"""
Fill in the information needed for a select_related query. The current
depth is measured as the number of connections away from the root model
(for example, cur_depth=1 means we are looking at models with direct
connections to the root model).
"""
def _get_field_choices():
direct_choices = (f.name for f in opts.fields if f.is_relation)
reverse_choices = (
f.field.related_query_name()
for f in opts.related_objects
if f.field.unique
)
return chain(
direct_choices, reverse_choices, self.query._filtered_relations
)
related_klass_infos = []
if not restricted and cur_depth > self.query.max_depth:
# We've recursed far enough; bail out.
return related_klass_infos
if not opts:
opts = self.query.get_meta()
root_alias = self.query.get_initial_alias()
# Setup for the case when only particular related fields should be
# included in the related selection.
fields_found = set()
if requested is None:
restricted = isinstance(self.query.select_related, dict)
if restricted:
requested = self.query.select_related
def get_related_klass_infos(klass_info, related_klass_infos):
klass_info["related_klass_infos"] = related_klass_infos
for f in opts.fields:
fields_found.add(f.name)
if restricted:
next = requested.get(f.name, {})
if not f.is_relation:
# If a non-related field is used like a relation,
# or if a single non-relational field is given.
if next or f.name in requested:
raise FieldError(
"Non-relational field given in select_related: '%s'. "
"Choices are: %s"
% (
f.name,
", ".join(_get_field_choices()) or "(none)",
)
)
else:
next = False
if not select_related_descend(f, restricted, requested, select_mask):
continue
related_select_mask = select_mask.get(f) or {}
klass_info = {
"model": f.remote_field.model,
"field": f,
"reverse": False,
"local_setter": f.set_cached_value,
"remote_setter": f.remote_field.set_cached_value
if f.unique
else lambda x, y: None,
"from_parent": False,
}
related_klass_infos.append(klass_info)
select_fields = []
_, _, _, joins, _, _ = self.query.setup_joins([f.name], opts, root_alias)
alias = joins[-1]
columns = self.get_default_columns(
related_select_mask, start_alias=alias, opts=f.remote_field.model._meta
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info["select_fields"] = select_fields
next_klass_infos = self.get_related_selections(
select,
related_select_mask,
f.remote_field.model._meta,
alias,
cur_depth + 1,
next,
restricted,
)
get_related_klass_infos(klass_info, next_klass_infos)
if restricted:
related_fields = [
(o.field, o.related_model)
for o in opts.related_objects
if o.field.unique and not o.many_to_many
]
for related_field, model in related_fields:
related_select_mask = select_mask.get(related_field) or {}
if not select_related_descend(
related_field,
restricted,
requested,
related_select_mask,
reverse=True,
):
continue
related_field_name = related_field.related_query_name()
fields_found.add(related_field_name)
join_info = self.query.setup_joins(
[related_field_name], opts, root_alias
)
alias = join_info.joins[-1]
from_parent = issubclass(model, opts.model) and model is not opts.model
klass_info = {
"model": model,
"field": related_field,
"reverse": True,
"local_setter": related_field.remote_field.set_cached_value,
"remote_setter": related_field.set_cached_value,
"from_parent": from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
related_select_mask,
start_alias=alias,
opts=model._meta,
from_parent=opts.model,
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info["select_fields"] = select_fields
next = requested.get(related_field.related_query_name(), {})
next_klass_infos = self.get_related_selections(
select,
related_select_mask,
model._meta,
alias,
cur_depth + 1,
next,
restricted,
)
get_related_klass_infos(klass_info, next_klass_infos)
def local_setter(final_field, obj, from_obj):
# Set a reverse fk object when relation is non-empty.
if from_obj:
final_field.remote_field.set_cached_value(from_obj, obj)
def remote_setter(name, obj, from_obj):
setattr(from_obj, name, obj)
for name in list(requested):
# Filtered relations work only on the topmost level.
if cur_depth > 1:
break
if name in self.query._filtered_relations:
fields_found.add(name)
final_field, _, join_opts, joins, _, _ = self.query.setup_joins(
[name], opts, root_alias
)
model = join_opts.model
alias = joins[-1]
from_parent = (
issubclass(model, opts.model) and model is not opts.model
)
klass_info = {
"model": model,
"field": final_field,
"reverse": True,
"local_setter": partial(local_setter, final_field),
"remote_setter": partial(remote_setter, name),
"from_parent": from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
field_select_mask = select_mask.get((name, final_field)) or {}
columns = self.get_default_columns(
field_select_mask,
start_alias=alias,
opts=model._meta,
from_parent=opts.model,
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info["select_fields"] = select_fields
next_requested = requested.get(name, {})
next_klass_infos = self.get_related_selections(
select,
field_select_mask,
opts=model._meta,
root_alias=alias,
cur_depth=cur_depth + 1,
requested=next_requested,
restricted=restricted,
)
get_related_klass_infos(klass_info, next_klass_infos)
fields_not_found = set(requested).difference(fields_found)
if fields_not_found:
invalid_fields = ("'%s'" % s for s in fields_not_found)
raise FieldError(
"Invalid field name(s) given in select_related: %s. "
"Choices are: %s"
% (
", ".join(invalid_fields),
", ".join(_get_field_choices()) or "(none)",
)
)
return related_klass_infos
def get_select_for_update_of_arguments(self):
"""
Return a quoted list of arguments for the SELECT FOR UPDATE OF part of
the query.
"""
def _get_parent_klass_info(klass_info):
concrete_model = klass_info["model"]._meta.concrete_model
for parent_model, parent_link in concrete_model._meta.parents.items():
parent_list = parent_model._meta.get_parent_list()
yield {
"model": parent_model,
"field": parent_link,
"reverse": False,
"select_fields": [
select_index
for select_index in klass_info["select_fields"]
# Selected columns from a model or its parents.
if (
self.select[select_index][0].target.model == parent_model
or self.select[select_index][0].target.model in parent_list
)
],
}
def _get_first_selected_col_from_model(klass_info):
"""
Find the first selected column from a model. If it doesn't exist,
don't lock a model.
select_fields is filled recursively, so it also contains fields
from the parent models.
"""
concrete_model = klass_info["model"]._meta.concrete_model
for select_index in klass_info["select_fields"]:
if self.select[select_index][0].target.model == concrete_model:
return self.select[select_index][0]
def _get_field_choices():
"""Yield all allowed field paths in breadth-first search order."""
queue = collections.deque([(None, self.klass_info)])
while queue:
parent_path, klass_info = queue.popleft()
if parent_path is None:
path = []
yield "self"
else:
field = klass_info["field"]
if klass_info["reverse"]:
field = field.remote_field
path = parent_path + [field.name]
yield LOOKUP_SEP.join(path)
queue.extend(
(path, klass_info)
for klass_info in _get_parent_klass_info(klass_info)
)
queue.extend(
(path, klass_info)
for klass_info in klass_info.get("related_klass_infos", [])
)
if not self.klass_info:
return []
result = []
invalid_names = []
for name in self.query.select_for_update_of:
klass_info = self.klass_info
if name == "self":
col = _get_first_selected_col_from_model(klass_info)
else:
for part in name.split(LOOKUP_SEP):
klass_infos = (
*klass_info.get("related_klass_infos", []),
*_get_parent_klass_info(klass_info),
)
for related_klass_info in klass_infos:
field = related_klass_info["field"]
if related_klass_info["reverse"]:
field = field.remote_field
if field.name == part:
klass_info = related_klass_info
break
else:
klass_info = None
break
if klass_info is None:
invalid_names.append(name)
continue
col = _get_first_selected_col_from_model(klass_info)
if col is not None:
if self.connection.features.select_for_update_of_column:
result.append(self.compile(col)[0])
else:
result.append(self.quote_name_unless_alias(col.alias))
if invalid_names:
raise FieldError(
"Invalid field name(s) given in select_for_update(of=(...)): %s. "
"Only relational fields followed in the query are allowed. "
"Choices are: %s."
% (
", ".join(invalid_names),
", ".join(_get_field_choices()),
)
)
return result
def get_converters(self, expressions):
converters = {}
for i, expression in enumerate(expressions):
if expression:
backend_converters = self.connection.ops.get_db_converters(expression)
field_converters = expression.get_db_converters(self.connection)
if backend_converters or field_converters:
converters[i] = (backend_converters + field_converters, expression)
return converters
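    # Illustrative shape of the return value: {select_index: ([converter,
    # ...], expression)}, where each converter is a callable accepting
    # (value, expression, connection), applied in order by apply_converters().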
def apply_converters(self, rows, converters):
connection = self.connection
converters = list(converters.items())
for row in map(list, rows):
for pos, (convs, expression) in converters:
value = row[pos]
for converter in convs:
value = converter(value, expression, connection)
row[pos] = value
yield row
def results_iter(
self,
results=None,
tuple_expected=False,
chunked_fetch=False,
chunk_size=GET_ITERATOR_CHUNK_SIZE,
):
"""Return an iterator over the results from executing this query."""
if results is None:
results = self.execute_sql(
MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size
)
fields = [s[0] for s in self.select[0 : self.col_count]]
converters = self.get_converters(fields)
rows = chain.from_iterable(results)
if converters:
rows = self.apply_converters(rows, converters)
if tuple_expected:
rows = map(tuple, rows)
return rows
def has_results(self):
"""
Backends (e.g. NoSQL) can override this in order to use optimized
versions of "query has any results."
"""
return bool(self.execute_sql(SINGLE))
def execute_sql(
self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
):
"""
Run the query against the database and return the result(s). The
return value is a single data item if result_type is SINGLE, or an
iterator over the results if the result_type is MULTI.
        result_type is either MULTI (use fetchmany() to retrieve all rows),
        SINGLE (only retrieve a single row), or None. In this last case, the
        cursor is returned if any query is executed, since it's used by
        subclasses such as InsertQuery. It's possible, however, that no query
        is needed, as the filters describe an empty set. In that case, None is
        returned, to avoid any unnecessary database interaction.
"""
result_type = result_type or NO_RESULTS
try:
sql, params = self.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return iter([])
else:
return
if chunked_fetch:
cursor = self.connection.chunked_cursor()
else:
cursor = self.connection.cursor()
try:
cursor.execute(sql, params)
except Exception:
# Might fail for server-side cursors (e.g. connection closed)
cursor.close()
raise
if result_type == CURSOR:
# Give the caller the cursor to process and close.
return cursor
if result_type == SINGLE:
try:
val = cursor.fetchone()
if val:
return val[0 : self.col_count]
return val
finally:
# done with the cursor
cursor.close()
if result_type == NO_RESULTS:
cursor.close()
return
result = cursor_iter(
cursor,
self.connection.features.empty_fetchmany_value,
self.col_count if self.has_extra_select else None,
chunk_size,
)
if not chunked_fetch or not self.connection.features.can_use_chunked_reads:
# If we are using non-chunked reads, we return the same data
# structure as normally, but ensure it is all read into memory
# before going any further. Use chunked_fetch if requested,
# unless the database doesn't support it.
return list(result)
return result
def as_subquery_condition(self, alias, columns, compiler):
qn = compiler.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
for index, select_col in enumerate(self.query.select):
lhs_sql, lhs_params = self.compile(select_col)
rhs = "%s.%s" % (qn(alias), qn2(columns[index]))
self.query.where.add(RawSQL("%s = %s" % (lhs_sql, rhs), lhs_params), AND)
sql, params = self.as_sql()
return "EXISTS (%s)" % sql, params
def explain_query(self):
result = list(self.execute_sql())
# Some backends return 1 item tuples with strings, and others return
# tuples with integers and strings. Flatten them out into strings.
format_ = self.query.explain_info.format
output_formatter = json.dumps if format_ and format_.lower() == "json" else str
for row in result[0]:
if not isinstance(row, str):
yield " ".join(output_formatter(c) for c in row)
else:
yield row
class SQLInsertCompiler(SQLCompiler):
returning_fields = None
returning_params = ()
def field_as_sql(self, field, val):
"""
Take a field and a value intended to be saved on that field, and
return placeholder SQL and accompanying params. Check for raw values,
expressions, and fields with get_placeholder() defined in that order.
When field is None, consider the value raw and use it as the
placeholder, with no corresponding parameters returned.
"""
if field is None:
# A field value of None means the value is raw.
sql, params = val, []
elif hasattr(val, "as_sql"):
# This is an expression, let's compile it.
sql, params = self.compile(val)
elif hasattr(field, "get_placeholder"):
# Some fields (e.g. geo fields) need special munging before
# they can be inserted.
sql, params = field.get_placeholder(val, self, self.connection), [val]
else:
# Return the common case for the placeholder
sql, params = "%s", [val]
# The following hook is only used by Oracle Spatial, which sometimes
# needs to yield 'NULL' and [] as its placeholder and params instead
# of '%s' and [None]. The 'NULL' placeholder is produced earlier by
# OracleOperations.get_geom_placeholder(). The following line removes
# the corresponding None parameter. See ticket #10888.
params = self.connection.ops.modify_insert_params(sql, params)
return sql, params
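    # Illustrative examples of the dispatch above: field=None yields the raw
    # value as the placeholder with no params; an expression such as
    # F("x") + 1 compiles to its SQL and params; a plain value yields
    # ("%s", [value]).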
def prepare_value(self, field, value):
"""
Prepare a value to be used in a query by resolving it if it is an
expression and otherwise calling the field's get_db_prep_save().
"""
if hasattr(value, "resolve_expression"):
value = value.resolve_expression(
self.query, allow_joins=False, for_save=True
)
# Don't allow values containing Col expressions. They refer to
# existing columns on a row, but in the case of insert the row
# doesn't exist yet.
if value.contains_column_references:
raise ValueError(
'Failed to insert expression "%s" on %s. F() expressions '
"can only be used to update, not to insert." % (value, field)
)
if value.contains_aggregate:
raise FieldError(
"Aggregate functions are not allowed in this query "
"(%s=%r)." % (field.name, value)
)
if value.contains_over_clause:
raise FieldError(
"Window expressions are not allowed in this query (%s=%r)."
% (field.name, value)
)
return field.get_db_prep_save(value, connection=self.connection)
def pre_save_val(self, field, obj):
"""
Get the given field's value off the given obj. pre_save() is used for
things like auto_now on DateTimeField. Skip it if this is a raw query.
"""
if self.query.raw:
return getattr(obj, field.attname)
return field.pre_save(obj, add=True)
def assemble_as_sql(self, fields, value_rows):
"""
Take a sequence of N fields and a sequence of M rows of values, and
generate placeholder SQL and parameters for each field and value.
Return a pair containing:
* a sequence of M rows of N SQL placeholder strings, and
* a sequence of M rows of corresponding parameter values.
Each placeholder string may contain any number of '%s' interpolation
strings, and each parameter row will contain exactly as many params
as the total number of '%s's in the corresponding placeholder row.
"""
if not value_rows:
return [], []
# list of (sql, [params]) tuples for each object to be saved
# Shape: [n_objs][n_fields][2]
rows_of_fields_as_sql = (
(self.field_as_sql(field, v) for field, v in zip(fields, row))
for row in value_rows
)
# tuple like ([sqls], [[params]s]) for each object to be saved
# Shape: [n_objs][2][n_fields]
sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql)
# Extract separate lists for placeholders and params.
# Each of these has shape [n_objs][n_fields]
placeholder_rows, param_rows = zip(*sql_and_param_pair_rows)
# Params for each field are still lists, and need to be flattened.
param_rows = [[p for ps in row for p in ps] for row in param_rows]
return placeholder_rows, param_rows
def as_sql(self):
# We don't need quote_name_unless_alias() here, since these are all
# going to be column names (so we can avoid the extra overhead).
qn = self.connection.ops.quote_name
opts = self.query.get_meta()
insert_statement = self.connection.ops.insert_statement(
on_conflict=self.query.on_conflict,
)
result = ["%s %s" % (insert_statement, qn(opts.db_table))]
fields = self.query.fields or [opts.pk]
result.append("(%s)" % ", ".join(qn(f.column) for f in fields))
if self.query.fields:
value_rows = [
[
self.prepare_value(field, self.pre_save_val(field, obj))
for field in fields
]
for obj in self.query.objs
]
else:
# An empty object.
value_rows = [
[self.connection.ops.pk_default_value()] for _ in self.query.objs
]
fields = [None]
# Currently the backends just accept values when generating bulk
# queries and generate their own placeholders. Doing that isn't
# necessary and it should be possible to use placeholders and
# expressions in bulk inserts too.
can_bulk = (
not self.returning_fields and self.connection.features.has_bulk_insert
)
placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)
on_conflict_suffix_sql = self.connection.ops.on_conflict_suffix_sql(
fields,
self.query.on_conflict,
(f.column for f in self.query.update_fields),
(f.column for f in self.query.unique_fields),
)
if (
self.returning_fields
and self.connection.features.can_return_columns_from_insert
):
if self.connection.features.can_return_rows_from_bulk_insert:
result.append(
self.connection.ops.bulk_insert_sql(fields, placeholder_rows)
)
params = param_rows
else:
result.append("VALUES (%s)" % ", ".join(placeholder_rows[0]))
params = [param_rows[0]]
if on_conflict_suffix_sql:
result.append(on_conflict_suffix_sql)
# Skip empty r_sql to allow subclasses to customize behavior for
# 3rd party backends. Refs #19096.
r_sql, self.returning_params = self.connection.ops.return_insert_columns(
self.returning_fields
)
if r_sql:
result.append(r_sql)
params += [self.returning_params]
return [(" ".join(result), tuple(chain.from_iterable(params)))]
if can_bulk:
result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
if on_conflict_suffix_sql:
result.append(on_conflict_suffix_sql)
return [(" ".join(result), tuple(p for ps in param_rows for p in ps))]
else:
if on_conflict_suffix_sql:
result.append(on_conflict_suffix_sql)
return [
(" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
for p, vals in zip(placeholder_rows, param_rows)
]
def execute_sql(self, returning_fields=None):
assert not (
returning_fields
and len(self.query.objs) != 1
and not self.connection.features.can_return_rows_from_bulk_insert
)
opts = self.query.get_meta()
self.returning_fields = returning_fields
with self.connection.cursor() as cursor:
for sql, params in self.as_sql():
cursor.execute(sql, params)
if not self.returning_fields:
return []
if (
self.connection.features.can_return_rows_from_bulk_insert
and len(self.query.objs) > 1
):
rows = self.connection.ops.fetch_returned_insert_rows(cursor)
elif self.connection.features.can_return_columns_from_insert:
assert len(self.query.objs) == 1
rows = [
self.connection.ops.fetch_returned_insert_columns(
cursor,
self.returning_params,
)
]
else:
rows = [
(
self.connection.ops.last_insert_id(
cursor,
opts.db_table,
opts.pk.column,
),
)
]
cols = [field.get_col(opts.db_table) for field in self.returning_fields]
converters = self.get_converters(cols)
if converters:
rows = list(self.apply_converters(rows, converters))
return rows
class SQLDeleteCompiler(SQLCompiler):
@cached_property
def single_alias(self):
# Ensure base table is in aliases.
self.query.get_initial_alias()
return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1
@classmethod
def _expr_refs_base_model(cls, expr, base_model):
if isinstance(expr, Query):
return expr.model == base_model
if not hasattr(expr, "get_source_expressions"):
return False
return any(
cls._expr_refs_base_model(source_expr, base_model)
for source_expr in expr.get_source_expressions()
)
@cached_property
def contains_self_reference_subquery(self):
return any(
self._expr_refs_base_model(expr, self.query.model)
for expr in chain(
self.query.annotations.values(), self.query.where.children
)
)
def _as_sql(self, query):
delete = "DELETE FROM %s" % self.quote_name_unless_alias(query.base_table)
try:
where, params = self.compile(query.where)
except FullResultSet:
return delete, ()
return f"{delete} WHERE {where}", tuple(params)
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
if self.single_alias and not self.contains_self_reference_subquery:
return self._as_sql(self.query)
innerq = self.query.clone()
innerq.__class__ = Query
innerq.clear_select_clause()
pk = self.query.model._meta.pk
innerq.select = [pk.get_col(self.query.get_initial_alias())]
outerq = Query(self.query.model)
if not self.connection.features.update_can_self_select:
# Force the materialization of the inner query to allow reference
# to the target table on MySQL.
sql, params = innerq.get_compiler(connection=self.connection).as_sql()
innerq = RawSQL("SELECT * FROM (%s) subquery" % sql, params)
outerq.add_filter("pk__in", innerq)
return self._as_sql(outerq)
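    # Illustrative sketch (not verbatim Django output): when the fast path
    # doesn't apply, the delete compiles to SQL shaped roughly like:
    #
    #   DELETE FROM "app_model"
    #   WHERE "id" IN (SELECT "app_model"."id" FROM "app_model" ... WHERE ...)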
class SQLUpdateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
self.pre_sql_setup()
if not self.query.values:
return "", ()
qn = self.quote_name_unless_alias
values, update_params = [], []
for field, model, val in self.query.values:
if hasattr(val, "resolve_expression"):
val = val.resolve_expression(
self.query, allow_joins=False, for_save=True
)
if val.contains_aggregate:
raise FieldError(
"Aggregate functions are not allowed in this query "
"(%s=%r)." % (field.name, val)
)
if val.contains_over_clause:
raise FieldError(
"Window expressions are not allowed in this query "
"(%s=%r)." % (field.name, val)
)
elif hasattr(val, "prepare_database_save"):
if field.remote_field:
val = val.prepare_database_save(field)
else:
raise TypeError(
"Tried to update field %s with a model instance, %r. "
"Use a value compatible with %s."
% (field, val, field.__class__.__name__)
)
val = field.get_db_prep_save(val, connection=self.connection)
# Getting the placeholder for the field.
if hasattr(field, "get_placeholder"):
placeholder = field.get_placeholder(val, self, self.connection)
else:
placeholder = "%s"
name = field.column
if hasattr(val, "as_sql"):
sql, params = self.compile(val)
values.append("%s = %s" % (qn(name), placeholder % sql))
update_params.extend(params)
elif val is not None:
values.append("%s = %s" % (qn(name), placeholder))
update_params.append(val)
else:
values.append("%s = NULL" % qn(name))
table = self.query.base_table
result = [
"UPDATE %s SET" % qn(table),
", ".join(values),
]
try:
where, params = self.compile(self.query.where)
except FullResultSet:
params = []
else:
result.append("WHERE %s" % where)
return " ".join(result), tuple(update_params + params)
def execute_sql(self, result_type):
"""
Execute the specified update. Return the number of rows affected by
the primary update query. The "primary update query" is the first
non-empty query that is executed. Row counts for any subsequent,
related queries are not available.
"""
cursor = super().execute_sql(result_type)
try:
rows = cursor.rowcount if cursor else 0
is_empty = cursor is None
finally:
if cursor:
cursor.close()
for query in self.query.get_related_updates():
aux_rows = query.get_compiler(self.using).execute_sql(result_type)
if is_empty and aux_rows:
rows = aux_rows
is_empty = False
return rows
def pre_sql_setup(self):
"""
If the update depends on results from other tables, munge the "where"
conditions to match the format required for (portable) SQL updates.
If multiple updates are required, pull out the id values to update at
this point so that they don't change as a result of the progressive
updates.
"""
refcounts_before = self.query.alias_refcount.copy()
# Ensure base table is in the query
self.query.get_initial_alias()
count = self.query.count_active_tables()
if not self.query.related_updates and count == 1:
return
query = self.query.chain(klass=Query)
query.select_related = False
query.clear_ordering(force=True)
query.extra = {}
query.select = []
meta = query.get_meta()
fields = [meta.pk.name]
related_ids_index = []
for related in self.query.related_updates:
if all(
path.join_field.primary_key for path in meta.get_path_to_parent(related)
):
# If a primary key chain exists to the targeted related update,
# then the meta.pk value can be used for it.
related_ids_index.append((related, 0))
else:
# This branch will only be reached when updating a field of an
# ancestor that is not part of the primary key chain of a MTI
# tree.
related_ids_index.append((related, len(fields)))
fields.append(related._meta.pk.name)
query.add_fields(fields)
super().pre_sql_setup()
must_pre_select = (
count > 1 and not self.connection.features.update_can_self_select
)
# Now we adjust the current query: reset the where clause and get rid
# of all the tables we don't need (since they're in the sub-select).
self.query.clear_where()
if self.query.related_updates or must_pre_select:
# Either we're using the idents in multiple update queries (so
# don't want them to change), or the db backend doesn't support
# selecting from the updating table (e.g. MySQL).
idents = []
related_ids = collections.defaultdict(list)
for rows in query.get_compiler(self.using).execute_sql(MULTI):
idents.extend(r[0] for r in rows)
for parent, index in related_ids_index:
related_ids[parent].extend(r[index] for r in rows)
self.query.add_filter("pk__in", idents)
self.query.related_ids = related_ids
else:
# The fast path. Filters and updates in one query.
self.query.add_filter("pk__in", query)
self.query.reset_refcounts(refcounts_before)
class SQLAggregateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
sql, params = [], []
for annotation in self.query.annotation_select.values():
ann_sql, ann_params = self.compile(annotation)
ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params)
sql.append(ann_sql)
params.extend(ann_params)
self.col_count = len(self.query.annotation_select)
sql = ", ".join(sql)
params = tuple(params)
inner_query_sql, inner_query_params = self.query.inner_query.get_compiler(
self.using,
elide_empty=self.elide_empty,
).as_sql(with_col_aliases=True)
sql = "SELECT %s FROM (%s) subquery" % (sql, inner_query_sql)
params += inner_query_params
return sql, params
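    # Illustrative sketch (not verbatim Django output): an aggregation over a
    # grouped queryset compiles to SQL shaped roughly like:
    #
    #   SELECT COUNT("col1"), MAX("col2") FROM (
    #       SELECT ... FROM ... GROUP BY ...
    #   ) subquery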
def cursor_iter(cursor, sentinel, col_count, itersize):
"""
Yield blocks of rows from a cursor and ensure the cursor is closed when
done.
"""
try:
for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
yield rows if col_count is None else [r[:col_count] for r in rows]
finally:
cursor.close()
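# Usage sketch (illustrative, not part of Django): SQLCompiler.execute_sql()
# wraps server-side cursors in cursor_iter(), so results can be consumed
# lazily in blocks:
#
#   for rows in cursor_iter(cursor, sentinel=[], col_count=None, itersize=100):
#       for row in rows:
#           handle(row)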
from django.db import DatabaseError, InterfaceError
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
minimum_database_version = (19,)
# Oracle crashes with "ORA-00932: inconsistent datatypes: expected - got
# BLOB" when grouping by LOBs (#24096).
allows_group_by_lob = False
allows_group_by_refs = False
interprets_empty_strings_as_nulls = True
has_select_for_update = True
has_select_for_update_nowait = True
has_select_for_update_skip_locked = True
has_select_for_update_of = True
select_for_update_of_column = True
can_return_columns_from_insert = True
supports_subqueries_in_group_by = False
ignores_unnecessary_order_by_in_subqueries = False
supports_transactions = True
supports_timezones = False
has_native_duration_field = True
can_defer_constraint_checks = True
supports_partially_nullable_unique_constraints = False
supports_deferrable_unique_constraints = True
truncates_names = True
supports_comments = True
supports_tablespaces = True
supports_sequence_reset = False
can_introspect_materialized_views = True
atomic_transactions = False
nulls_order_largest = True
requires_literal_defaults = True
closed_cursor_error_class = InterfaceError
bare_select_suffix = " FROM DUAL"
# Select for update with limit can be achieved on Oracle, but not with the
# current backend.
supports_select_for_update_with_limit = False
supports_temporal_subtraction = True
    # Oracle doesn't ignore the case of quoted identifiers, but the current
    # backend does by uppercasing all identifiers.
ignores_table_name_case = True
supports_index_on_text_field = False
create_test_procedure_without_params_sql = """
CREATE PROCEDURE "TEST_PROCEDURE" AS
V_I INTEGER;
BEGIN
V_I := 1;
END;
"""
create_test_procedure_with_int_param_sql = """
CREATE PROCEDURE "TEST_PROCEDURE" (P_I INTEGER) AS
V_I INTEGER;
BEGIN
V_I := P_I;
END;
"""
create_test_table_with_composite_primary_key = """
CREATE TABLE test_table_composite_pk (
column_1 NUMBER(11) NOT NULL,
column_2 NUMBER(11) NOT NULL,
PRIMARY KEY (column_1, column_2)
)
"""
supports_callproc_kwargs = True
supports_over_clause = True
supports_frame_range_fixed_distance = True
supports_ignore_conflicts = False
max_query_params = 2**16 - 1
supports_partial_indexes = False
can_rename_index = True
supports_slicing_ordering_in_compound = True
requires_compound_order_by_subquery = True
allows_multiple_constraints_on_same_fields = False
supports_boolean_expr_in_select_clause = False
supports_comparing_boolean_expr = False
supports_primitives_in_json_field = False
supports_json_field_contains = False
supports_collation_on_textfield = False
test_collations = {
"ci": "BINARY_CI",
"cs": "BINARY",
"non_default": "SWEDISH_CI",
"swedish_ci": "SWEDISH_CI",
}
test_now_utc_template = "CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"
django_test_skips = {
"Oracle doesn't support SHA224.": {
"db_functions.text.test_sha224.SHA224Tests.test_basic",
"db_functions.text.test_sha224.SHA224Tests.test_transform",
},
"Oracle doesn't correctly calculate ISO 8601 week numbering before "
"1583 (the Gregorian calendar was introduced in 1582).": {
"db_functions.datetime.test_extract_trunc.DateFunctionTests."
"test_trunc_week_before_1000",
"db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests."
"test_trunc_week_before_1000",
},
"Oracle extracts seconds including fractional seconds (#33517).": {
"db_functions.datetime.test_extract_trunc.DateFunctionTests."
"test_extract_second_func_no_fractional",
"db_functions.datetime.test_extract_trunc.DateFunctionWithTimeZoneTests."
"test_extract_second_func_no_fractional",
},
"Oracle doesn't support bitwise XOR.": {
"expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_xor",
"expressions.tests.ExpressionOperatorTests.test_lefthand_bitwise_xor_null",
"expressions.tests.ExpressionOperatorTests."
"test_lefthand_bitwise_xor_right_null",
},
"Oracle requires ORDER BY in row_number, ANSI:SQL doesn't.": {
"expressions_window.tests.WindowFunctionTests.test_row_number_no_ordering",
},
"Raises ORA-00600: internal error code.": {
"model_fields.test_jsonfield.TestQuerying.test_usage_in_subquery",
},
"Oracle doesn't support changing collations on indexed columns (#33671).": {
"migrations.test_operations.OperationTests."
"test_alter_field_pk_fk_db_collation",
},
}
django_test_expected_failures = {
# A bug in Django/cx_Oracle with respect to string handling (#23843).
"annotations.tests.NonAggregateAnnotationTestCase.test_custom_functions",
"annotations.tests.NonAggregateAnnotationTestCase."
"test_custom_functions_can_ref_other_functions",
}
@cached_property
def introspected_field_types(self):
return {
**super().introspected_field_types,
"GenericIPAddressField": "CharField",
"PositiveBigIntegerField": "BigIntegerField",
"PositiveIntegerField": "IntegerField",
"PositiveSmallIntegerField": "IntegerField",
"SmallIntegerField": "IntegerField",
"TimeField": "DateTimeField",
}
@cached_property
def supports_collation_on_charfield(self):
with self.connection.cursor() as cursor:
try:
cursor.execute("SELECT CAST('a' AS VARCHAR2(4001)) FROM dual")
except DatabaseError as e:
if e.args[0].code == 910:
return False
raise
return True
from collections import namedtuple
import cx_Oracle
from django.db import models
from django.db.backends.base.introspection import BaseDatabaseIntrospection
from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo
from django.db.backends.base.introspection import TableInfo as BaseTableInfo
from django.utils.functional import cached_property
FieldInfo = namedtuple(
"FieldInfo", BaseFieldInfo._fields + ("is_autofield", "is_json", "comment")
)
TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",))
class DatabaseIntrospection(BaseDatabaseIntrospection):
cache_bust_counter = 1
# Maps type objects to Django Field types.
@cached_property
def data_types_reverse(self):
if self.connection.cx_oracle_version < (8,):
return {
cx_Oracle.BLOB: "BinaryField",
cx_Oracle.CLOB: "TextField",
cx_Oracle.DATETIME: "DateField",
cx_Oracle.FIXED_CHAR: "CharField",
cx_Oracle.FIXED_NCHAR: "CharField",
cx_Oracle.INTERVAL: "DurationField",
cx_Oracle.NATIVE_FLOAT: "FloatField",
cx_Oracle.NCHAR: "CharField",
cx_Oracle.NCLOB: "TextField",
cx_Oracle.NUMBER: "DecimalField",
cx_Oracle.STRING: "CharField",
cx_Oracle.TIMESTAMP: "DateTimeField",
}
else:
return {
cx_Oracle.DB_TYPE_DATE: "DateField",
cx_Oracle.DB_TYPE_BINARY_DOUBLE: "FloatField",
cx_Oracle.DB_TYPE_BLOB: "BinaryField",
cx_Oracle.DB_TYPE_CHAR: "CharField",
cx_Oracle.DB_TYPE_CLOB: "TextField",
cx_Oracle.DB_TYPE_INTERVAL_DS: "DurationField",
cx_Oracle.DB_TYPE_NCHAR: "CharField",
cx_Oracle.DB_TYPE_NCLOB: "TextField",
cx_Oracle.DB_TYPE_NVARCHAR: "CharField",
cx_Oracle.DB_TYPE_NUMBER: "DecimalField",
cx_Oracle.DB_TYPE_TIMESTAMP: "DateTimeField",
cx_Oracle.DB_TYPE_VARCHAR: "CharField",
}
def get_field_type(self, data_type, description):
if data_type == cx_Oracle.NUMBER:
precision, scale = description[4:6]
if scale == 0:
if precision > 11:
return (
"BigAutoField"
if description.is_autofield
else "BigIntegerField"
)
elif 1 < precision < 6 and description.is_autofield:
return "SmallAutoField"
elif precision == 1:
return "BooleanField"
elif description.is_autofield:
return "AutoField"
else:
return "IntegerField"
elif scale == -127:
return "FloatField"
elif data_type == cx_Oracle.NCLOB and description.is_json:
return "JSONField"
return super().get_field_type(data_type, description)
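    # Illustrative examples of the mapping above: a NUMBER(11, 0) identity
    # column maps to "AutoField", NUMBER(19, 0) to "BigIntegerField" (or
    # "BigAutoField" for an identity column), NUMBER(1, 0) to "BooleanField",
    # and NUMBER with scale -127 to "FloatField".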
def get_table_list(self, cursor):
"""Return a list of table and view names in the current database."""
cursor.execute(
"""
SELECT
user_tables.table_name,
't',
user_tab_comments.comments
FROM user_tables
LEFT OUTER JOIN
user_tab_comments
ON user_tab_comments.table_name = user_tables.table_name
WHERE
NOT EXISTS (
SELECT 1
FROM user_mviews
WHERE user_mviews.mview_name = user_tables.table_name
)
UNION ALL
SELECT view_name, 'v', NULL FROM user_views
UNION ALL
SELECT mview_name, 'v', NULL FROM user_mviews
"""
)
return [
TableInfo(self.identifier_converter(row[0]), row[1], row[2])
for row in cursor.fetchall()
]
def get_table_description(self, cursor, table_name):
"""
Return a description of the table with the DB-API cursor.description
interface.
"""
# user_tab_columns gives data default for columns
cursor.execute(
"""
SELECT
user_tab_cols.column_name,
user_tab_cols.data_default,
CASE
WHEN user_tab_cols.collation = user_tables.default_collation
THEN NULL
ELSE user_tab_cols.collation
END collation,
CASE
WHEN user_tab_cols.char_used IS NULL
THEN user_tab_cols.data_length
ELSE user_tab_cols.char_length
END as display_size,
CASE
WHEN user_tab_cols.identity_column = 'YES' THEN 1
ELSE 0
END as is_autofield,
CASE
WHEN EXISTS (
SELECT 1
FROM user_json_columns
WHERE
user_json_columns.table_name = user_tab_cols.table_name AND
user_json_columns.column_name = user_tab_cols.column_name
)
THEN 1
ELSE 0
END as is_json,
user_col_comments.comments as col_comment
FROM user_tab_cols
LEFT OUTER JOIN
user_tables ON user_tables.table_name = user_tab_cols.table_name
LEFT OUTER JOIN
user_col_comments ON
user_col_comments.column_name = user_tab_cols.column_name AND
user_col_comments.table_name = user_tab_cols.table_name
WHERE user_tab_cols.table_name = UPPER(%s)
""",
[table_name],
)
field_map = {
column: (
display_size,
default if default != "NULL" else None,
collation,
is_autofield,
is_json,
comment,
)
for (
column,
default,
collation,
display_size,
is_autofield,
is_json,
comment,
) in cursor.fetchall()
}
self.cache_bust_counter += 1
cursor.execute(
"SELECT * FROM {} WHERE ROWNUM < 2 AND {} > 0".format(
self.connection.ops.quote_name(table_name), self.cache_bust_counter
)
)
description = []
for desc in cursor.description:
name = desc[0]
(
display_size,
default,
collation,
is_autofield,
is_json,
comment,
) = field_map[name]
name %= {} # cx_Oracle, for some reason, doubles percent signs.
description.append(
FieldInfo(
self.identifier_converter(name),
desc[1],
display_size,
desc[3],
desc[4] or 0,
desc[5] or 0,
*desc[6:],
default,
collation,
is_autofield,
is_json,
comment,
)
)
return description
def identifier_converter(self, name):
"""Identifier comparison is case insensitive under Oracle."""
return name.lower()
def get_sequences(self, cursor, table_name, table_fields=()):
cursor.execute(
"""
SELECT
user_tab_identity_cols.sequence_name,
user_tab_identity_cols.column_name
FROM
user_tab_identity_cols,
user_constraints,
user_cons_columns cols
WHERE
user_constraints.constraint_name = cols.constraint_name
AND user_constraints.table_name = user_tab_identity_cols.table_name
AND cols.column_name = user_tab_identity_cols.column_name
AND user_constraints.constraint_type = 'P'
AND user_tab_identity_cols.table_name = UPPER(%s)
""",
[table_name],
)
# Oracle allows only one identity column per table.
row = cursor.fetchone()
if row:
return [
{
"name": self.identifier_converter(row[0]),
"table": self.identifier_converter(table_name),
"column": self.identifier_converter(row[1]),
}
]
# To keep backward compatibility for AutoFields that aren't Oracle
# identity columns.
for f in table_fields:
if isinstance(f, models.AutoField):
return [{"table": table_name, "column": f.column}]
return []
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all foreign keys in the given table.
"""
table_name = table_name.upper()
cursor.execute(
"""
SELECT ca.column_name, cb.table_name, cb.column_name
FROM user_constraints, USER_CONS_COLUMNS ca, USER_CONS_COLUMNS cb
WHERE user_constraints.table_name = %s AND
user_constraints.constraint_name = ca.constraint_name AND
user_constraints.r_constraint_name = cb.constraint_name AND
ca.position = cb.position""",
[table_name],
)
return {
self.identifier_converter(field_name): (
self.identifier_converter(rel_field_name),
self.identifier_converter(rel_table_name),
)
for field_name, rel_table_name, rel_field_name in cursor.fetchall()
}
def get_primary_key_columns(self, cursor, table_name):
cursor.execute(
"""
SELECT
cols.column_name
FROM
user_constraints,
user_cons_columns cols
WHERE
user_constraints.constraint_name = cols.constraint_name AND
user_constraints.constraint_type = 'P' AND
user_constraints.table_name = UPPER(%s)
ORDER BY
cols.position
""",
[table_name],
)
return [self.identifier_converter(row[0]) for row in cursor.fetchall()]
def get_constraints(self, cursor, table_name):
"""
Retrieve any constraints or keys (unique, pk, fk, check, index) across
one or more columns.
"""
constraints = {}
# Loop over the constraints, getting PKs, uniques, and checks
cursor.execute(
"""
SELECT
user_constraints.constraint_name,
LISTAGG(LOWER(cols.column_name), ',')
WITHIN GROUP (ORDER BY cols.position),
CASE user_constraints.constraint_type
WHEN 'P' THEN 1
ELSE 0
END AS is_primary_key,
CASE
WHEN user_constraints.constraint_type IN ('P', 'U') THEN 1
ELSE 0
END AS is_unique,
CASE user_constraints.constraint_type
WHEN 'C' THEN 1
ELSE 0
END AS is_check_constraint
FROM
user_constraints
LEFT OUTER JOIN
user_cons_columns cols
ON user_constraints.constraint_name = cols.constraint_name
WHERE
user_constraints.constraint_type = ANY('P', 'U', 'C')
AND user_constraints.table_name = UPPER(%s)
GROUP BY user_constraints.constraint_name, user_constraints.constraint_type
""",
[table_name],
)
for constraint, columns, pk, unique, check in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
"columns": columns.split(","),
"primary_key": pk,
"unique": unique,
"foreign_key": None,
"check": check,
"index": unique, # All uniques come with an index
}
# Foreign key constraints
cursor.execute(
"""
SELECT
cons.constraint_name,
LISTAGG(LOWER(cols.column_name), ',')
WITHIN GROUP (ORDER BY cols.position),
LOWER(rcols.table_name),
LOWER(rcols.column_name)
FROM
user_constraints cons
INNER JOIN
user_cons_columns rcols
ON rcols.constraint_name = cons.r_constraint_name AND rcols.position = 1
LEFT OUTER JOIN
user_cons_columns cols
ON cons.constraint_name = cols.constraint_name
WHERE
cons.constraint_type = 'R' AND
cons.table_name = UPPER(%s)
GROUP BY cons.constraint_name, rcols.table_name, rcols.column_name
""",
[table_name],
)
for constraint, columns, other_table, other_column in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
"primary_key": False,
"unique": False,
"foreign_key": (other_table, other_column),
"check": False,
"index": False,
"columns": columns.split(","),
}
# Now get indexes
cursor.execute(
"""
SELECT
ind.index_name,
LOWER(ind.index_type),
LOWER(ind.uniqueness),
LISTAGG(LOWER(cols.column_name), ',')
WITHIN GROUP (ORDER BY cols.column_position),
LISTAGG(cols.descend, ',') WITHIN GROUP (ORDER BY cols.column_position)
FROM
user_ind_columns cols, user_indexes ind
WHERE
cols.table_name = UPPER(%s) AND
NOT EXISTS (
SELECT 1
FROM user_constraints cons
WHERE ind.index_name = cons.index_name
) AND cols.index_name = ind.index_name
GROUP BY ind.index_name, ind.index_type, ind.uniqueness
""",
[table_name],
)
for constraint, type_, unique, columns, orders in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
"primary_key": False,
"unique": unique == "unique",
"foreign_key": None,
"check": False,
"index": True,
"type": "idx" if type_ == "normal" else type_,
"columns": columns.split(","),
"orders": orders.split(","),
}
return constraints
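    # Illustrative shape of the return value (names are made up; flag values
    # for pk/unique/check come back from Oracle as 1/0):
    #   {"app_model_pk": {"columns": ["id"], "primary_key": 1, "unique": 1,
    #    "foreign_key": None, "check": 0, "index": 1}}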
import copy
import datetime
import re
from django.db import DatabaseError
from django.db.backends.base.schema import (
BaseDatabaseSchemaEditor,
_related_non_m2m_objects,
)
from django.utils.duration import duration_iso_string
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s"
sql_alter_column_type = "MODIFY %(column)s %(type)s%(collation)s"
sql_alter_column_null = "MODIFY %(column)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s NOT NULL"
sql_alter_column_default = "MODIFY %(column)s DEFAULT %(default)s"
sql_alter_column_no_default = "MODIFY %(column)s DEFAULT NULL"
sql_alter_column_no_default_null = sql_alter_column_no_default
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
sql_create_column_inline_fk = (
"CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s"
)
sql_delete_table = "DROP TABLE %(table)s CASCADE CONSTRAINTS"
sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"
def quote_value(self, value):
if isinstance(value, (datetime.date, datetime.time, datetime.datetime)):
return "'%s'" % value
elif isinstance(value, datetime.timedelta):
return "'%s'" % duration_iso_string(value)
elif isinstance(value, str):
return "'%s'" % value.replace("'", "''").replace("%", "%%")
elif isinstance(value, (bytes, bytearray, memoryview)):
return "'%s'" % value.hex()
elif isinstance(value, bool):
return "1" if value else "0"
else:
return str(value)
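    # Examples of the quoting above (illustrative):
    #   quote_value(True) -> "1"
    #   quote_value("O'Reilly") -> "'O''Reilly'"
    #   quote_value(datetime.date(2020, 1, 1)) -> "'2020-01-01'"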
def remove_field(self, model, field):
# If the column is an identity column, drop the identity before
# removing the field.
if self._is_identity_column(model._meta.db_table, field.column):
self._drop_identity(model._meta.db_table, field.column)
super().remove_field(model, field)
def delete_model(self, model):
# Run superclass action
super().delete_model(model)
# Clean up manually created sequence.
self.execute(
"""
DECLARE
i INTEGER;
BEGIN
SELECT COUNT(1) INTO i FROM USER_SEQUENCES
WHERE SEQUENCE_NAME = '%(sq_name)s';
IF i = 1 THEN
EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
END IF;
END;
/"""
% {
"sq_name": self.connection.ops._get_no_autofield_sequence_name(
model._meta.db_table
)
}
)
def alter_field(self, model, old_field, new_field, strict=False):
try:
super().alter_field(model, old_field, new_field, strict)
except DatabaseError as e:
description = str(e)
            # If we're changing the column type to an unsupported type, we
            # need a SQLite-ish workaround.
if "ORA-22858" in description or "ORA-22859" in description:
self._alter_field_type_workaround(model, old_field, new_field)
# If an identity column is changing to a non-numeric type, drop the
# identity first.
elif "ORA-30675" in description:
self._drop_identity(model._meta.db_table, old_field.column)
self.alter_field(model, old_field, new_field, strict)
# If a primary key column is changing to an identity column, drop
# the primary key first.
elif "ORA-30673" in description and old_field.primary_key:
self._delete_primary_key(model, strict=True)
self._alter_field_type_workaround(model, old_field, new_field)
# If a collation is changing on a primary key, drop the primary key
# first.
elif "ORA-43923" in description and old_field.primary_key:
self._delete_primary_key(model, strict=True)
self.alter_field(model, old_field, new_field, strict)
# Restore a primary key, if needed.
if new_field.primary_key:
self.execute(self._create_primary_key_sql(model, new_field))
else:
raise
def _alter_field_type_workaround(self, model, old_field, new_field):
"""
        Oracle refuses to change a column from some types to other types.
What we need to do instead is:
- Add a nullable version of the desired field with a temporary name. If
the new column is an auto field, then the temporary column can't be
nullable.
- Update the table to transfer values from old to new
- Drop old column
- Rename the new column and possibly drop the nullable property
"""
# Make a new field that's like the new one but with a temporary
# column name.
new_temp_field = copy.deepcopy(new_field)
new_temp_field.null = new_field.get_internal_type() not in (
"AutoField",
"BigAutoField",
"SmallAutoField",
)
new_temp_field.column = self._generate_temp_name(new_field.column)
# Add it
self.add_field(model, new_temp_field)
# Explicit data type conversion
# https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf
# /Data-Type-Comparison-Rules.html#GUID-D0C5A47E-6F93-4C2D-9E49-4F2B86B359DD
new_value = self.quote_name(old_field.column)
old_type = old_field.db_type(self.connection)
if re.match("^N?CLOB", old_type):
new_value = "TO_CHAR(%s)" % new_value
old_type = "VARCHAR2"
if re.match("^N?VARCHAR2", old_type):
new_internal_type = new_field.get_internal_type()
if new_internal_type == "DateField":
new_value = "TO_DATE(%s, 'YYYY-MM-DD')" % new_value
elif new_internal_type == "DateTimeField":
new_value = "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value
elif new_internal_type == "TimeField":
                # TimeField values are stored as TIMESTAMP with a 1900-01-01
                # date part.
new_value = "CONCAT('1900-01-01 ', %s)" % new_value
new_value = "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value
# Transfer values across
self.execute(
"UPDATE %s set %s=%s"
% (
self.quote_name(model._meta.db_table),
self.quote_name(new_temp_field.column),
new_value,
)
)
# Drop the old field
self.remove_field(model, old_field)
# Rename and possibly make the new field NOT NULL
super().alter_field(model, new_temp_field, new_field)
# Recreate foreign key (if necessary) because the old field is not
# passed to the alter_field() and data types of new_temp_field and
# new_field always match.
new_type = new_field.db_type(self.connection)
if (
(old_field.primary_key and new_field.primary_key)
or (old_field.unique and new_field.unique)
) and old_type != new_type:
for _, rel in _related_non_m2m_objects(new_temp_field, new_field):
if rel.field.db_constraint:
self.execute(
self._create_fk_sql(rel.related_model, rel.field, "_fk")
)
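    # Roughly, for e.g. a TextField -> CharField change, the workaround above
    # emits this sequence (a sketch, not the literal SQL):
    #   ALTER TABLE t ADD tmp_col VARCHAR2(...) NULL
    #   UPDATE t SET tmp_col = TO_CHAR(old_col)
    #   ALTER TABLE t DROP COLUMN old_col
    # ...then super().alter_field() renames tmp_col and restores NOT NULL.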
def _alter_column_type_sql(
self, model, old_field, new_field, new_type, old_collation, new_collation
):
auto_field_types = {"AutoField", "BigAutoField", "SmallAutoField"}
# Drop the identity if migrating away from AutoField.
if (
old_field.get_internal_type() in auto_field_types
and new_field.get_internal_type() not in auto_field_types
and self._is_identity_column(model._meta.db_table, new_field.column)
):
self._drop_identity(model._meta.db_table, new_field.column)
return super()._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
def normalize_name(self, name):
"""
Get the properly shortened and uppercased identifier as returned by
quote_name() but without the quotes.
"""
nn = self.quote_name(name)
if nn[0] == '"' and nn[-1] == '"':
nn = nn[1:-1]
return nn
def _generate_temp_name(self, for_name):
"""Generate temporary names for workarounds that need temp columns."""
suffix = hex(hash(for_name)).upper()[1:]
return self.normalize_name(for_name + "_" + suffix)
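    # Note that hash() on strings is salted per process (PYTHONHASHSEED), so
    # the generated name is only stable within a single run; that's acceptable
    # here because the temporary column is created and dropped in the same
    # migration.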
def prepare_default(self, value):
return self.quote_value(value)
def _field_should_be_indexed(self, model, field):
create_index = super()._field_should_be_indexed(model, field)
db_type = field.db_type(self.connection)
if (
db_type is not None
and db_type.lower() in self.connection._limited_data_types
):
return False
return create_index
def _is_identity_column(self, table_name, column_name):
with self.connection.cursor() as cursor:
cursor.execute(
"""
SELECT
CASE WHEN identity_column = 'YES' THEN 1 ELSE 0 END
FROM user_tab_cols
WHERE table_name = %s AND
column_name = %s
""",
[self.normalize_name(table_name), self.normalize_name(column_name)],
)
row = cursor.fetchone()
return row[0] if row else False
def _drop_identity(self, table_name, column_name):
self.execute(
"ALTER TABLE %(table)s MODIFY %(column)s DROP IDENTITY"
% {
"table": self.quote_name(table_name),
"column": self.quote_name(column_name),
}
)
def _get_default_collation(self, table_name):
with self.connection.cursor() as cursor:
cursor.execute(
"""
SELECT default_collation FROM user_tables WHERE table_name = %s
""",
[self.normalize_name(table_name)],
)
return cursor.fetchone()[0]
def _collate_sql(self, collation, old_collation=None, table_name=None):
if collation is None and old_collation is not None:
collation = self._get_default_collation(table_name)
return super()._collate_sql(collation, old_collation, table_name)
|
59d0da40d818c98c123b3c047960ec05d5ac94a3daa3859717be9fc9748602d4 | from django.db import ProgrammingError
from django.utils.functional import cached_property
class BaseDatabaseFeatures:
# An optional tuple indicating the minimum supported database version.
minimum_database_version = None
gis_enabled = False
# Oracle can't group by LOB (large object) data types.
allows_group_by_lob = True
allows_group_by_selected_pks = False
allows_group_by_refs = True
empty_fetchmany_value = []
update_can_self_select = True
# Does the backend distinguish between '' and None?
interprets_empty_strings_as_nulls = False
# Does the backend allow inserting duplicate NULL rows in a nullable
# unique field? All core backends implement this correctly, but other
# databases such as SQL Server do not.
supports_nullable_unique_constraints = True
# Does the backend allow inserting duplicate rows when a unique_together
# constraint exists and some fields are nullable but not all of them?
supports_partially_nullable_unique_constraints = True
# Does the backend support initially deferrable unique constraints?
supports_deferrable_unique_constraints = False
can_use_chunked_reads = True
can_return_columns_from_insert = False
can_return_rows_from_bulk_insert = False
has_bulk_insert = True
uses_savepoints = True
can_release_savepoints = False
# If True, don't use integer foreign keys referring to, e.g., positive
# integer primary keys.
related_fields_match_type = False
allow_sliced_subqueries_with_in = True
has_select_for_update = False
has_select_for_update_nowait = False
has_select_for_update_skip_locked = False
has_select_for_update_of = False
has_select_for_no_key_update = False
# Does the database's SELECT FOR UPDATE OF syntax require a column rather
# than a table?
select_for_update_of_column = False
# Does the default test database allow multiple connections?
# Usually an indication that the test database is in-memory
test_db_allows_multiple_connections = True
# Can an object be saved without an explicit primary key?
supports_unspecified_pk = False
# Can a fixture contain forward references? i.e., are
# FK constraints checked at the end of transaction, or
# at the end of each save operation?
supports_forward_references = True
# Does the backend truncate names properly when they are too long?
truncates_names = False
# Is there a REAL datatype in addition to floats/doubles?
has_real_datatype = False
supports_subqueries_in_group_by = True
# Does the backend ignore unnecessary ORDER BY clauses in subqueries?
ignores_unnecessary_order_by_in_subqueries = True
# Is there a true datatype for uuid?
has_native_uuid_field = False
# Is there a true datatype for timedeltas?
has_native_duration_field = False
    # Does the database driver support same-type temporal data subtraction
    # by returning the type used to store duration fields?
supports_temporal_subtraction = False
# Does the __regex lookup support backreferencing and grouping?
supports_regex_backreferencing = True
# Can date/datetime lookups be performed using a string?
supports_date_lookup_using_string = True
# Can datetimes with timezones be used?
supports_timezones = True
# Does the database have a copy of the zoneinfo database?
has_zoneinfo_database = True
# When performing a GROUP BY, is an ORDER BY NULL required
# to remove any ordering?
requires_explicit_null_ordering_when_grouping = False
# Does the backend order NULL values as largest or smallest?
nulls_order_largest = False
# Does the backend support NULLS FIRST and NULLS LAST in ORDER BY?
supports_order_by_nulls_modifier = True
    # Does the backend order NULLS FIRST by default?
order_by_nulls_first = False
# The database's limit on the number of query parameters.
max_query_params = None
# Can an object have an autoincrement primary key of 0?
allows_auto_pk_0 = True
    # Do we need to NULL a ForeignKey out, or can the constraint check be
    # deferred?
can_defer_constraint_checks = False
# Does the backend support tablespaces? Default to False because it isn't
# in the SQL standard.
supports_tablespaces = False
# Does the backend reset sequences between tests?
supports_sequence_reset = True
# Can the backend introspect the default value of a column?
can_introspect_default = True
# Confirm support for introspected foreign keys
# Every database can do this reliably, except MySQL,
# which can't do it for MyISAM tables
can_introspect_foreign_keys = True
# Map fields which some backends may not be able to differentiate to the
# field it's introspected as.
introspected_field_types = {
"AutoField": "AutoField",
"BigAutoField": "BigAutoField",
"BigIntegerField": "BigIntegerField",
"BinaryField": "BinaryField",
"BooleanField": "BooleanField",
"CharField": "CharField",
"DurationField": "DurationField",
"GenericIPAddressField": "GenericIPAddressField",
"IntegerField": "IntegerField",
"PositiveBigIntegerField": "PositiveBigIntegerField",
"PositiveIntegerField": "PositiveIntegerField",
"PositiveSmallIntegerField": "PositiveSmallIntegerField",
"SmallAutoField": "SmallAutoField",
"SmallIntegerField": "SmallIntegerField",
"TimeField": "TimeField",
}
# Can the backend introspect the column order (ASC/DESC) for indexes?
supports_index_column_ordering = True
# Does the backend support introspection of materialized views?
can_introspect_materialized_views = False
# Support for the DISTINCT ON clause
can_distinct_on_fields = False
# Does the backend prevent running SQL queries in broken transactions?
atomic_transactions = True
# Can we roll back DDL in a transaction?
can_rollback_ddl = False
schema_editor_uses_clientside_param_binding = False
# Does it support operations requiring references rename in a transaction?
supports_atomic_references_rename = True
# Can we issue more than one ALTER COLUMN clause in an ALTER TABLE?
supports_combined_alters = False
# Does it support foreign keys?
supports_foreign_keys = True
# Can it create foreign key constraints inline when adding columns?
can_create_inline_fk = True
# Can an index be renamed?
can_rename_index = False
# Does it automatically index foreign keys?
indexes_foreign_keys = True
# Does it support CHECK constraints?
supports_column_check_constraints = True
supports_table_check_constraints = True
# Does the backend support introspection of CHECK constraints?
can_introspect_check_constraints = True
# Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value})
# parameter passing? Note this can be provided by the backend even if not
    # supported by the Python driver.
supports_paramstyle_pyformat = True
# Does the backend require literal defaults, rather than parameterized ones?
requires_literal_defaults = False
# Does the backend require a connection reset after each material schema change?
connection_persists_old_columns = False
# What kind of error does the backend throw when accessing closed cursor?
closed_cursor_error_class = ProgrammingError
# Does 'a' LIKE 'A' match?
has_case_insensitive_like = False
# Suffix for backends that don't support "SELECT xxx;" queries.
bare_select_suffix = ""
# If NULL is implied on columns without needing to be explicitly specified
implied_column_null = False
# Does the backend support "select for update" queries with limit (and offset)?
supports_select_for_update_with_limit = True
# Does the backend ignore null expressions in GREATEST and LEAST queries unless
# every expression is null?
greatest_least_ignores_nulls = False
# Can the backend clone databases for parallel test execution?
# Defaults to False to allow third-party backends to opt-in.
can_clone_databases = False
# Does the backend consider table names with different casing to
# be equal?
ignores_table_name_case = False
# Place FOR UPDATE right after FROM clause. Used on MSSQL.
for_update_after_from = False
# Combinatorial flags
supports_select_union = True
supports_select_intersection = True
supports_select_difference = True
supports_slicing_ordering_in_compound = False
supports_parentheses_in_compound = True
requires_compound_order_by_subquery = False
# Does the database support SQL 2003 FILTER (WHERE ...) in aggregate
# expressions?
supports_aggregate_filter_clause = False
# Does the backend support indexing a TextField?
supports_index_on_text_field = True
# Does the backend support window expressions (expression OVER (...))?
supports_over_clause = False
supports_frame_range_fixed_distance = False
only_supports_unbounded_with_preceding_and_following = False
# Does the backend support CAST with precision?
supports_cast_with_precision = True
    # How many fractional second digits does the database return when casting
    # a value to a type with a time component?
time_cast_precision = 6
# SQL to create a procedure for use by the Django test suite. The
# functionality of the procedure isn't important.
create_test_procedure_without_params_sql = None
create_test_procedure_with_int_param_sql = None
# SQL to create a table with a composite primary key for use by the Django
# test suite.
create_test_table_with_composite_primary_key = None
# Does the backend support keyword parameters for cursor.callproc()?
supports_callproc_kwargs = False
# What formats does the backend EXPLAIN syntax support?
supported_explain_formats = set()
# Does the backend support the default parameter in lead() and lag()?
supports_default_in_lead_lag = True
# Does the backend support ignoring constraint or uniqueness errors during
# INSERT?
supports_ignore_conflicts = True
# Does the backend support updating rows on constraint or uniqueness errors
# during INSERT?
supports_update_conflicts = False
supports_update_conflicts_with_target = False
# Does this backend require casting the results of CASE expressions used
# in UPDATE statements to ensure the expression has the correct type?
requires_casted_case_in_updates = False
# Does the backend support partial indexes (CREATE INDEX ... WHERE ...)?
supports_partial_indexes = True
supports_functions_in_partial_indexes = True
# Does the backend support covering indexes (CREATE INDEX ... INCLUDE ...)?
supports_covering_indexes = False
# Does the backend support indexes on expressions?
supports_expression_indexes = True
# Does the backend treat COLLATE as an indexed expression?
collate_as_index_expression = False
# Does the database allow more than one constraint or index on the same
# field(s)?
allows_multiple_constraints_on_same_fields = True
# Does the backend support boolean expressions in SELECT and GROUP BY
# clauses?
supports_boolean_expr_in_select_clause = True
# Does the backend support comparing boolean expressions in WHERE clauses?
# Eg: WHERE (price > 0) IS NOT NULL
supports_comparing_boolean_expr = True
# Does the backend support JSONField?
supports_json_field = True
# Can the backend introspect a JSONField?
can_introspect_json_field = True
# Does the backend support primitives in JSONField?
supports_primitives_in_json_field = True
# Is there a true datatype for JSON?
has_native_json_field = False
# Does the backend use PostgreSQL-style JSON operators like '->'?
has_json_operators = False
# Does the backend support __contains and __contained_by lookups for
# a JSONField?
supports_json_field_contains = True
# Does value__d__contains={'f': 'g'} (without a list around the dict) match
# {'d': [{'f': 'g'}]}?
json_key_contains_list_matching_requires_list = False
# Does the backend support JSONObject() database function?
has_json_object_function = True
# Does the backend support column collations?
supports_collation_on_charfield = True
supports_collation_on_textfield = True
# Does the backend support non-deterministic collations?
supports_non_deterministic_collations = True
# Does the backend support column and table comments?
supports_comments = False
# Does the backend support column comments in ADD COLUMN statements?
supports_comments_inline = False
# Does the backend support the logical XOR operator?
supports_logical_xor = False
# Set to (exception, message) if null characters in text are disallowed.
prohibits_null_characters_in_text_exception = None
# Does the backend support unlimited character columns?
supports_unlimited_charfield = False
# Collation names for use by the Django test suite.
test_collations = {
"ci": None, # Case-insensitive.
"cs": None, # Case-sensitive.
"non_default": None, # Non-default.
"swedish_ci": None, # Swedish case-insensitive.
}
# SQL template override for tests.aggregation.tests.NowUTC
test_now_utc_template = None
# A set of dotted paths to tests in Django's test suite that are expected
# to fail on this database.
django_test_expected_failures = set()
# A map of reasons to sets of dotted paths to tests in Django's test suite
# that should be skipped for this database.
django_test_skips = {}
def __init__(self, connection):
self.connection = connection
@cached_property
def supports_explaining_query_execution(self):
"""Does this backend support explaining query execution?"""
return self.connection.ops.explain_prefix is not None
@cached_property
def supports_transactions(self):
"""Confirm support for transactions."""
with self.connection.cursor() as cursor:
cursor.execute("CREATE TABLE ROLLBACK_TEST (X INT)")
self.connection.set_autocommit(False)
cursor.execute("INSERT INTO ROLLBACK_TEST (X) VALUES (8)")
self.connection.rollback()
self.connection.set_autocommit(True)
cursor.execute("SELECT COUNT(X) FROM ROLLBACK_TEST")
(count,) = cursor.fetchone()
cursor.execute("DROP TABLE ROLLBACK_TEST")
return count == 0
def allows_group_by_selected_pks_on_model(self, model):
if not self.allows_group_by_selected_pks:
return False
return model._meta.managed
|
e9fe7f5a23d9cb077be39335dcc40e56d9f77c1b6ef17c583a5bd88aee7e724b | import datetime
import decimal
import json
from importlib import import_module
import sqlparse
from django.conf import settings
from django.db import NotSupportedError, transaction
from django.db.backends import utils
from django.utils import timezone
from django.utils.encoding import force_str
class BaseDatabaseOperations:
"""
Encapsulate backend-specific differences, such as the way a backend
performs ordering or calculates the ID of a recently-inserted row.
"""
compiler_module = "django.db.models.sql.compiler"
# Integer field safe ranges by `internal_type` as documented
# in docs/ref/models/fields.txt.
integer_field_ranges = {
"SmallIntegerField": (-32768, 32767),
"IntegerField": (-2147483648, 2147483647),
"BigIntegerField": (-9223372036854775808, 9223372036854775807),
"PositiveBigIntegerField": (0, 9223372036854775807),
"PositiveSmallIntegerField": (0, 32767),
"PositiveIntegerField": (0, 2147483647),
"SmallAutoField": (-32768, 32767),
"AutoField": (-2147483648, 2147483647),
"BigAutoField": (-9223372036854775808, 9223372036854775807),
}
set_operators = {
"union": "UNION",
"intersection": "INTERSECT",
"difference": "EXCEPT",
}
# Mapping of Field.get_internal_type() (typically the model field's class
# name) to the data type to use for the Cast() function, if different from
# DatabaseWrapper.data_types.
cast_data_types = {}
# CharField data type if the max_length argument isn't provided.
cast_char_field_without_max_length = None
# Start and end points for window expressions.
PRECEDING = "PRECEDING"
FOLLOWING = "FOLLOWING"
UNBOUNDED_PRECEDING = "UNBOUNDED " + PRECEDING
UNBOUNDED_FOLLOWING = "UNBOUNDED " + FOLLOWING
CURRENT_ROW = "CURRENT ROW"
    # Prefix for EXPLAIN queries, or None if EXPLAIN isn't supported.
explain_prefix = None
def __init__(self, connection):
self.connection = connection
self._cache = None
def autoinc_sql(self, table, column):
"""
Return any SQL needed to support auto-incrementing primary keys, or
None if no SQL is necessary.
This SQL is executed when a table is created.
"""
return None
def bulk_batch_size(self, fields, objs):
"""
Return the maximum allowed batch size for the backend. The fields
are the fields going to be inserted in the batch, the objs contains
all the objects to be inserted.
"""
return len(objs)
def format_for_duration_arithmetic(self, sql):
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a "
"format_for_duration_arithmetic() method."
)
def cache_key_culling_sql(self):
"""
Return an SQL query that retrieves the first cache key greater than the
n smallest.
This is used by the 'db' cache backend to determine where to start
culling.
"""
cache_key = self.quote_name("cache_key")
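        # f-strings don't process %-escapes, so the returned template keeps a
        # literal %s (later interpolated with the table name by the db cache
        # backend) and %%s, which that interpolation turns into the %s
        # parameter placeholder.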
return f"SELECT {cache_key} FROM %s ORDER BY {cache_key} LIMIT 1 OFFSET %%s"
def unification_cast_sql(self, output_field):
"""
Given a field instance, return the SQL that casts the result of a union
to that type. The resulting string should contain a '%s' placeholder
for the expression being cast.
"""
return "%s"
def date_extract_sql(self, lookup_type, sql, params):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
        extracts a value from the given date column.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a date_extract_sql() "
"method"
)
def date_trunc_sql(self, lookup_type, sql, params, tzname=None):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
        truncates the given date or datetime column to a date object
with only the given specificity.
If `tzname` is provided, the given value is truncated in a specific
timezone.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a date_trunc_sql() "
"method."
)
def datetime_cast_date_sql(self, sql, params, tzname):
"""
Return the SQL to cast a datetime value to date value.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a "
"datetime_cast_date_sql() method."
)
def datetime_cast_time_sql(self, sql, params, tzname):
"""
Return the SQL to cast a datetime value to time value.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a "
"datetime_cast_time_sql() method"
)
def datetime_extract_sql(self, lookup_type, sql, params, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that extracts a value from the given
        datetime column.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a datetime_extract_sql() "
"method"
)
def datetime_trunc_sql(self, lookup_type, sql, params, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
        'second', return the SQL that truncates the given datetime column
        to a datetime object with only the given specificity.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() "
"method"
)
def time_trunc_sql(self, lookup_type, sql, params, tzname=None):
"""
Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
        that truncates the given time or datetime column to a time
object with only the given specificity.
If `tzname` is provided, the given value is truncated in a specific
timezone.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a time_trunc_sql() method"
)
def time_extract_sql(self, lookup_type, sql, params):
"""
Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
        that extracts a value from the given time column.
"""
return self.date_extract_sql(lookup_type, sql, params)
def deferrable_sql(self):
"""
Return the SQL to make a constraint "initially deferred" during a
CREATE TABLE statement.
"""
return ""
def distinct_sql(self, fields, params):
"""
Return an SQL DISTINCT clause which removes duplicate rows from the
result set. If any fields are given, only check the given fields for
duplicates.
"""
if fields:
raise NotSupportedError(
"DISTINCT ON fields is not supported by this database backend"
)
else:
return ["DISTINCT"], []
def fetch_returned_insert_columns(self, cursor, returning_params):
"""
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table, return the newly created data.
"""
return cursor.fetchone()
def field_cast_sql(self, db_type, internal_type):
"""
Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
(e.g. 'GenericIPAddressField'), return the SQL to cast it before using
it in a WHERE statement. The resulting string should contain a '%s'
placeholder for the column being searched against.
"""
return "%s"
def force_no_ordering(self):
"""
Return a list used in the "ORDER BY" clause to force no ordering at
all. Return an empty list to include nothing in the ordering.
"""
return []
def for_update_sql(self, nowait=False, skip_locked=False, of=(), no_key=False):
"""
Return the FOR UPDATE SQL clause to lock rows for an update operation.
"""
return "FOR%s UPDATE%s%s%s" % (
" NO KEY" if no_key else "",
" OF %s" % ", ".join(of) if of else "",
" NOWAIT" if nowait else "",
" SKIP LOCKED" if skip_locked else "",
)
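    # For example:
    #   for_update_sql() -> "FOR UPDATE"
    #   for_update_sql(nowait=True) -> "FOR UPDATE NOWAIT"
    #   for_update_sql(no_key=True, skip_locked=True)
    #       -> "FOR NO KEY UPDATE SKIP LOCKED"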
def _get_limit_offset_params(self, low_mark, high_mark):
offset = low_mark or 0
if high_mark is not None:
return (high_mark - offset), offset
elif offset:
return self.connection.ops.no_limit_value(), offset
return None, offset
def limit_offset_sql(self, low_mark, high_mark):
"""Return LIMIT/OFFSET SQL clause."""
limit, offset = self._get_limit_offset_params(low_mark, high_mark)
return " ".join(
sql
for sql in (
("LIMIT %d" % limit) if limit else None,
("OFFSET %d" % offset) if offset else None,
)
if sql
)
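    # For example, limit_offset_sql(2, 7) -> "LIMIT 5 OFFSET 2", and
    # limit_offset_sql(0, None) -> "" (no slicing). Note the truthiness test
    # above: a computed limit of 0 emits no LIMIT clause.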
def last_executed_query(self, cursor, sql, params):
"""
Return a string of the query last executed by the given cursor, with
placeholders replaced with actual values.
`sql` is the raw query containing placeholders and `params` is the
sequence of parameters. These are used by default, but this method
exists for database backends to provide a better implementation
according to their own quoting schemes.
"""
# Convert params to contain string values.
def to_string(s):
return force_str(s, strings_only=True, errors="replace")
if isinstance(params, (list, tuple)):
u_params = tuple(to_string(val) for val in params)
elif params is None:
u_params = ()
else:
u_params = {to_string(k): to_string(v) for k, v in params.items()}
return "QUERY = %r - PARAMS = %r" % (sql, u_params)
def last_insert_id(self, cursor, table_name, pk_name):
"""
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, return the newly created ID.
`pk_name` is the name of the primary-key column.
"""
return cursor.lastrowid
def lookup_cast(self, lookup_type, internal_type=None):
"""
Return the string to use in a query when performing lookups
("contains", "like", etc.). It should contain a '%s' placeholder for
the column being searched against.
"""
return "%s"
def max_in_list_size(self):
"""
Return the maximum number of items that can be passed in a single 'IN'
list condition, or None if the backend does not impose a limit.
"""
return None
def max_name_length(self):
"""
Return the maximum length of table and column names, or None if there
is no limit.
"""
return None
def no_limit_value(self):
"""
        Return the value to use for the LIMIT when we want "LIMIT
infinity". Return None if the limit clause can be omitted in this case.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a no_limit_value() method"
)
def pk_default_value(self):
"""
Return the value to use during an INSERT statement to specify that
the field should use its default value.
"""
return "DEFAULT"
def prepare_sql_script(self, sql):
"""
Take an SQL script that may contain multiple lines and return a list
of statements to feed to successive cursor.execute() calls.
Since few databases are able to process raw SQL scripts in a single
cursor.execute() call and PEP 249 doesn't talk about this use case,
the default implementation is conservative.
"""
return [
sqlparse.format(statement, strip_comments=True)
for statement in sqlparse.split(sql)
if statement
]
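    # For example (sqlparse keeps the trailing semicolons):
    #   prepare_sql_script("CREATE TABLE a (x INT); INSERT INTO a VALUES (1);")
    #       -> ["CREATE TABLE a (x INT);", "INSERT INTO a VALUES (1);"]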
def process_clob(self, value):
"""
Return the value of a CLOB column, for backends that return a locator
object that requires additional processing.
"""
return value
def return_insert_columns(self, fields):
"""
For backends that support returning columns as part of an insert query,
return the SQL and params to append to the INSERT query. The returned
fragment should contain a format string to hold the appropriate column.
"""
pass
def compiler(self, compiler_name):
"""
Return the SQLCompiler class corresponding to the given name,
in the namespace corresponding to the `compiler_module` attribute
on this backend.
"""
if self._cache is None:
self._cache = import_module(self.compiler_module)
return getattr(self._cache, compiler_name)
def quote_name(self, name):
"""
Return a quoted version of the given table, index, or column name. Do
not quote the given name if it's already been quoted.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a quote_name() method"
)
def regex_lookup(self, lookup_type):
"""
Return the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). It should contain a '%s'
placeholder for the column being searched against.
If the feature is not supported (or part of it is not supported), raise
NotImplementedError.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a regex_lookup() method"
)
def savepoint_create_sql(self, sid):
"""
Return the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
"""
return "SAVEPOINT %s" % self.quote_name(sid)
def savepoint_commit_sql(self, sid):
"""
Return the SQL for committing the given savepoint.
"""
return "RELEASE SAVEPOINT %s" % self.quote_name(sid)
def savepoint_rollback_sql(self, sid):
"""
Return the SQL for rolling back the given savepoint.
"""
return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)
def set_time_zone_sql(self):
"""
Return the SQL that will set the connection's time zone.
Return '' if the backend doesn't support time zones.
"""
return ""
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
"""
Return a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves).
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
If `reset_sequences` is True, the list includes SQL statements required
to reset the sequences.
The `allow_cascade` argument determines whether truncation may cascade
        to tables with foreign keys pointing to the tables being truncated.
PostgreSQL requires a cascade even if these tables are empty.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations must provide an sql_flush() method"
)
def execute_sql_flush(self, sql_list):
"""Execute a list of SQL statements to flush the database."""
with transaction.atomic(
using=self.connection.alias,
savepoint=self.connection.features.can_rollback_ddl,
):
with self.connection.cursor() as cursor:
for sql in sql_list:
cursor.execute(sql)
def sequence_reset_by_name_sql(self, style, sequences):
"""
Return a list of the SQL statements required to reset sequences
passed in `sequences`.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return []
def sequence_reset_sql(self, style, model_list):
"""
Return a list of the SQL statements required to reset sequences for
the given models.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return [] # No sequence reset required by default.
def start_transaction_sql(self):
"""Return the SQL statement required to start a transaction."""
return "BEGIN;"
def end_transaction_sql(self, success=True):
"""Return the SQL statement required to end a transaction."""
if not success:
return "ROLLBACK;"
return "COMMIT;"
def tablespace_sql(self, tablespace, inline=False):
"""
Return the SQL that will be used in a query to define the tablespace.
Return '' if the backend doesn't support tablespaces.
If `inline` is True, append the SQL to a row; otherwise append it to
the entire CREATE TABLE or CREATE INDEX statement.
"""
return ""
def prep_for_like_query(self, x):
"""Prepare a value for use in a LIKE query."""
return str(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")
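    # For example, prep_for_like_query("50%_off") == r"50\%\_off", so the
    # percent sign and underscore are matched literally instead of acting as
    # LIKE wildcards.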
# Same as prep_for_like_query(), but called for "iexact" matches, which
# need not necessarily be implemented using "LIKE" in the backend.
prep_for_iexact_query = prep_for_like_query
def validate_autopk_value(self, value):
"""
Certain backends do not accept some values for "serial" fields
(for example zero in MySQL). Raise a ValueError if the value is
invalid, otherwise return the validated value.
"""
return value
def adapt_unknown_value(self, value):
"""
Transform a value to something compatible with the backend driver.
This method only depends on the type of the value. It's designed for
cases where the target type isn't known, such as .raw() SQL queries.
As a consequence it may not work perfectly in all circumstances.
"""
if isinstance(value, datetime.datetime): # must be before date
return self.adapt_datetimefield_value(value)
elif isinstance(value, datetime.date):
return self.adapt_datefield_value(value)
elif isinstance(value, datetime.time):
return self.adapt_timefield_value(value)
elif isinstance(value, decimal.Decimal):
return self.adapt_decimalfield_value(value)
else:
return value
def adapt_integerfield_value(self, value, internal_type):
return value
def adapt_datefield_value(self, value):
"""
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
"""
if value is None:
return None
return str(value)
def adapt_datetimefield_value(self, value):
"""
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
"""
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, "resolve_expression"):
return value
return str(value)
def adapt_timefield_value(self, value):
"""
Transform a time value to an object compatible with what is expected
by the backend driver for time columns.
"""
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, "resolve_expression"):
return value
if timezone.is_aware(value):
raise ValueError("Django does not support timezone-aware times.")
return str(value)
def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
"""
Transform a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
"""
return utils.format_number(value, max_digits, decimal_places)
def adapt_ipaddressfield_value(self, value):
"""
Transform a string representation of an IP address into the expected
type for the backend driver.
"""
return value or None
def adapt_json_value(self, value, encoder):
return json.dumps(value, cls=encoder)
def year_lookup_bounds_for_date_field(self, value, iso_year=False):
"""
        Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateField value using a year
lookup.
`value` is an int, containing the looked-up year.
If `iso_year` is True, return bounds for ISO-8601 week-numbering years.
"""
if iso_year:
first = datetime.date.fromisocalendar(value, 1, 1)
second = datetime.date.fromisocalendar(
value + 1, 1, 1
) - datetime.timedelta(days=1)
else:
first = datetime.date(value, 1, 1)
second = datetime.date(value, 12, 31)
first = self.adapt_datefield_value(first)
second = self.adapt_datefield_value(second)
return [first, second]
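    # For example, year_lookup_bounds_for_date_field(2015) returns
    # ["2015-01-01", "2015-12-31"], while the iso_year=True variant returns
    # ["2014-12-29", "2016-01-03"] (the ISO-8601 week-numbering year 2015).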
def year_lookup_bounds_for_datetime_field(self, value, iso_year=False):
"""
        Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateTimeField value using a year
lookup.
`value` is an int, containing the looked-up year.
If `iso_year` is True, return bounds for ISO-8601 week-numbering years.
"""
if iso_year:
first = datetime.datetime.fromisocalendar(value, 1, 1)
second = datetime.datetime.fromisocalendar(
value + 1, 1, 1
) - datetime.timedelta(microseconds=1)
else:
first = datetime.datetime(value, 1, 1)
second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
if settings.USE_TZ:
tz = timezone.get_current_timezone()
first = timezone.make_aware(first, tz)
second = timezone.make_aware(second, tz)
first = self.adapt_datetimefield_value(first)
second = self.adapt_datetimefield_value(second)
return [first, second]
def get_db_converters(self, expression):
"""
Return a list of functions needed to convert field data.
Some field types on some backends do not provide data in the correct
        format; this is the hook for converter functions.
"""
return []
def convert_durationfield_value(self, value, expression, connection):
if value is not None:
return datetime.timedelta(0, 0, value)
def check_expression_support(self, expression):
"""
Check that the backend supports the provided expression.
This is used on specific backends to rule out known expressions
that have problematic or nonexistent implementations. If the
expression has a known problem, the backend should raise
NotSupportedError.
"""
pass
def conditional_expression_supported_in_where_clause(self, expression):
"""
Return True, if the conditional expression is supported in the WHERE
clause.
"""
return True
def combine_expression(self, connector, sub_expressions):
"""
Combine a list of subexpressions into a single expression, using
the provided connecting operator. This is required because operators
can vary between backends (e.g., Oracle with %% and &) and between
subexpression types (e.g., date expressions).
"""
conn = " %s " % connector
return conn.join(sub_expressions)
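    # For example, combine_expression("+", ['"price"', '"tax"']) returns
    # '"price" + "tax"'.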
def combine_duration_expression(self, connector, sub_expressions):
return self.combine_expression(connector, sub_expressions)
def binary_placeholder_sql(self, value):
"""
Some backends require special syntax to insert binary content (MySQL
for example uses '_binary %s').
"""
return "%s"
def modify_insert_params(self, placeholder, params):
"""
Allow modification of insert parameters. Needed for Oracle Spatial
backend due to #10888.
"""
return params
def integer_field_range(self, internal_type):
"""
Given an integer field internal type (e.g. 'PositiveIntegerField'),
return a tuple of the (min_value, max_value) form representing the
range of the column type bound to the field.
"""
return self.integer_field_ranges[internal_type]
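    # For example, integer_field_range("PositiveSmallIntegerField") returns
    # (0, 32767).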
def subtract_temporals(self, internal_type, lhs, rhs):
if self.connection.features.supports_temporal_subtraction:
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
return "(%s - %s)" % (lhs_sql, rhs_sql), (*lhs_params, *rhs_params)
raise NotSupportedError(
"This backend does not support %s subtraction." % internal_type
)
def window_frame_start(self, start):
if isinstance(start, int):
if start < 0:
return "%d %s" % (abs(start), self.PRECEDING)
elif start == 0:
return self.CURRENT_ROW
elif start is None:
return self.UNBOUNDED_PRECEDING
raise ValueError(
"start argument must be a negative integer, zero, or None, but got '%s'."
% start
)
def window_frame_end(self, end):
if isinstance(end, int):
if end == 0:
return self.CURRENT_ROW
elif end > 0:
return "%d %s" % (end, self.FOLLOWING)
elif end is None:
return self.UNBOUNDED_FOLLOWING
raise ValueError(
"end argument must be a positive integer, zero, or None, but got '%s'."
% end
)
def window_frame_rows_start_end(self, start=None, end=None):
"""
Return SQL for start and end points in an OVER clause window frame.
"""
if not self.connection.features.supports_over_clause:
raise NotSupportedError("This backend does not support window expressions.")
return self.window_frame_start(start), self.window_frame_end(end)
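    # For example, window_frame_rows_start_end(start=-3, end=None) returns
    # ("3 PRECEDING", "UNBOUNDED FOLLOWING"), i.e. the frame
    # ROWS BETWEEN 3 PRECEDING AND UNBOUNDED FOLLOWING.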
def window_frame_range_start_end(self, start=None, end=None):
start_, end_ = self.window_frame_rows_start_end(start, end)
features = self.connection.features
if features.only_supports_unbounded_with_preceding_and_following and (
(start and start < 0) or (end and end > 0)
):
raise NotSupportedError(
"%s only supports UNBOUNDED together with PRECEDING and "
"FOLLOWING." % self.connection.display_name
)
return start_, end_
def explain_query_prefix(self, format=None, **options):
if not self.connection.features.supports_explaining_query_execution:
raise NotSupportedError(
"This backend does not support explaining query execution."
)
if format:
supported_formats = self.connection.features.supported_explain_formats
normalized_format = format.upper()
if normalized_format not in supported_formats:
msg = "%s is not a recognized format." % normalized_format
if supported_formats:
msg += " Allowed formats: %s" % ", ".join(sorted(supported_formats))
else:
msg += (
f" {self.connection.display_name} does not support any formats."
)
raise ValueError(msg)
if options:
raise ValueError("Unknown options: %s" % ", ".join(sorted(options.keys())))
return self.explain_prefix
def insert_statement(self, on_conflict=None):
return "INSERT INTO"
def on_conflict_suffix_sql(self, fields, on_conflict, update_fields, unique_fields):
return ""
|
a2dccbc3d78a27ae9c8a25a3d347b1719143993d69ec00ef3f3dd89338aa4e1f | import logging
import operator
from datetime import datetime
from django.conf import settings
from django.db.backends.ddl_references import (
Columns,
Expressions,
ForeignKeyName,
IndexName,
Statement,
Table,
)
from django.db.backends.utils import names_digest, split_identifier
from django.db.models import Deferrable, Index
from django.db.models.sql import Query
from django.db.transaction import TransactionManagementError, atomic
from django.utils import timezone
logger = logging.getLogger("django.db.backends.schema")
def _is_relevant_relation(relation, altered_field):
"""
When altering the given field, must constraints on its model from the given
relation be temporarily dropped?
"""
field = relation.field
if field.many_to_many:
# M2M reverse field
return False
if altered_field.primary_key and field.to_fields == [None]:
# Foreign key constraint on the primary key, which is being altered.
return True
# Is the constraint targeting the field being altered?
return altered_field.name in field.to_fields
def _all_related_fields(model):
# Related fields must be returned in a deterministic order.
return sorted(
model._meta._get_fields(
forward=False,
reverse=True,
include_hidden=True,
include_parents=False,
),
key=operator.attrgetter("name"),
)
def _related_non_m2m_objects(old_field, new_field):
# Filter out m2m objects from reverse relations.
# Return (old_relation, new_relation) tuples.
related_fields = zip(
(
obj
for obj in _all_related_fields(old_field.model)
if _is_relevant_relation(obj, old_field)
),
(
obj
for obj in _all_related_fields(new_field.model)
if _is_relevant_relation(obj, new_field)
),
)
for old_rel, new_rel in related_fields:
yield old_rel, new_rel
yield from _related_non_m2m_objects(
old_rel.remote_field,
new_rel.remote_field,
)
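# The recursion above follows chains of relations (e.g. a OneToOneField
# parent link that is itself the target of other foreign keys), so every
# constraint that transitively references the altered field is yielded.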
class BaseDatabaseSchemaEditor:
"""
This class and its subclasses are responsible for emitting schema-changing
statements to the databases - model creation/removal/alteration, field
renaming, index fiddling, and so on.
"""
# Overrideable SQL templates
sql_create_table = "CREATE TABLE %(table)s (%(definition)s)"
sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s"
sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s"
sql_delete_table = "DROP TABLE %(table)s CASCADE"
sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s"
sql_alter_column = "ALTER TABLE %(table)s %(changes)s"
sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s"
sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL"
sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL"
sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s"
sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT"
sql_alter_column_no_default_null = sql_alter_column_no_default
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE"
sql_rename_column = (
"ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s"
)
sql_update_with_default = (
"UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL"
)
sql_unique_constraint = "UNIQUE (%(columns)s)%(deferrable)s"
sql_check_constraint = "CHECK (%(check)s)"
sql_delete_constraint = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_constraint = "CONSTRAINT %(name)s %(constraint)s"
sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)"
sql_delete_check = sql_delete_constraint
sql_create_unique = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s "
"UNIQUE (%(columns)s)%(deferrable)s"
)
sql_delete_unique = sql_delete_constraint
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s"
)
sql_create_inline_fk = None
sql_create_column_inline_fk = None
sql_delete_fk = sql_delete_constraint
sql_create_index = (
"CREATE INDEX %(name)s ON %(table)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_create_unique_index = (
"CREATE UNIQUE INDEX %(name)s ON %(table)s "
"(%(columns)s)%(include)s%(condition)s"
)
sql_rename_index = "ALTER INDEX %(old_name)s RENAME TO %(new_name)s"
sql_delete_index = "DROP INDEX %(name)s"
sql_create_pk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
)
sql_delete_pk = sql_delete_constraint
sql_delete_procedure = "DROP PROCEDURE %(procedure)s"
sql_alter_table_comment = "COMMENT ON TABLE %(table)s IS %(comment)s"
sql_alter_column_comment = "COMMENT ON COLUMN %(table)s.%(column)s IS %(comment)s"
def __init__(self, connection, collect_sql=False, atomic=True):
self.connection = connection
self.collect_sql = collect_sql
if self.collect_sql:
self.collected_sql = []
self.atomic_migration = self.connection.features.can_rollback_ddl and atomic
# State-managing methods
def __enter__(self):
self.deferred_sql = []
if self.atomic_migration:
self.atomic = atomic(self.connection.alias)
self.atomic.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
for sql in self.deferred_sql:
self.execute(sql)
if self.atomic_migration:
self.atomic.__exit__(exc_type, exc_value, traceback)
# Core utility functions
def execute(self, sql, params=()):
"""Execute the given SQL statement, with optional parameters."""
# Don't perform the transactional DDL check if SQL is being collected
# as it's not going to be executed anyway.
if (
not self.collect_sql
and self.connection.in_atomic_block
and not self.connection.features.can_rollback_ddl
):
raise TransactionManagementError(
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
# Account for non-string statement objects.
sql = str(sql)
# Log the command we're running, then run it
logger.debug(
"%s; (params %r)", sql, params, extra={"params": params, "sql": sql}
)
if self.collect_sql:
ending = "" if sql.rstrip().endswith(";") else ";"
if params is not None:
self.collected_sql.append(
(sql % tuple(map(self.quote_value, params))) + ending
)
else:
self.collected_sql.append(sql + ending)
else:
with self.connection.cursor() as cursor:
cursor.execute(sql, params)
def quote_name(self, name):
return self.connection.ops.quote_name(name)
def table_sql(self, model):
"""Take a model and return its table definition."""
# Add any unique_togethers (always deferred, as some fields might be
# created afterward, like geometry fields with some backends).
for field_names in model._meta.unique_together:
fields = [model._meta.get_field(field) for field in field_names]
self.deferred_sql.append(self._create_unique_sql(model, fields))
# Create column SQL, add FK deferreds if needed.
column_sqls = []
params = []
for field in model._meta.local_fields:
# SQL.
definition, extra_params = self.column_sql(model, field)
if definition is None:
continue
# Check constraints can go on the column SQL here.
db_params = field.db_parameters(connection=self.connection)
if db_params["check"]:
definition += " " + self.sql_check_constraint % db_params
# Autoincrement SQL (for backends with inline variant).
col_type_suffix = field.db_type_suffix(connection=self.connection)
if col_type_suffix:
definition += " %s" % col_type_suffix
params.extend(extra_params)
# FK.
if field.remote_field and field.db_constraint:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(
field.remote_field.field_name
).column
if self.sql_create_inline_fk:
definition += " " + self.sql_create_inline_fk % {
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
}
elif self.connection.features.supports_foreign_keys:
self.deferred_sql.append(
self._create_fk_sql(
model, field, "_fk_%(to_table)s_%(to_column)s"
)
)
# Add the SQL to our big list.
column_sqls.append(
"%s %s"
% (
self.quote_name(field.column),
definition,
)
)
# Autoincrement SQL (for backends with post table definition
# variant).
if field.get_internal_type() in (
"AutoField",
"BigAutoField",
"SmallAutoField",
):
autoinc_sql = self.connection.ops.autoinc_sql(
model._meta.db_table, field.column
)
if autoinc_sql:
self.deferred_sql.extend(autoinc_sql)
constraints = [
constraint.constraint_sql(model, self)
for constraint in model._meta.constraints
]
sql = self.sql_create_table % {
"table": self.quote_name(model._meta.db_table),
"definition": ", ".join(
str(constraint)
for constraint in (*column_sqls, *constraints)
if constraint
),
}
if model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(
model._meta.db_tablespace
)
if tablespace_sql:
sql += " " + tablespace_sql
return sql, params
# Field <-> database mapping functions
def _iter_column_sql(
self, column_db_type, params, model, field, field_db_params, include_default
):
yield column_db_type
if collation := field_db_params.get("collation"):
yield self._collate_sql(collation)
if self.connection.features.supports_comments_inline and field.db_comment:
yield self._comment_sql(field.db_comment)
# Work out nullability.
null = field.null
# Include a default value, if requested.
include_default = (
include_default
and not self.skip_default(field)
and
# Don't include a default value if it's a nullable field and the
# default cannot be dropped in the ALTER COLUMN statement (e.g.
# MySQL longtext and longblob).
not (null and self.skip_default_on_alter(field))
)
if include_default:
default_value = self.effective_default(field)
if default_value is not None:
column_default = "DEFAULT " + self._column_default_sql(field)
if self.connection.features.requires_literal_defaults:
# Some databases can't take defaults as a parameter (Oracle).
# If this is the case, the individual schema backend should
# implement prepare_default().
yield column_default % self.prepare_default(default_value)
else:
yield column_default
params.append(default_value)
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
if (
field.empty_strings_allowed
and not field.primary_key
and self.connection.features.interprets_empty_strings_as_nulls
):
null = True
if not null:
yield "NOT NULL"
elif not self.connection.features.implied_column_null:
yield "NULL"
if field.primary_key:
yield "PRIMARY KEY"
elif field.unique:
yield "UNIQUE"
# Optionally add the tablespace if it's an implicitly indexed column.
tablespace = field.db_tablespace or model._meta.db_tablespace
if (
tablespace
and self.connection.features.supports_tablespaces
and field.unique
):
yield self.connection.ops.tablespace_sql(tablespace, inline=True)
def column_sql(self, model, field, include_default=False):
"""
Return the column definition for a field. The field must already have
had set_attributes_from_name() called.
"""
# Get the column's type and use that as the basis of the SQL.
field_db_params = field.db_parameters(connection=self.connection)
column_db_type = field_db_params["type"]
# Check for fields that aren't actually columns (e.g. M2M).
if column_db_type is None:
return None, None
params = []
return (
" ".join(
# This appends to the params being returned.
self._iter_column_sql(
column_db_type,
params,
model,
field,
field_db_params,
include_default,
)
),
params,
)
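    # As a rough illustration, for a nullable CharField(max_length=50) on a
    # backend where implied_column_null is False, this returns something like
    # ("varchar(50) NULL", []).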
def skip_default(self, field):
"""
        Some backends don't accept default values for certain column types
        (e.g. MySQL longtext and longblob).
"""
return False
def skip_default_on_alter(self, field):
"""
        Some backends don't accept default values for certain column types
        (e.g. MySQL longtext and longblob) in the ALTER COLUMN statement.
"""
return False
def prepare_default(self, value):
"""
        Only used by backends that have the requires_literal_defaults feature.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseSchemaEditor for backends which have "
"requires_literal_defaults must provide a prepare_default() method"
)
def _column_default_sql(self, field):
"""
Return the SQL to use in a DEFAULT clause. The resulting string should
contain a '%s' placeholder for a default value.
"""
return "%s"
@staticmethod
def _effective_default(field):
# This method allows testing its logic without a connection.
if field.has_default():
default = field.get_default()
elif not field.null and field.blank and field.empty_strings_allowed:
if field.get_internal_type() == "BinaryField":
default = b""
else:
default = ""
elif getattr(field, "auto_now", False) or getattr(field, "auto_now_add", False):
internal_type = field.get_internal_type()
if internal_type == "DateTimeField":
default = timezone.now()
else:
default = datetime.now()
if internal_type == "DateField":
default = default.date()
elif internal_type == "TimeField":
default = default.time()
else:
default = None
return default
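    # For example, per the branches above:
    #   - CharField(blank=True) (NOT NULL, empty strings allowed) -> ""
    #   - BinaryField(blank=True) -> b""
    #   - DateField(auto_now_add=True) -> datetime.now().date()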
def effective_default(self, field):
"""Return a field's effective database default value."""
return field.get_db_prep_save(self._effective_default(field), self.connection)
def quote_value(self, value):
"""
Return a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
"""
raise NotImplementedError()
# Actions
def create_model(self, model):
"""
Create a table and any accompanying indexes or unique constraints for
the given `model`.
"""
sql, params = self.table_sql(model)
        # Prevent using [] as params, in case a literal '%' is used in the
        # definition.
self.execute(sql, params or None)
if self.connection.features.supports_comments:
# Add table comment.
if model._meta.db_table_comment:
self.alter_db_table_comment(model, None, model._meta.db_table_comment)
# Add column comments.
if not self.connection.features.supports_comments_inline:
for field in model._meta.local_fields:
if field.db_comment:
field_db_params = field.db_parameters(
connection=self.connection
)
field_type = field_db_params["type"]
self.execute(
*self._alter_column_comment_sql(
model, field, field_type, field.db_comment
)
)
        # Add any field indexes and index_together indexes (deferred, as
        # SQLite's _remake_table needs them).
self.deferred_sql.extend(self._model_indexes_sql(model))
# Make M2M tables
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.create_model(field.remote_field.through)
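    # Typical usage (a sketch; Author is a hypothetical model). Schema
    # editors are context managers, so deferred SQL such as indexes and FK
    # constraints runs on exit:
    #
    #     from django.db import connection
    #
    #     with connection.schema_editor() as editor:
    #         editor.create_model(Author)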
def delete_model(self, model):
"""Delete a model from the database."""
# Handle auto-created intermediary models
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# Delete the table
self.execute(
self.sql_delete_table
% {
"table": self.quote_name(model._meta.db_table),
}
)
# Remove all deferred statements referencing the deleted table.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_table(
model._meta.db_table
):
self.deferred_sql.remove(sql)
def add_index(self, model, index):
"""Add an index on a model."""
if (
index.contains_expressions
and not self.connection.features.supports_expression_indexes
):
return None
# Index.create_sql returns interpolated SQL which makes params=None a
# necessity to avoid escaping attempts on execution.
self.execute(index.create_sql(model, self), params=None)
def remove_index(self, model, index):
"""Remove an index from a model."""
if (
index.contains_expressions
and not self.connection.features.supports_expression_indexes
):
return None
self.execute(index.remove_sql(model, self))
def rename_index(self, model, old_index, new_index):
if self.connection.features.can_rename_index:
self.execute(
self._rename_index_sql(model, old_index.name, new_index.name),
params=None,
)
else:
self.remove_index(model, old_index)
self.add_index(model, new_index)
def add_constraint(self, model, constraint):
"""Add a constraint to a model."""
sql = constraint.create_sql(model, self)
if sql:
# Constraint.create_sql returns interpolated SQL which makes
# params=None a necessity to avoid escaping attempts on execution.
self.execute(sql, params=None)
def remove_constraint(self, model, constraint):
"""Remove a constraint from a model."""
sql = constraint.remove_sql(model, self)
if sql:
self.execute(sql)
def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deal with a model changing its unique_together. The input
unique_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_unique_together}
news = {tuple(fields) for fields in new_unique_together}
# Deleted uniques
for fields in olds.difference(news):
self._delete_composed_index(
model,
fields,
{"unique": True, "primary_key": False},
self.sql_delete_unique,
)
# Created uniques
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_unique_sql(model, fields))
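    # Usage sketch (Book is a hypothetical model; note the doubly-nested
    # tuples):
    #
    #     editor.alter_unique_together(
    #         Book,
    #         old_unique_together=[("author", "title")],
    #         new_unique_together=[("author", "isbn")],
    #     )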
def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deal with a model changing its index_together. The input
index_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_index_together}
news = {tuple(fields) for fields in new_index_together}
# Deleted indexes
for fields in olds.difference(news):
self._delete_composed_index(
model,
fields,
{"index": True, "unique": False},
self.sql_delete_index,
)
# Created indexes
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_index_sql(model, fields=fields, suffix="_idx"))
def _delete_composed_index(self, model, fields, constraint_kwargs, sql):
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
meta_index_names = {constraint.name for constraint in model._meta.indexes}
columns = [model._meta.get_field(field).column for field in fields]
constraint_names = self._constraint_names(
model,
columns,
exclude=meta_constraint_names | meta_index_names,
**constraint_kwargs,
)
if (
constraint_kwargs.get("unique") is True
and constraint_names
and self.connection.features.allows_multiple_constraints_on_same_fields
):
# Constraint matching the unique_together name.
default_name = str(
self._unique_constraint_name(model._meta.db_table, columns, quote=False)
)
if default_name in constraint_names:
constraint_names = [default_name]
if len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of constraints for %s(%s)"
% (
len(constraint_names),
model._meta.db_table,
", ".join(columns),
)
)
self.execute(self._delete_constraint_sql(sql, model, constraint_names[0]))
def alter_db_table(self, model, old_db_table, new_db_table):
"""Rename the table a model points to."""
if old_db_table == new_db_table or (
self.connection.features.ignores_table_name_case
and old_db_table.lower() == new_db_table.lower()
):
return
self.execute(
self.sql_rename_table
% {
"old_table": self.quote_name(old_db_table),
"new_table": self.quote_name(new_db_table),
}
)
# Rename all references to the old table name.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_table_references(old_db_table, new_db_table)
def alter_db_table_comment(self, model, old_db_table_comment, new_db_table_comment):
self.execute(
self.sql_alter_table_comment
% {
"table": self.quote_name(model._meta.db_table),
"comment": self.quote_value(new_db_table_comment or ""),
}
)
def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
"""Move a model's table between tablespaces."""
self.execute(
self.sql_retablespace_table
% {
"table": self.quote_name(model._meta.db_table),
"old_tablespace": self.quote_name(old_db_tablespace),
"new_tablespace": self.quote_name(new_db_tablespace),
}
)
def add_field(self, model, field):
"""
Create a field on a model. Usually involves adding a column, but may
involve adding a table instead (for M2M fields).
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.create_model(field.remote_field.through)
# Get the column's definition
definition, params = self.column_sql(model, field, include_default=True)
# It might not actually have a column behind it
if definition is None:
return
if col_type_suffix := field.db_type_suffix(connection=self.connection):
definition += f" {col_type_suffix}"
# Check constraints can go on the column SQL here
db_params = field.db_parameters(connection=self.connection)
if db_params["check"]:
definition += " " + self.sql_check_constraint % db_params
if (
field.remote_field
and self.connection.features.supports_foreign_keys
and field.db_constraint
):
constraint_suffix = "_fk_%(to_table)s_%(to_column)s"
# Add FK constraint inline, if supported.
if self.sql_create_column_inline_fk:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(
field.remote_field.field_name
).column
namespace, _ = split_identifier(model._meta.db_table)
definition += " " + self.sql_create_column_inline_fk % {
"name": self._fk_constraint_name(model, field, constraint_suffix),
"namespace": "%s." % self.quote_name(namespace)
if namespace
else "",
"column": self.quote_name(field.column),
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
"deferrable": self.connection.ops.deferrable_sql(),
}
# Otherwise, add FK constraints later.
else:
self.deferred_sql.append(
self._create_fk_sql(model, field, constraint_suffix)
)
# Build the SQL and run it
sql = self.sql_create_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
"definition": definition,
}
self.execute(sql, params)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if (
not self.skip_default_on_alter(field)
and self.effective_default(field) is not None
):
changes_sql, params = self._alter_column_default_sql(
model, None, field, drop=True
)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Add field comment, if required.
if (
field.db_comment
and self.connection.features.supports_comments
and not self.connection.features.supports_comments_inline
):
field_type = db_params["type"]
self.execute(
*self._alter_column_comment_sql(
model, field, field_type, field.db_comment
)
)
# Add an index, if required
self.deferred_sql.extend(self._field_indexes_sql(model, field))
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
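    # Usage sketch (Book and the field name are hypothetical; the field must
    # have had set_attributes_from_name() called first):
    #
    #     from django.db import connection, models
    #
    #     field = models.IntegerField(default=0)
    #     field.set_attributes_from_name("num_pages")
    #     with connection.schema_editor() as editor:
    #         editor.add_field(Book, field)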
def remove_field(self, model, field):
"""
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.delete_model(field.remote_field.through)
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)["type"] is None:
return
# Drop any FK constraints, MySQL requires explicit deletion
if field.remote_field:
fk_names = self._constraint_names(model, [field.column], foreign_key=True)
for fk_name in fk_names:
self.execute(self._delete_fk_sql(model, fk_name))
# Delete the column
sql = self.sql_delete_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
self.execute(sql)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
# Remove all deferred statements referencing the deleted column.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_column(
model._meta.db_table, field.column
):
self.deferred_sql.remove(sql)
def alter_field(self, model, old_field, new_field, strict=False):
"""
Allow a field's type, uniqueness, nullability, default, column,
constraints, etc. to be modified.
`old_field` is required to compute the necessary changes.
If `strict` is True, raise errors if the old column does not match
`old_field` precisely.
"""
if not self._field_should_be_altered(old_field, new_field):
return
# Ensure this field is even column-based
old_db_params = old_field.db_parameters(connection=self.connection)
old_type = old_db_params["type"]
new_db_params = new_field.db_parameters(connection=self.connection)
new_type = new_db_params["type"]
if (old_type is None and old_field.remote_field is None) or (
new_type is None and new_field.remote_field is None
):
raise ValueError(
"Cannot alter field %s into %s - they do not properly define "
"db_type (are you using a badly-written custom field?)"
% (old_field, new_field),
)
elif (
old_type is None
and new_type is None
and (
old_field.remote_field.through
and new_field.remote_field.through
and old_field.remote_field.through._meta.auto_created
and new_field.remote_field.through._meta.auto_created
)
):
return self._alter_many_to_many(model, old_field, new_field, strict)
elif (
old_type is None
and new_type is None
and (
old_field.remote_field.through
and new_field.remote_field.through
and not old_field.remote_field.through._meta.auto_created
and not new_field.remote_field.through._meta.auto_created
)
):
# Both sides have through models; this is a no-op.
return
elif old_type is None or new_type is None:
raise ValueError(
"Cannot alter field %s into %s - they are not compatible types "
"(you cannot alter to or from M2M fields, or add or remove "
"through= on M2M fields)" % (old_field, new_field)
)
self._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict,
)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
"""Perform a "physical" (non-ManyToMany) field update."""
# Drop any FK constraints, we'll remake them later
fks_dropped = set()
if (
self.connection.features.supports_foreign_keys
and old_field.remote_field
and old_field.db_constraint
and self._field_should_be_altered(
old_field,
new_field,
ignore={"db_comment"},
)
):
fk_names = self._constraint_names(
model, [old_field.column], foreign_key=True
)
if strict and len(fk_names) != 1:
raise ValueError(
"Found wrong number (%s) of foreign key constraints for %s.%s"
% (
len(fk_names),
model._meta.db_table,
old_field.column,
)
)
for fk_name in fk_names:
fks_dropped.add((old_field.column,))
self.execute(self._delete_fk_sql(model, fk_name))
# Has unique been removed?
if old_field.unique and (
not new_field.unique or self._field_became_primary_key(old_field, new_field)
):
# Find the unique constraint for this field
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
constraint_names = self._constraint_names(
model,
[old_field.column],
unique=True,
primary_key=False,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of unique constraints for %s.%s"
% (
len(constraint_names),
model._meta.db_table,
old_field.column,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_unique_sql(model, constraint_name))
# Drop incoming FK constraints if the field is a primary key or unique,
# which might be a to_field target, and things are going to change.
old_collation = old_db_params.get("collation")
new_collation = new_db_params.get("collation")
drop_foreign_keys = (
self.connection.features.supports_foreign_keys
and (
(old_field.primary_key and new_field.primary_key)
or (old_field.unique and new_field.unique)
)
and ((old_type != new_type) or (old_collation != new_collation))
)
if drop_foreign_keys:
            # '_meta.related_field' also contains M2M reverse fields; these
            # will be filtered out.
for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
rel_fk_names = self._constraint_names(
new_rel.related_model, [new_rel.field.column], foreign_key=True
)
for fk_name in rel_fk_names:
self.execute(self._delete_fk_sql(new_rel.related_model, fk_name))
# Removed an index? (no strict check, as multiple indexes are possible)
# Remove indexes if db_index switched to False or a unique constraint
# will now be used in lieu of an index. The following lines from the
# truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# True | False | False | False
# True | False | False | True
# True | False | True | True
if (
old_field.db_index
and not old_field.unique
and (not new_field.db_index or new_field.unique)
):
# Find the index for this field
meta_index_names = {index.name for index in model._meta.indexes}
# Retrieve only BTREE indexes since this is what's created with
# db_index=True.
index_names = self._constraint_names(
model,
[old_field.column],
index=True,
type_=Index.suffix,
exclude=meta_index_names,
)
for index_name in index_names:
# The only way to check if an index was created with
# db_index=True or with Index(['field'], name='foo')
# is to look at its name (refs #28053).
self.execute(self._delete_index_sql(model, index_name))
# Change check constraints?
if old_db_params["check"] != new_db_params["check"] and old_db_params["check"]:
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
constraint_names = self._constraint_names(
model,
[old_field.column],
check=True,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of check constraints for %s.%s"
% (
len(constraint_names),
model._meta.db_table,
old_field.column,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_check_sql(model, constraint_name))
# Have they renamed the column?
if old_field.column != new_field.column:
self.execute(
self._rename_field_sql(
model._meta.db_table, old_field, new_field, new_type
)
)
# Rename all references to the renamed column.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_column_references(
model._meta.db_table, old_field.column, new_field.column
)
# Next, start accumulating actions to do
actions = []
null_actions = []
post_actions = []
# Type suffix change? (e.g. auto increment).
old_type_suffix = old_field.db_type_suffix(connection=self.connection)
new_type_suffix = new_field.db_type_suffix(connection=self.connection)
# Type, collation, or comment change?
if (
old_type != new_type
or old_type_suffix != new_type_suffix
or old_collation != new_collation
or (
self.connection.features.supports_comments
and old_field.db_comment != new_field.db_comment
)
):
fragment, other_actions = self._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
actions.append(fragment)
post_actions.extend(other_actions)
# When changing a column NULL constraint to NOT NULL with a given
# default value, we need to perform 4 steps:
# 1. Add a default for new incoming writes
# 2. Update existing NULL rows with new default
# 3. Replace NULL constraint with NOT NULL
# 4. Drop the default again.
# Default change?
needs_database_default = False
if old_field.null and not new_field.null:
old_default = self.effective_default(old_field)
new_default = self.effective_default(new_field)
if (
not self.skip_default_on_alter(new_field)
and old_default != new_default
and new_default is not None
):
needs_database_default = True
actions.append(
self._alter_column_default_sql(model, old_field, new_field)
)
# Nullability change?
if old_field.null != new_field.null:
fragment = self._alter_column_null_sql(model, old_field, new_field)
if fragment:
null_actions.append(fragment)
# Only if we have a default and there is a change from NULL to NOT NULL
four_way_default_alteration = new_field.has_default() and (
old_field.null and not new_field.null
)
if actions or null_actions:
if not four_way_default_alteration:
# If we don't have to do a 4-way default alteration we can
# directly run a (NOT) NULL alteration
actions += null_actions
# Combine actions together if we can (e.g. postgres)
if self.connection.features.supports_combined_alters and actions:
sql, params = tuple(zip(*actions))
actions = [(", ".join(sql), sum(params, []))]
# Apply those actions
for sql, params in actions:
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if four_way_default_alteration:
# Update existing rows with default value
self.execute(
self.sql_update_with_default
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(new_field.column),
"default": "%s",
},
[new_default],
)
                # Since we didn't run a NOT NULL change before, we need to
                # do it now.
for sql, params in null_actions:
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if post_actions:
for sql, params in post_actions:
self.execute(sql, params)
# If primary_key changed to False, delete the primary key constraint.
if old_field.primary_key and not new_field.primary_key:
self._delete_primary_key(model, strict)
# Added a unique?
if self._unique_should_be_added(old_field, new_field):
self.execute(self._create_unique_sql(model, [new_field]))
# Added an index? Add an index if db_index switched to True or a unique
# constraint will no longer be used in lieu of an index. The following
# lines from the truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# False | False | True | False
# False | True | True | False
# True | True | True | False
if (
(not old_field.db_index or old_field.unique)
and new_field.db_index
and not new_field.unique
):
self.execute(self._create_index_sql(model, fields=[new_field]))
# Type alteration on primary key? Then we need to alter the column
# referring to us.
rels_to_update = []
if drop_foreign_keys:
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Changed to become primary key?
if self._field_became_primary_key(old_field, new_field):
# Make the new one
self.execute(self._create_primary_key_sql(model, new_field))
# Update all referencing columns
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Handle our type alters on the other end of rels from the PK stuff above
for old_rel, new_rel in rels_to_update:
rel_db_params = new_rel.field.db_parameters(connection=self.connection)
rel_type = rel_db_params["type"]
rel_collation = rel_db_params.get("collation")
old_rel_db_params = old_rel.field.db_parameters(connection=self.connection)
old_rel_collation = old_rel_db_params.get("collation")
fragment, other_actions = self._alter_column_type_sql(
new_rel.related_model,
old_rel.field,
new_rel.field,
rel_type,
old_rel_collation,
rel_collation,
)
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(new_rel.related_model._meta.db_table),
"changes": fragment[0],
},
fragment[1],
)
for sql, params in other_actions:
self.execute(sql, params)
# Does it have a foreign key?
if (
self.connection.features.supports_foreign_keys
and new_field.remote_field
and (
fks_dropped or not old_field.remote_field or not old_field.db_constraint
)
and new_field.db_constraint
):
self.execute(
self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s")
)
# Rebuild FKs that pointed to us if we previously had to drop them
if drop_foreign_keys:
for _, rel in rels_to_update:
if rel.field.db_constraint:
self.execute(
self._create_fk_sql(rel.related_model, rel.field, "_fk")
)
# Does it have check constraints we need to add?
if old_db_params["check"] != new_db_params["check"] and new_db_params["check"]:
constraint_name = self._create_index_name(
model._meta.db_table, [new_field.column], suffix="_check"
)
self.execute(
self._create_check_sql(model, constraint_name, new_db_params["check"])
)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if needs_database_default:
changes_sql, params = self._alter_column_default_sql(
model, old_field, new_field, drop=True
)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def _alter_column_null_sql(self, model, old_field, new_field):
"""
Hook to specialize column null alteration.
Return a (sql, params) fragment to set a column to null or non-null
as required by new_field, or None if no changes are required.
"""
if (
self.connection.features.interprets_empty_strings_as_nulls
and new_field.empty_strings_allowed
):
# The field is nullable in the database anyway, leave it alone.
return
else:
new_db_params = new_field.db_parameters(connection=self.connection)
sql = (
self.sql_alter_column_null
if new_field.null
else self.sql_alter_column_not_null
)
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_db_params["type"],
},
[],
)
def _alter_column_default_sql(self, model, old_field, new_field, drop=False):
"""
Hook to specialize column default alteration.
Return a (sql, params) fragment to add or drop (depending on the drop
argument) a default to new_field's column.
"""
new_default = self.effective_default(new_field)
default = self._column_default_sql(new_field)
params = [new_default]
if drop:
params = []
elif self.connection.features.requires_literal_defaults:
# Some databases (Oracle) can't take defaults as a parameter
# If this is the case, the SchemaEditor for that database should
# implement prepare_default().
default = self.prepare_default(new_default)
params = []
new_db_params = new_field.db_parameters(connection=self.connection)
if drop:
if new_field.null:
sql = self.sql_alter_column_no_default_null
else:
sql = self.sql_alter_column_no_default
else:
sql = self.sql_alter_column_default
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_db_params["type"],
"default": default,
},
params,
)
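    # Example fragment (a sketch for the base templates; new_field is a
    # hypothetical IntegerField(default=0) named "num_pages"):
    #
    #     ('ALTER COLUMN "num_pages" SET DEFAULT %s', [0])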
def _alter_column_type_sql(
self, model, old_field, new_field, new_type, old_collation, new_collation
):
"""
Hook to specialize column type alteration for different backends,
for cases when a creation type is different to an alteration type
(e.g. SERIAL in PostgreSQL, PostGIS fields).
Return a two-tuple of: an SQL fragment of (sql, params) to insert into
an ALTER TABLE statement and a list of extra (sql, params) tuples to
run once the field is altered.
"""
other_actions = []
if collate_sql := self._collate_sql(
new_collation, old_collation, model._meta.db_table
):
collate_sql = f" {collate_sql}"
else:
collate_sql = ""
# Comment change?
comment_sql = ""
if self.connection.features.supports_comments and not new_field.many_to_many:
if old_field.db_comment != new_field.db_comment:
# PostgreSQL and Oracle can't execute 'ALTER COLUMN ...' and
# 'COMMENT ON ...' at the same time.
sql, params = self._alter_column_comment_sql(
model, new_field, new_type, new_field.db_comment
)
if sql:
other_actions.append((sql, params))
if new_field.db_comment:
comment_sql = self._comment_sql(new_field.db_comment)
return (
(
self.sql_alter_column_type
% {
"column": self.quote_name(new_field.column),
"type": new_type,
"collation": collate_sql,
"comment": comment_sql,
},
[],
),
other_actions,
)
def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment):
return (
self.sql_alter_column_comment
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(new_field.column),
"comment": self._comment_sql(new_db_comment),
},
[],
)
def _comment_sql(self, comment):
return self.quote_value(comment or "")
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""Alter M2Ms to repoint their to= endpoints."""
# Rename the through table
if (
old_field.remote_field.through._meta.db_table
!= new_field.remote_field.through._meta.db_table
):
self.alter_db_table(
old_field.remote_field.through,
old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table,
)
# Repoint the FK to the other side
self.alter_field(
new_field.remote_field.through,
# The field that points to the target model is needed, so we can
# tell alter_field to change it - this is m2m_reverse_field_name()
# (as opposed to m2m_field_name(), which points to our model).
old_field.remote_field.through._meta.get_field(
old_field.m2m_reverse_field_name()
),
new_field.remote_field.through._meta.get_field(
new_field.m2m_reverse_field_name()
),
)
self.alter_field(
new_field.remote_field.through,
            # For self-referential models, the field needs to be altered from
            # the other end too.
old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()),
)
def _create_index_name(self, table_name, column_names, suffix=""):
"""
Generate a unique name for an index/unique constraint.
The name is divided into 3 parts: the table name, the column names,
and a unique digest and suffix.
"""
_, table_name = split_identifier(table_name)
hash_suffix_part = "%s%s" % (
names_digest(table_name, *column_names, length=8),
suffix,
)
max_length = self.connection.ops.max_name_length() or 200
# If everything fits into max_length, use that name.
index_name = "%s_%s_%s" % (table_name, "_".join(column_names), hash_suffix_part)
if len(index_name) <= max_length:
return index_name
# Shorten a long suffix.
if len(hash_suffix_part) > max_length / 3:
hash_suffix_part = hash_suffix_part[: max_length // 3]
other_length = (max_length - len(hash_suffix_part)) // 2 - 1
index_name = "%s_%s_%s" % (
table_name[:other_length],
"_".join(column_names)[:other_length],
hash_suffix_part,
)
# Prepend D if needed to prevent the name from starting with an
# underscore or a number (not permitted on Oracle).
if index_name[0] == "_" or index_name[0].isdigit():
index_name = "D%s" % index_name[:-1]
return index_name
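    # Example (a sketch; the 8-character digest depends on the inputs):
    #
    #     editor._create_index_name("author", ["first_name", "last_name"], "_idx")
    #     # -> "author_first_name_last_name_1a2b3c4d_idx"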
def _get_index_tablespace_sql(self, model, fields, db_tablespace=None):
if db_tablespace is None:
if len(fields) == 1 and fields[0].db_tablespace:
db_tablespace = fields[0].db_tablespace
elif settings.DEFAULT_INDEX_TABLESPACE:
db_tablespace = settings.DEFAULT_INDEX_TABLESPACE
elif model._meta.db_tablespace:
db_tablespace = model._meta.db_tablespace
if db_tablespace is not None:
return " " + self.connection.ops.tablespace_sql(db_tablespace)
return ""
def _index_condition_sql(self, condition):
if condition:
return " WHERE " + condition
return ""
def _index_include_sql(self, model, columns):
if not columns or not self.connection.features.supports_covering_indexes:
return ""
return Statement(
" INCLUDE (%(columns)s)",
columns=Columns(model._meta.db_table, columns, self.quote_name),
)
def _create_index_sql(
self,
model,
*,
fields=None,
name=None,
suffix="",
using="",
db_tablespace=None,
col_suffixes=(),
sql=None,
opclasses=(),
condition=None,
include=None,
expressions=None,
):
"""
Return the SQL statement to create the index for one or several fields
or expressions. `sql` can be specified if the syntax differs from the
standard (GIS indexes, ...).
"""
fields = fields or []
expressions = expressions or []
compiler = Query(model, alias_cols=False).get_compiler(
connection=self.connection,
)
tablespace_sql = self._get_index_tablespace_sql(
model, fields, db_tablespace=db_tablespace
)
columns = [field.column for field in fields]
sql_create_index = sql or self.sql_create_index
table = model._meta.db_table
def create_index_name(*args, **kwargs):
nonlocal name
if name is None:
name = self._create_index_name(*args, **kwargs)
return self.quote_name(name)
return Statement(
sql_create_index,
table=Table(table, self.quote_name),
name=IndexName(table, columns, suffix, create_index_name),
using=using,
columns=(
self._index_columns(table, columns, col_suffixes, opclasses)
if columns
else Expressions(table, expressions, compiler, self.quote_value)
),
extra=tablespace_sql,
condition=self._index_condition_sql(condition),
include=self._index_include_sql(model, include),
)
def _delete_index_sql(self, model, name, sql=None):
return Statement(
sql or self.sql_delete_index,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
)
def _rename_index_sql(self, model, old_name, new_name):
return Statement(
self.sql_rename_index,
table=Table(model._meta.db_table, self.quote_name),
old_name=self.quote_name(old_name),
new_name=self.quote_name(new_name),
)
def _index_columns(self, table, columns, col_suffixes, opclasses):
return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes)
def _model_indexes_sql(self, model):
"""
Return a list of all index SQL statements (field indexes,
index_together, Meta.indexes) for the specified model.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for field in model._meta.local_fields:
output.extend(self._field_indexes_sql(model, field))
for field_names in model._meta.index_together:
fields = [model._meta.get_field(field) for field in field_names]
output.append(self._create_index_sql(model, fields=fields, suffix="_idx"))
for index in model._meta.indexes:
if (
not index.contains_expressions
or self.connection.features.supports_expression_indexes
):
output.append(index.create_sql(model, self))
return output
def _field_indexes_sql(self, model, field):
"""
Return a list of all index SQL statements for the specified field.
"""
output = []
if self._field_should_be_indexed(model, field):
output.append(self._create_index_sql(model, fields=[field]))
return output
def _field_should_be_altered(self, old_field, new_field, ignore=None):
ignore = ignore or set()
_, old_path, old_args, old_kwargs = old_field.deconstruct()
_, new_path, new_args, new_kwargs = new_field.deconstruct()
# Don't alter when:
# - changing only a field name
# - changing an attribute that doesn't affect the schema
# - changing an attribute in the provided set of ignored attributes
# - adding only a db_column and the column name is not changed
for attr in ignore.union(old_field.non_db_attrs):
old_kwargs.pop(attr, None)
for attr in ignore.union(new_field.non_db_attrs):
new_kwargs.pop(attr, None)
return self.quote_name(old_field.column) != self.quote_name(
new_field.column
) or (old_path, old_args, old_kwargs) != (new_path, new_args, new_kwargs)
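    # Sketch: attributes listed in Field.non_db_attrs (verbose_name,
    # help_text, ...) are popped above, so a change touching only those
    # returns False and no ALTER is issued:
    #
    #     old = models.CharField(max_length=100)
    #     new = models.CharField(max_length=100, help_text="hint")
    #     # After set_attributes_from_name("name") on both:
    #     editor._field_should_be_altered(old, new)  # -> False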
def _field_should_be_indexed(self, model, field):
return field.db_index and not field.unique
def _field_became_primary_key(self, old_field, new_field):
return not old_field.primary_key and new_field.primary_key
def _unique_should_be_added(self, old_field, new_field):
return (
not new_field.primary_key
and new_field.unique
and (not old_field.unique or old_field.primary_key)
)
def _rename_field_sql(self, table, old_field, new_field, new_type):
return self.sql_rename_column % {
"table": self.quote_name(table),
"old_column": self.quote_name(old_field.column),
"new_column": self.quote_name(new_field.column),
"type": new_type,
}
def _create_fk_sql(self, model, field, suffix):
table = Table(model._meta.db_table, self.quote_name)
name = self._fk_constraint_name(model, field, suffix)
column = Columns(model._meta.db_table, [field.column], self.quote_name)
to_table = Table(field.target_field.model._meta.db_table, self.quote_name)
to_column = Columns(
field.target_field.model._meta.db_table,
[field.target_field.column],
self.quote_name,
)
deferrable = self.connection.ops.deferrable_sql()
return Statement(
self.sql_create_fk,
table=table,
name=name,
column=column,
to_table=to_table,
to_column=to_column,
deferrable=deferrable,
)
def _fk_constraint_name(self, model, field, suffix):
def create_fk_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
return ForeignKeyName(
model._meta.db_table,
[field.column],
split_identifier(field.target_field.model._meta.db_table)[1],
[field.target_field.column],
suffix,
create_fk_name,
)
def _delete_fk_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_fk, model, name)
def _deferrable_constraint_sql(self, deferrable):
if deferrable is None:
return ""
if deferrable == Deferrable.DEFERRED:
return " DEFERRABLE INITIALLY DEFERRED"
if deferrable == Deferrable.IMMEDIATE:
return " DEFERRABLE INITIALLY IMMEDIATE"
def _unique_sql(
self,
model,
fields,
name,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
):
return None
if condition or include or opclasses or expressions:
# Databases support conditional, covering, and functional unique
# constraints via a unique index.
sql = self._create_unique_sql(
model,
fields,
name=name,
condition=condition,
include=include,
opclasses=opclasses,
expressions=expressions,
)
if sql:
self.deferred_sql.append(sql)
return None
constraint = self.sql_unique_constraint % {
"columns": ", ".join([self.quote_name(field.column) for field in fields]),
"deferrable": self._deferrable_constraint_sql(deferrable),
}
return self.sql_constraint % {
"name": self.quote_name(name),
"constraint": constraint,
}
def _create_unique_sql(
self,
model,
fields,
name=None,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
(
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
)
or (condition and not self.connection.features.supports_partial_indexes)
or (include and not self.connection.features.supports_covering_indexes)
or (
expressions and not self.connection.features.supports_expression_indexes
)
):
return None
compiler = Query(model, alias_cols=False).get_compiler(
connection=self.connection
)
table = model._meta.db_table
columns = [field.column for field in fields]
if name is None:
name = self._unique_constraint_name(table, columns, quote=True)
else:
name = self.quote_name(name)
if condition or include or opclasses or expressions:
sql = self.sql_create_unique_index
else:
sql = self.sql_create_unique
if columns:
columns = self._index_columns(
table, columns, col_suffixes=(), opclasses=opclasses
)
else:
columns = Expressions(table, expressions, compiler, self.quote_value)
return Statement(
sql,
table=Table(table, self.quote_name),
name=name,
columns=columns,
condition=self._index_condition_sql(condition),
deferrable=self._deferrable_constraint_sql(deferrable),
include=self._index_include_sql(model, include),
)
def _unique_constraint_name(self, table, columns, quote=True):
if quote:
def create_unique_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
else:
create_unique_name = self._create_index_name
return IndexName(table, columns, "_uniq", create_unique_name)
def _delete_unique_sql(
self,
model,
name,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
(
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
)
or (condition and not self.connection.features.supports_partial_indexes)
or (include and not self.connection.features.supports_covering_indexes)
or (
expressions and not self.connection.features.supports_expression_indexes
)
):
return None
if condition or include or opclasses or expressions:
sql = self.sql_delete_index
else:
sql = self.sql_delete_unique
return self._delete_constraint_sql(sql, model, name)
def _check_sql(self, name, check):
return self.sql_constraint % {
"name": self.quote_name(name),
"constraint": self.sql_check_constraint % {"check": check},
}
def _create_check_sql(self, model, name, check):
return Statement(
self.sql_create_check,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
check=check,
)
def _delete_check_sql(self, model, name):
if not self.connection.features.supports_table_check_constraints:
return None
return self._delete_constraint_sql(self.sql_delete_check, model, name)
def _delete_constraint_sql(self, template, model, name):
return Statement(
template,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
)
def _constraint_names(
self,
model,
column_names=None,
unique=None,
primary_key=None,
index=None,
foreign_key=None,
check=None,
type_=None,
exclude=None,
):
"""Return all constraint names matching the columns and conditions."""
if column_names is not None:
column_names = [
self.connection.introspection.identifier_converter(name)
for name in column_names
]
with self.connection.cursor() as cursor:
constraints = self.connection.introspection.get_constraints(
cursor, model._meta.db_table
)
result = []
for name, infodict in constraints.items():
if column_names is None or column_names == infodict["columns"]:
if unique is not None and infodict["unique"] != unique:
continue
if primary_key is not None and infodict["primary_key"] != primary_key:
continue
if index is not None and infodict["index"] != index:
continue
if check is not None and infodict["check"] != check:
continue
if foreign_key is not None and not infodict["foreign_key"]:
continue
if type_ is not None and infodict["type"] != type_:
continue
if not exclude or name not in exclude:
result.append(name)
return result
def _delete_primary_key(self, model, strict=False):
constraint_names = self._constraint_names(model, primary_key=True)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of PK constraints for %s"
% (
len(constraint_names),
model._meta.db_table,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_primary_key_sql(model, constraint_name))
def _create_primary_key_sql(self, model, field):
return Statement(
self.sql_create_pk,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(
self._create_index_name(
model._meta.db_table, [field.column], suffix="_pk"
)
),
columns=Columns(model._meta.db_table, [field.column], self.quote_name),
)
def _delete_primary_key_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_pk, model, name)
def _collate_sql(self, collation, old_collation=None, table_name=None):
return "COLLATE " + self.quote_name(collation) if collation else ""
def remove_procedure(self, procedure_name, param_types=()):
sql = self.sql_delete_procedure % {
"procedure": self.quote_name(procedure_name),
"param_types": ",".join(param_types),
}
self.execute(sql)
|
33314c70190d33f4bdd53f204e3a45d75edfbf844470248c45b2d1db9db22563 | import operator
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
empty_fetchmany_value = ()
allows_group_by_selected_pks = True
related_fields_match_type = True
# MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME.
allow_sliced_subqueries_with_in = False
has_select_for_update = True
supports_forward_references = False
supports_regex_backreferencing = False
supports_date_lookup_using_string = False
supports_timezones = False
requires_explicit_null_ordering_when_grouping = True
atomic_transactions = False
can_clone_databases = True
supports_comments = True
supports_comments_inline = True
supports_temporal_subtraction = True
supports_slicing_ordering_in_compound = True
supports_index_on_text_field = False
supports_update_conflicts = True
create_test_procedure_without_params_sql = """
CREATE PROCEDURE test_procedure ()
BEGIN
DECLARE V_I INTEGER;
SET V_I = 1;
END;
"""
create_test_procedure_with_int_param_sql = """
CREATE PROCEDURE test_procedure (P_I INTEGER)
BEGIN
DECLARE V_I INTEGER;
SET V_I = P_I;
END;
"""
create_test_table_with_composite_primary_key = """
CREATE TABLE test_table_composite_pk (
column_1 INTEGER NOT NULL,
column_2 INTEGER NOT NULL,
PRIMARY KEY(column_1, column_2)
)
"""
    # Neither MySQL nor MariaDB supports partial indexes.
supports_partial_indexes = False
# COLLATE must be wrapped in parentheses because MySQL treats COLLATE as an
# indexed expression.
collate_as_index_expression = True
supports_order_by_nulls_modifier = False
order_by_nulls_first = True
supports_logical_xor = True
@cached_property
def minimum_database_version(self):
if self.connection.mysql_is_mariadb:
return (10, 4)
else:
return (8,)
@cached_property
def test_collations(self):
charset = "utf8"
if (
self.connection.mysql_is_mariadb
and self.connection.mysql_version >= (10, 6)
) or (
not self.connection.mysql_is_mariadb
and self.connection.mysql_version >= (8, 0, 30)
):
# utf8 is an alias for utf8mb3 in MariaDB 10.6+ and MySQL 8.0.30+.
charset = "utf8mb3"
return {
"ci": f"{charset}_general_ci",
"non_default": f"{charset}_esperanto_ci",
"swedish_ci": f"{charset}_swedish_ci",
}
test_now_utc_template = "UTC_TIMESTAMP(6)"
@cached_property
def django_test_skips(self):
skips = {
"This doesn't work on MySQL.": {
"db_functions.comparison.test_greatest.GreatestTests."
"test_coalesce_workaround",
"db_functions.comparison.test_least.LeastTests."
"test_coalesce_workaround",
},
"Running on MySQL requires utf8mb4 encoding (#18392).": {
"model_fields.test_textfield.TextFieldTests.test_emoji",
"model_fields.test_charfield.TestCharField.test_emoji",
},
"MySQL doesn't support functional indexes on a function that "
"returns JSON": {
"schema.tests.SchemaTests.test_func_index_json_key_transform",
},
"MySQL supports multiplying and dividing DurationFields by a "
"scalar value but it's not implemented (#25287).": {
"expressions.tests.FTimeDeltaTests.test_durationfield_multiply_divide",
},
"UPDATE ... ORDER BY syntax on MySQL/MariaDB does not support ordering by"
"related fields.": {
"update.tests.AdvancedTests."
"test_update_ordered_by_inline_m2m_annotation",
"update.tests.AdvancedTests.test_update_ordered_by_m2m_annotation",
},
}
if self.connection.mysql_is_mariadb and (
10,
4,
3,
) < self.connection.mysql_version < (10, 5, 2):
skips.update(
{
"https://jira.mariadb.org/browse/MDEV-19598": {
"schema.tests.SchemaTests."
"test_alter_not_unique_field_to_primary_key",
},
}
)
if self.connection.mysql_is_mariadb and (
10,
4,
12,
) < self.connection.mysql_version < (10, 5):
skips.update(
{
"https://jira.mariadb.org/browse/MDEV-22775": {
"schema.tests.SchemaTests."
"test_alter_pk_with_self_referential_field",
},
}
)
if not self.supports_explain_analyze:
skips.update(
{
"MariaDB and MySQL >= 8.0.18 specific.": {
"queries.test_explain.ExplainTests.test_mysql_analyze",
},
}
)
return skips
@cached_property
def _mysql_storage_engine(self):
"Internal method used in Django tests. Don't rely on this from your code"
return self.connection.mysql_server_data["default_storage_engine"]
@cached_property
def allows_auto_pk_0(self):
"""
Autoincrement primary key can be set to 0 if it doesn't generate new
autoincrement values.
"""
return "NO_AUTO_VALUE_ON_ZERO" in self.connection.sql_mode
@cached_property
def update_can_self_select(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
10,
3,
2,
)
@cached_property
def can_introspect_foreign_keys(self):
"Confirm support for introspected foreign keys"
return self._mysql_storage_engine != "MyISAM"
@cached_property
def introspected_field_types(self):
return {
**super().introspected_field_types,
"BinaryField": "TextField",
"BooleanField": "IntegerField",
"DurationField": "BigIntegerField",
"GenericIPAddressField": "CharField",
}
@cached_property
def can_return_columns_from_insert(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
10,
5,
0,
)
can_return_rows_from_bulk_insert = property(
operator.attrgetter("can_return_columns_from_insert")
)
@cached_property
def has_zoneinfo_database(self):
return self.connection.mysql_server_data["has_zoneinfo_database"]
@cached_property
def is_sql_auto_is_null_enabled(self):
return self.connection.mysql_server_data["sql_auto_is_null"]
@cached_property
def supports_over_clause(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 2)
supports_frame_range_fixed_distance = property(
operator.attrgetter("supports_over_clause")
)
@cached_property
def supports_column_check_constraints(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 16)
supports_table_check_constraints = property(
operator.attrgetter("supports_column_check_constraints")
)
@cached_property
def can_introspect_check_constraints(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 16)
@cached_property
def has_select_for_update_skip_locked(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 6)
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def has_select_for_update_nowait(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def has_select_for_update_of(self):
return (
not self.connection.mysql_is_mariadb
and self.connection.mysql_version >= (8, 0, 1)
)
@cached_property
def supports_explain_analyze(self):
return self.connection.mysql_is_mariadb or self.connection.mysql_version >= (
8,
0,
18,
)
@cached_property
def supported_explain_formats(self):
# Alias MySQL's TRADITIONAL to TEXT for consistency with other
# backends.
formats = {"JSON", "TEXT", "TRADITIONAL"}
if not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
8,
0,
16,
):
formats.add("TREE")
return formats
@cached_property
def supports_transactions(self):
"""
All storage engines except MyISAM support transactions.
"""
return self._mysql_storage_engine != "MyISAM"
uses_savepoints = property(operator.attrgetter("supports_transactions"))
can_release_savepoints = property(operator.attrgetter("supports_transactions"))
@cached_property
def ignores_table_name_case(self):
return self.connection.mysql_server_data["lower_case_table_names"]
@cached_property
def supports_default_in_lead_lag(self):
# To be added in https://jira.mariadb.org/browse/MDEV-12981.
return not self.connection.mysql_is_mariadb
@cached_property
def can_introspect_json_field(self):
if self.connection.mysql_is_mariadb:
return self.can_introspect_check_constraints
return True
@cached_property
def supports_index_column_ordering(self):
if self._mysql_storage_engine != "InnoDB":
return False
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 8)
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def supports_expression_indexes(self):
return (
not self.connection.mysql_is_mariadb
and self._mysql_storage_engine != "MyISAM"
and self.connection.mysql_version >= (8, 0, 13)
)
@cached_property
def supports_select_intersection(self):
is_mariadb = self.connection.mysql_is_mariadb
return is_mariadb or self.connection.mysql_version >= (8, 0, 31)
supports_select_difference = property(
operator.attrgetter("supports_select_intersection")
)
@cached_property
def can_rename_index(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 5, 2)
return True
|
746a52aca012ea9d952a3e262ca7d3425e2b0fbf0351b38831d47a2bd354f4ef | from collections import namedtuple
import sqlparse
from MySQLdb.constants import FIELD_TYPE
from django.db.backends.base.introspection import BaseDatabaseIntrospection
from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo
from django.db.backends.base.introspection import TableInfo as BaseTableInfo
from django.db.models import Index
from django.utils.datastructures import OrderedSet
FieldInfo = namedtuple(
"FieldInfo",
BaseFieldInfo._fields + ("extra", "is_unsigned", "has_json_constraint", "comment"),
)
InfoLine = namedtuple(
"InfoLine",
"col_name data_type max_len num_prec num_scale extra column_default "
"collation is_unsigned comment",
)
TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",))
class DatabaseIntrospection(BaseDatabaseIntrospection):
data_types_reverse = {
FIELD_TYPE.BLOB: "TextField",
FIELD_TYPE.CHAR: "CharField",
FIELD_TYPE.DECIMAL: "DecimalField",
FIELD_TYPE.NEWDECIMAL: "DecimalField",
FIELD_TYPE.DATE: "DateField",
FIELD_TYPE.DATETIME: "DateTimeField",
FIELD_TYPE.DOUBLE: "FloatField",
FIELD_TYPE.FLOAT: "FloatField",
FIELD_TYPE.INT24: "IntegerField",
FIELD_TYPE.JSON: "JSONField",
FIELD_TYPE.LONG: "IntegerField",
FIELD_TYPE.LONGLONG: "BigIntegerField",
FIELD_TYPE.SHORT: "SmallIntegerField",
FIELD_TYPE.STRING: "CharField",
FIELD_TYPE.TIME: "TimeField",
FIELD_TYPE.TIMESTAMP: "DateTimeField",
FIELD_TYPE.TINY: "IntegerField",
FIELD_TYPE.TINY_BLOB: "TextField",
FIELD_TYPE.MEDIUM_BLOB: "TextField",
FIELD_TYPE.LONG_BLOB: "TextField",
FIELD_TYPE.VAR_STRING: "CharField",
}
def get_field_type(self, data_type, description):
field_type = super().get_field_type(data_type, description)
if "auto_increment" in description.extra:
if field_type == "IntegerField":
return "AutoField"
elif field_type == "BigIntegerField":
return "BigAutoField"
elif field_type == "SmallIntegerField":
return "SmallAutoField"
if description.is_unsigned:
if field_type == "BigIntegerField":
return "PositiveBigIntegerField"
elif field_type == "IntegerField":
return "PositiveIntegerField"
elif field_type == "SmallIntegerField":
return "PositiveSmallIntegerField"
        # The JSON data type is an alias for LONGTEXT in MariaDB; use check
        # constraint clauses to introspect JSONField.
if description.has_json_constraint:
return "JSONField"
return field_type
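    # Illustrative mappings for the rules above (a sketch):
    #
    #     FIELD_TYPE.LONG with "auto_increment" in extra   -> "AutoField"
    #     FIELD_TYPE.LONGLONG with is_unsigned             -> "PositiveBigIntegerField"
    #     LONGTEXT with a json_valid() check (MariaDB)     -> "JSONField"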
def get_table_list(self, cursor):
"""Return a list of table and view names in the current database."""
cursor.execute(
"""
SELECT
table_name,
table_type,
table_comment
FROM information_schema.tables
WHERE table_schema = DATABASE()
"""
)
return [
TableInfo(row[0], {"BASE TABLE": "t", "VIEW": "v"}.get(row[1]), row[2])
for row in cursor.fetchall()
]
def get_table_description(self, cursor, table_name):
"""
        Return a description of the table with the DB-API cursor.description
        interface.
"""
json_constraints = {}
if (
self.connection.mysql_is_mariadb
and self.connection.features.can_introspect_json_field
):
            # The JSON data type is an alias for LONGTEXT in MariaDB; select
            # JSON_VALID() constraints to introspect JSONField.
cursor.execute(
"""
SELECT c.constraint_name AS column_name
FROM information_schema.check_constraints AS c
WHERE
c.table_name = %s AND
LOWER(c.check_clause) =
'json_valid(`' + LOWER(c.constraint_name) + '`)' AND
c.constraint_schema = DATABASE()
""",
[table_name],
)
json_constraints = {row[0] for row in cursor.fetchall()}
# A default collation for the given table.
cursor.execute(
"""
SELECT table_collation
FROM information_schema.tables
WHERE table_schema = DATABASE()
AND table_name = %s
""",
[table_name],
)
row = cursor.fetchone()
default_column_collation = row[0] if row else ""
# information_schema database gives more accurate results for some figures:
# - varchar length returned by cursor.description is an internal length,
# not visible length (#5725)
# - precision and scale (for decimal fields) (#5014)
# - auto_increment is not available in cursor.description
cursor.execute(
"""
SELECT
column_name, data_type, character_maximum_length,
numeric_precision, numeric_scale, extra, column_default,
CASE
WHEN collation_name = %s THEN NULL
ELSE collation_name
END AS collation_name,
CASE
WHEN column_type LIKE '%% unsigned' THEN 1
ELSE 0
END AS is_unsigned,
column_comment
FROM information_schema.columns
WHERE table_name = %s AND table_schema = DATABASE()
""",
[default_column_collation, table_name],
)
field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}
cursor.execute(
"SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)
)
def to_int(i):
return int(i) if i is not None else i
fields = []
for line in cursor.description:
info = field_info[line[0]]
fields.append(
FieldInfo(
*line[:2],
to_int(info.max_len) or line[2],
to_int(info.max_len) or line[3],
to_int(info.num_prec) or line[4],
to_int(info.num_scale) or line[5],
line[6],
info.column_default,
info.collation,
info.extra,
info.is_unsigned,
line[0] in json_constraints,
info.comment,
)
)
return fields
def get_sequences(self, cursor, table_name, table_fields=()):
for field_info in self.get_table_description(cursor, table_name):
if "auto_increment" in field_info.extra:
# MySQL allows only one auto-increment column per table.
return [{"table": table_name, "column": field_info.name}]
return []
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all foreign keys in the given table.
"""
cursor.execute(
"""
SELECT column_name, referenced_column_name, referenced_table_name
FROM information_schema.key_column_usage
WHERE table_name = %s
AND table_schema = DATABASE()
AND referenced_table_name IS NOT NULL
AND referenced_column_name IS NOT NULL
""",
[table_name],
)
return {
field_name: (other_field, other_table)
for field_name, other_field, other_table in cursor.fetchall()
}
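    # Example return value (a sketch; names are hypothetical):
    #
    #     {"author_id": ("id", "library_author")}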
def get_storage_engine(self, cursor, table_name):
"""
Retrieve the storage engine for a given table. Return the default
storage engine if the table doesn't exist.
"""
cursor.execute(
"""
SELECT engine
FROM information_schema.tables
WHERE
table_name = %s AND
table_schema = DATABASE()
""",
[table_name],
)
result = cursor.fetchone()
if not result:
return self.connection.features._mysql_storage_engine
return result[0]
def _parse_constraint_columns(self, check_clause, columns):
check_columns = OrderedSet()
statement = sqlparse.parse(check_clause)[0]
tokens = (token for token in statement.flatten() if not token.is_whitespace)
for token in tokens:
if (
token.ttype == sqlparse.tokens.Name
and self.connection.ops.quote_name(token.value) == token.value
and token.value[1:-1] in columns
):
check_columns.add(token.value[1:-1])
return check_columns
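    # Example (a sketch): with columns = {"data", "id"},
    #
    #     self._parse_constraint_columns("json_valid(`data`)", columns)
    #     # -> OrderedSet(["data"])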
def get_constraints(self, cursor, table_name):
"""
Retrieve any constraints or keys (unique, pk, fk, check, index) across
one or more columns.
"""
constraints = {}
# Get the actual constraint names and columns
name_query = """
SELECT kc.`constraint_name`, kc.`column_name`,
kc.`referenced_table_name`, kc.`referenced_column_name`,
c.`constraint_type`
FROM
information_schema.key_column_usage AS kc,
information_schema.table_constraints AS c
WHERE
kc.table_schema = DATABASE() AND
c.table_schema = kc.table_schema AND
c.constraint_name = kc.constraint_name AND
c.constraint_type != 'CHECK' AND
kc.table_name = %s
ORDER BY kc.`ordinal_position`
"""
cursor.execute(name_query, [table_name])
for constraint, column, ref_table, ref_column, kind in cursor.fetchall():
if constraint not in constraints:
constraints[constraint] = {
"columns": OrderedSet(),
"primary_key": kind == "PRIMARY KEY",
"unique": kind in {"PRIMARY KEY", "UNIQUE"},
"index": False,
"check": False,
"foreign_key": (ref_table, ref_column) if ref_column else None,
}
if self.connection.features.supports_index_column_ordering:
constraints[constraint]["orders"] = []
constraints[constraint]["columns"].add(column)
# Add check constraints.
if self.connection.features.can_introspect_check_constraints:
unnamed_constraints_index = 0
columns = {
info.name for info in self.get_table_description(cursor, table_name)
}
if self.connection.mysql_is_mariadb:
type_query = """
SELECT c.constraint_name, c.check_clause
FROM information_schema.check_constraints AS c
WHERE
c.constraint_schema = DATABASE() AND
c.table_name = %s
"""
else:
type_query = """
SELECT cc.constraint_name, cc.check_clause
FROM
information_schema.check_constraints AS cc,
information_schema.table_constraints AS tc
WHERE
cc.constraint_schema = DATABASE() AND
tc.table_schema = cc.constraint_schema AND
cc.constraint_name = tc.constraint_name AND
tc.constraint_type = 'CHECK' AND
tc.table_name = %s
"""
cursor.execute(type_query, [table_name])
for constraint, check_clause in cursor.fetchall():
constraint_columns = self._parse_constraint_columns(
check_clause, columns
)
# Ensure uniqueness of unnamed constraints. Unnamed unique
# and check column constraints have the same name as a
# column.
if set(constraint_columns) == {constraint}:
unnamed_constraints_index += 1
constraint = "__unnamed_constraint_%s__" % unnamed_constraints_index
constraints[constraint] = {
"columns": constraint_columns,
"primary_key": False,
"unique": False,
"index": False,
"check": True,
"foreign_key": None,
}
# Now add in the indexes
cursor.execute(
"SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name)
)
for table, non_unique, index, colseq, column, order, type_ in [
x[:6] + (x[10],) for x in cursor.fetchall()
]:
if index not in constraints:
constraints[index] = {
"columns": OrderedSet(),
"primary_key": False,
"unique": not non_unique,
"check": False,
"foreign_key": None,
}
if self.connection.features.supports_index_column_ordering:
constraints[index]["orders"] = []
constraints[index]["index"] = True
constraints[index]["type"] = (
Index.suffix if type_ == "BTREE" else type_.lower()
)
constraints[index]["columns"].add(column)
if self.connection.features.supports_index_column_ordering:
constraints[index]["orders"].append("DESC" if order == "D" else "ASC")
# Convert the sorted sets to lists
for constraint in constraints.values():
constraint["columns"] = list(constraint["columns"])
return constraints
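# Illustrative example (hypothetical table): a typical entry for a primary
# key on an "id" column, as returned by the method above (the "orders" key
# is present only when the backend supports index column ordering):
#
#     {"PRIMARY": {"columns": ["id"], "primary_key": True, "unique": True,
#                  "index": True, "check": False, "foreign_key": None,
#                  "type": "idx", "orders": ["ASC"]}}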
|
66424e957668bf6a61eaf256c3ca4188150785c0408d716b1f1c0d4478e8e2dd | from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED, F, UniqueConstraint
from django.db.models.constants import LOOKUP_SEP
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
sql_alter_column_type = "MODIFY %(column)s %(type)s%(collation)s%(comment)s"
sql_alter_column_no_default_null = "ALTER COLUMN %(column)s SET DEFAULT NULL"
# No 'CASCADE': it works as a no-op in MySQL but is undocumented.
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_column_inline_fk = (
", ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s(%(to_column)s)"
)
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
sql_rename_index = "ALTER TABLE %(table)s RENAME INDEX %(old_name)s TO %(new_name)s"
sql_create_pk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
)
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"
sql_alter_table_comment = "ALTER TABLE %(table)s COMMENT = %(comment)s"
sql_alter_column_comment = None
@property
def sql_delete_check(self):
if self.connection.mysql_is_mariadb:
# On MariaDB, the name of a column check constraint is the same as the
# field name. Adding an IF EXISTS clause prevents migrations from
# crashing; the constraint is removed during a "MODIFY" column statement.
return "ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(name)s"
return "ALTER TABLE %(table)s DROP CHECK %(name)s"
@property
def sql_rename_column(self):
# MariaDB >= 10.5.2 and MySQL >= 8.0.4 support an
# "ALTER TABLE ... RENAME COLUMN" statement.
if self.connection.mysql_is_mariadb:
if self.connection.mysql_version >= (10, 5, 2):
return super().sql_rename_column
elif self.connection.mysql_version >= (8, 0, 4):
return super().sql_rename_column
return "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
def quote_value(self, value):
self.connection.ensure_connection()
if isinstance(value, str):
value = value.replace("%", "%%")
# MySQLdb escapes to string, PyMySQL to bytes.
quoted = self.connection.connection.escape(
value, self.connection.connection.encoders
)
if isinstance(value, str) and isinstance(quoted, bytes):
quoted = quoted.decode()
return quoted
def _is_limited_data_type(self, field):
db_type = field.db_type(self.connection)
return (
db_type is not None
and db_type.lower() in self.connection._limited_data_types
)
def skip_default(self, field):
if not self._supports_limited_data_type_defaults:
return self._is_limited_data_type(field)
return False
def skip_default_on_alter(self, field):
if self._is_limited_data_type(field) and not self.connection.mysql_is_mariadb:
# MySQL doesn't support defaults for BLOB and TEXT in the
# ALTER COLUMN statement.
return True
return False
@property
def _supports_limited_data_type_defaults(self):
# MariaDB and MySQL >= 8.0.13 support defaults for BLOB and TEXT.
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 13)
def _column_default_sql(self, field):
if (
not self.connection.mysql_is_mariadb
and self._supports_limited_data_type_defaults
and self._is_limited_data_type(field)
):
# MySQL supports defaults for BLOB and TEXT columns only if the
# default value is written as an expression, i.e. in parentheses.
return "(%s)"
return super()._column_default_sql(field)
def add_field(self, model, field):
super().add_field(model, field)
# Simulate the effect of a one-off default.
# field.default may be unhashable, so a set isn't used for the "in" check.
if self.skip_default(field) and field.default not in (None, NOT_PROVIDED):
effective_default = self.effective_default(field)
self.execute(
"UPDATE %(table)s SET %(column)s = %%s"
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
},
[effective_default],
)
def remove_constraint(self, model, constraint):
if isinstance(constraint, UniqueConstraint):
self._create_missing_fk_index(
model,
fields=constraint.fields,
expressions=constraint.expressions,
)
super().remove_constraint(model, constraint)
def remove_index(self, model, index):
self._create_missing_fk_index(
model,
fields=[field_name for field_name, _ in index.fields_orders],
expressions=index.expressions,
)
super().remove_index(model, index)
def _field_should_be_indexed(self, model, field):
if not super()._field_should_be_indexed(model, field):
return False
storage = self.connection.introspection.get_storage_engine(
self.connection.cursor(), model._meta.db_table
)
# There's no need to create an index for ForeignKey fields unless
# db_constraint=False, because the FK constraint itself already
# creates an index.
if (
storage == "InnoDB"
and field.get_internal_type() == "ForeignKey"
and field.db_constraint
):
return False
return not self._is_limited_data_type(field)
def _create_missing_fk_index(
self,
model,
*,
fields,
expressions=None,
):
"""
MySQL can remove an implicit FK index on a field when that field is
covered by another index like a unique_together. "covered" here means
that the more complex index has the FK field as its first field (see
https://bugs.mysql.com/bug.php?id=37910).
Manually create an implicit FK index to make it possible to remove the
composed index.
"""
first_field_name = None
if fields:
first_field_name = fields[0]
elif (
expressions
and self.connection.features.supports_expression_indexes
and isinstance(expressions[0], F)
and LOOKUP_SEP not in expressions[0].name
):
first_field_name = expressions[0].name
if not first_field_name:
return
first_field = model._meta.get_field(first_field_name)
if first_field.get_internal_type() == "ForeignKey":
column = self.connection.introspection.identifier_converter(
first_field.column
)
with self.connection.cursor() as cursor:
constraint_names = [
name
for name, infodict in self.connection.introspection.get_constraints(
cursor, model._meta.db_table
).items()
if infodict["index"] and infodict["columns"][0] == column
]
# There are no other indexes that start with the FK field, only
# the one that is expected to be deleted.
if len(constraint_names) == 1:
self.execute(
self._create_index_sql(model, fields=[first_field], suffix="")
)
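# Illustrative scenario (hypothetical model): with unique_together on
# ("author", "title"), MySQL may reuse that composite index to satisfy the
# FK on "author". Dropping the composite index would leave the FK without
# an index, so a single-column index on the FK field is created first when
# the composite index is the only one starting with that column.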
def _delete_composed_index(self, model, fields, *args):
self._create_missing_fk_index(model, fields=fields)
return super()._delete_composed_index(model, fields, *args)
def _set_field_new_type_null_status(self, field, new_type):
"""
Keep the null property of the old field. If it has changed, it will be
handled separately.
"""
if field.null:
new_type += " NULL"
else:
new_type += " NOT NULL"
return new_type
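# Illustrative example: the suffix follows the *old* field's null flag,
# e.g. for a nullable old field:
#
#     self._set_field_new_type_null_status(old_field, "varchar(20)")
#     # -> "varchar(20) NULL"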
def _alter_column_type_sql(
self, model, old_field, new_field, new_type, old_collation, new_collation
):
new_type = self._set_field_new_type_null_status(old_field, new_type)
return super()._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
def _rename_field_sql(self, table, old_field, new_field, new_type):
new_type = self._set_field_new_type_null_status(old_field, new_type)
return super()._rename_field_sql(table, old_field, new_field, new_type)
def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment):
# The comment is altered when altering the column type.
return "", []
def _comment_sql(self, comment):
comment_sql = super()._comment_sql(comment)
return f" COMMENT {comment_sql}"
|
479b19f9dd1326d4292fdba32729074f0d10511c974b7a60e1b8905706eaadc7 | import operator
from django.db import DataError, InterfaceError
from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.backends.postgresql.psycopg_any import is_psycopg3
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
minimum_database_version = (12,)
allows_group_by_selected_pks = True
can_return_columns_from_insert = True
can_return_rows_from_bulk_insert = True
has_real_datatype = True
has_native_uuid_field = True
has_native_duration_field = True
has_native_json_field = True
can_defer_constraint_checks = True
has_select_for_update = True
has_select_for_update_nowait = True
has_select_for_update_of = True
has_select_for_update_skip_locked = True
has_select_for_no_key_update = True
can_release_savepoints = True
supports_comments = True
supports_tablespaces = True
supports_transactions = True
can_introspect_materialized_views = True
can_distinct_on_fields = True
can_rollback_ddl = True
schema_editor_uses_clientside_param_binding = True
supports_combined_alters = True
nulls_order_largest = True
closed_cursor_error_class = InterfaceError
greatest_least_ignores_nulls = True
can_clone_databases = True
supports_temporal_subtraction = True
supports_slicing_ordering_in_compound = True
create_test_procedure_without_params_sql = """
CREATE FUNCTION test_procedure () RETURNS void AS $$
DECLARE
V_I INTEGER;
BEGIN
V_I := 1;
END;
$$ LANGUAGE plpgsql;"""
create_test_procedure_with_int_param_sql = """
CREATE FUNCTION test_procedure (P_I INTEGER) RETURNS void AS $$
DECLARE
V_I INTEGER;
BEGIN
V_I := P_I;
END;
$$ LANGUAGE plpgsql;"""
create_test_table_with_composite_primary_key = """
CREATE TABLE test_table_composite_pk (
column_1 INTEGER NOT NULL,
column_2 INTEGER NOT NULL,
PRIMARY KEY(column_1, column_2)
)
"""
requires_casted_case_in_updates = True
supports_over_clause = True
only_supports_unbounded_with_preceding_and_following = True
supports_aggregate_filter_clause = True
supported_explain_formats = {"JSON", "TEXT", "XML", "YAML"}
supports_deferrable_unique_constraints = True
has_json_operators = True
json_key_contains_list_matching_requires_list = True
supports_update_conflicts = True
supports_update_conflicts_with_target = True
supports_covering_indexes = True
can_rename_index = True
test_collations = {
"non_default": "sv-x-icu",
"swedish_ci": "sv-x-icu",
}
test_now_utc_template = "STATEMENT_TIMESTAMP() AT TIME ZONE 'UTC'"
django_test_skips = {
"opclasses are PostgreSQL only.": {
"indexes.tests.SchemaIndexesNotPostgreSQLTests."
"test_create_index_ignores_opclasses",
},
}
@cached_property
def prohibits_null_characters_in_text_exception(self):
if is_psycopg3:
return DataError, "PostgreSQL text fields cannot contain NUL (0x00) bytes"
else:
return ValueError, "A string literal cannot contain NUL (0x00) characters."
@cached_property
def introspected_field_types(self):
return {
**super().introspected_field_types,
"PositiveBigIntegerField": "BigIntegerField",
"PositiveIntegerField": "IntegerField",
"PositiveSmallIntegerField": "SmallIntegerField",
}
@cached_property
def is_postgresql_13(self):
return self.connection.pg_version >= 130000
@cached_property
def is_postgresql_14(self):
return self.connection.pg_version >= 140000
has_bit_xor = property(operator.attrgetter("is_postgresql_14"))
supports_covering_spgist_indexes = property(operator.attrgetter("is_postgresql_14"))
supports_unlimited_charfield = True
|
a100a2789eed59e260ebcbd3ae3cceca012fb825e19b2fc88964ca05e8149e5a | from collections import namedtuple
from django.db.backends.base.introspection import BaseDatabaseIntrospection
from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo
from django.db.backends.base.introspection import TableInfo as BaseTableInfo
from django.db.models import Index
FieldInfo = namedtuple("FieldInfo", BaseFieldInfo._fields + ("is_autofield", "comment"))
TableInfo = namedtuple("TableInfo", BaseTableInfo._fields + ("comment",))
class DatabaseIntrospection(BaseDatabaseIntrospection):
# Maps type codes to Django Field types.
data_types_reverse = {
16: "BooleanField",
17: "BinaryField",
20: "BigIntegerField",
21: "SmallIntegerField",
23: "IntegerField",
25: "TextField",
700: "FloatField",
701: "FloatField",
869: "GenericIPAddressField",
1042: "CharField", # blank-padded
1043: "CharField",
1082: "DateField",
1083: "TimeField",
1114: "DateTimeField",
1184: "DateTimeField",
1186: "DurationField",
1266: "TimeField",
1700: "DecimalField",
2950: "UUIDField",
3802: "JSONField",
}
# A hook for subclasses.
index_default_access_method = "btree"
ignored_tables = []
def get_field_type(self, data_type, description):
field_type = super().get_field_type(data_type, description)
if description.is_autofield or (
# Required for pre-Django 4.1 serial columns.
description.default
and "nextval" in description.default
):
if field_type == "IntegerField":
return "AutoField"
elif field_type == "BigIntegerField":
return "BigAutoField"
elif field_type == "SmallIntegerField":
return "SmallAutoField"
return field_type
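# Illustrative example (hypothetical sequence name): a pre-Django 4.1
# serial column with type code 23 ("IntegerField" in data_types_reverse)
# and a default of "nextval('app_book_id_seq'::regclass)" is reported as
# "AutoField"; the bigint/smallint variants map to "BigAutoField" and
# "SmallAutoField".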
def get_table_list(self, cursor):
"""Return a list of table and view names in the current database."""
cursor.execute(
"""
SELECT
c.relname,
CASE
WHEN c.relispartition THEN 'p'
WHEN c.relkind IN ('m', 'v') THEN 'v'
ELSE 't'
END,
obj_description(c.oid)
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v')
AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
AND pg_catalog.pg_table_is_visible(c.oid)
"""
)
return [
TableInfo(*row)
for row in cursor.fetchall()
if row[0] not in self.ignored_tables
]
def get_table_description(self, cursor, table_name):
"""
Return a description of the table with the DB-API cursor.description
interface.
"""
# Query the pg_catalog tables as cursor.description does not reliably
# return the nullable property and information_schema.columns does not
# contain details of materialized views.
cursor.execute(
"""
SELECT
a.attname AS column_name,
NOT (a.attnotnull OR (t.typtype = 'd' AND t.typnotnull)) AS is_nullable,
pg_get_expr(ad.adbin, ad.adrelid) AS column_default,
CASE WHEN collname = 'default' THEN NULL ELSE collname END AS collation,
a.attidentity != '' AS is_autofield,
col_description(a.attrelid, a.attnum) AS column_comment
FROM pg_attribute a
LEFT JOIN pg_attrdef ad ON a.attrelid = ad.adrelid AND a.attnum = ad.adnum
LEFT JOIN pg_collation co ON a.attcollation = co.oid
JOIN pg_type t ON a.atttypid = t.oid
JOIN pg_class c ON a.attrelid = c.oid
JOIN pg_namespace n ON c.relnamespace = n.oid
WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v')
AND c.relname = %s
AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
AND pg_catalog.pg_table_is_visible(c.oid)
""",
[table_name],
)
field_map = {line[0]: line[1:] for line in cursor.fetchall()}
cursor.execute(
"SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)
)
return [
FieldInfo(
line.name,
line.type_code,
# display_size is always None on psycopg2.
line.internal_size if line.display_size is None else line.display_size,
line.internal_size,
line.precision,
line.scale,
*field_map[line.name],
)
for line in cursor.description
]
def get_sequences(self, cursor, table_name, table_fields=()):
cursor.execute(
"""
SELECT
s.relname AS sequence_name,
a.attname AS colname
FROM
pg_class s
JOIN pg_depend d ON d.objid = s.oid
AND d.classid = 'pg_class'::regclass
AND d.refclassid = 'pg_class'::regclass
JOIN pg_attribute a ON d.refobjid = a.attrelid
AND d.refobjsubid = a.attnum
JOIN pg_class tbl ON tbl.oid = d.refobjid
AND tbl.relname = %s
AND pg_catalog.pg_table_is_visible(tbl.oid)
WHERE
s.relkind = 'S';
""",
[table_name],
)
return [
{"name": row[0], "table": table_name, "column": row[1]}
for row in cursor.fetchall()
]
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all foreign keys in the given table.
"""
cursor.execute(
"""
SELECT a1.attname, c2.relname, a2.attname
FROM pg_constraint con
LEFT JOIN pg_class c1 ON con.conrelid = c1.oid
LEFT JOIN pg_class c2 ON con.confrelid = c2.oid
LEFT JOIN
pg_attribute a1 ON c1.oid = a1.attrelid AND a1.attnum = con.conkey[1]
LEFT JOIN
pg_attribute a2 ON c2.oid = a2.attrelid AND a2.attnum = con.confkey[1]
WHERE
c1.relname = %s AND
con.contype = 'f' AND
c1.relnamespace = c2.relnamespace AND
pg_catalog.pg_table_is_visible(c1.oid)
""",
[table_name],
)
return {row[0]: (row[2], row[1]) for row in cursor.fetchall()}
def get_constraints(self, cursor, table_name):
"""
Retrieve any constraints or keys (unique, pk, fk, check, index) across
one or more columns. Also retrieve the definition of expression-based
indexes.
"""
constraints = {}
# Loop over the key table, collecting constraints. The column
# array must return column names in the same order in which they
# were created.
cursor.execute(
"""
SELECT
c.conname,
array(
SELECT attname
FROM unnest(c.conkey) WITH ORDINALITY cols(colid, arridx)
JOIN pg_attribute AS ca ON cols.colid = ca.attnum
WHERE ca.attrelid = c.conrelid
ORDER BY cols.arridx
),
c.contype,
(SELECT fkc.relname || '.' || fka.attname
FROM pg_attribute AS fka
JOIN pg_class AS fkc ON fka.attrelid = fkc.oid
WHERE fka.attrelid = c.confrelid AND fka.attnum = c.confkey[1]),
cl.reloptions
FROM pg_constraint AS c
JOIN pg_class AS cl ON c.conrelid = cl.oid
WHERE cl.relname = %s AND pg_catalog.pg_table_is_visible(cl.oid)
""",
[table_name],
)
for constraint, columns, kind, used_cols, options in cursor.fetchall():
constraints[constraint] = {
"columns": columns,
"primary_key": kind == "p",
"unique": kind in ["p", "u"],
"foreign_key": tuple(used_cols.split(".", 1)) if kind == "f" else None,
"check": kind == "c",
"index": False,
"definition": None,
"options": options,
}
# Now get indexes
cursor.execute(
"""
SELECT
indexname,
array_agg(attname ORDER BY arridx),
indisunique,
indisprimary,
array_agg(ordering ORDER BY arridx),
amname,
exprdef,
s2.attoptions
FROM (
SELECT
c2.relname as indexname, idx.*, attr.attname, am.amname,
CASE
WHEN idx.indexprs IS NOT NULL THEN
pg_get_indexdef(idx.indexrelid)
END AS exprdef,
CASE am.amname
WHEN %s THEN
CASE (option & 1)
WHEN 1 THEN 'DESC' ELSE 'ASC'
END
END as ordering,
c2.reloptions as attoptions
FROM (
SELECT *
FROM
pg_index i,
unnest(i.indkey, i.indoption)
WITH ORDINALITY koi(key, option, arridx)
) idx
LEFT JOIN pg_class c ON idx.indrelid = c.oid
LEFT JOIN pg_class c2 ON idx.indexrelid = c2.oid
LEFT JOIN pg_am am ON c2.relam = am.oid
LEFT JOIN
pg_attribute attr ON attr.attrelid = c.oid AND attr.attnum = idx.key
WHERE c.relname = %s AND pg_catalog.pg_table_is_visible(c.oid)
) s2
GROUP BY indexname, indisunique, indisprimary, amname, exprdef, attoptions;
""",
[self.index_default_access_method, table_name],
)
for (
index,
columns,
unique,
primary,
orders,
type_,
definition,
options,
) in cursor.fetchall():
if index not in constraints:
basic_index = (
type_ == self.index_default_access_method
and
# '_btree' references
# django.contrib.postgres.indexes.BTreeIndex.suffix.
not index.endswith("_btree")
and options is None
)
constraints[index] = {
"columns": columns if columns != [None] else [],
"orders": orders if orders != [None] else [],
"primary_key": primary,
"unique": unique,
"foreign_key": None,
"check": False,
"index": True,
"type": Index.suffix if basic_index else type_,
"definition": definition,
"options": options,
}
return constraints
|
90e3fca990edbb9f17f40edc2ef0fb5f3524eb5be5dd589ad0e1b0f0c4047e56 | """
PostgreSQL database backend for Django.
Requires psycopg2 >= 2.8.4 or psycopg >= 3.1
"""
import asyncio
import threading
import warnings
from contextlib import contextmanager
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import DatabaseError as WrappedDatabaseError
from django.db import connections
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorDebugWrapper as BaseCursorDebugWrapper
from django.utils.asyncio import async_unsafe
from django.utils.functional import cached_property
from django.utils.safestring import SafeString
from django.utils.version import get_version_tuple
try:
try:
import psycopg as Database
except ImportError:
import psycopg2 as Database
except ImportError:
raise ImproperlyConfigured("Error loading psycopg2 or psycopg module")
def psycopg_version():
version = Database.__version__.split(" ", 1)[0]
return get_version_tuple(version)
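# Illustrative example: psycopg2 reports a version string such as
# "2.9.6 (dt dec pq3 ext lt64)", so only the part before the first space
# is parsed:
#
#     get_version_tuple("2.9.6 (dt dec pq3 ext lt64)".split(" ", 1)[0])
#     # -> (2, 9, 6)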
if psycopg_version() < (2, 8, 4):
raise ImproperlyConfigured(
f"psycopg2 version 2.8.4 or newer is required; you have {Database.__version__}"
)
if (3,) <= psycopg_version() < (3, 1):
raise ImproperlyConfigured(
f"psycopg version 3.1 or newer is required; you have {Database.__version__}"
)
from .psycopg_any import IsolationLevel, is_psycopg3 # NOQA isort:skip
if is_psycopg3:
from psycopg import adapters, sql
from psycopg.pq import Format
from .psycopg_any import get_adapters_template, register_tzloader
TIMESTAMPTZ_OID = adapters.types["timestamptz"].oid
else:
import psycopg2.extensions
import psycopg2.extras
psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString)
psycopg2.extras.register_uuid()
# Register support for inet[] manually so we don't have to handle the Inet()
# object on load all the time.
INETARRAY_OID = 1041
INETARRAY = psycopg2.extensions.new_array_type(
(INETARRAY_OID,),
"INETARRAY",
psycopg2.extensions.UNICODE,
)
psycopg2.extensions.register_type(INETARRAY)
# Some of these import psycopg, so import them after checking if it's installed.
from .client import DatabaseClient # NOQA isort:skip
from .creation import DatabaseCreation # NOQA isort:skip
from .features import DatabaseFeatures # NOQA isort:skip
from .introspection import DatabaseIntrospection # NOQA isort:skip
from .operations import DatabaseOperations # NOQA isort:skip
from .schema import DatabaseSchemaEditor # NOQA isort:skip
def _get_varchar_column(data):
if data["max_length"] is None:
return "varchar"
return "varchar(%(max_length)s)" % data
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = "postgresql"
display_name = "PostgreSQL"
# This dictionary maps Field objects to their associated PostgreSQL column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
data_types = {
"AutoField": "integer",
"BigAutoField": "bigint",
"BinaryField": "bytea",
"BooleanField": "boolean",
"CharField": _get_varchar_column,
"DateField": "date",
"DateTimeField": "timestamp with time zone",
"DecimalField": "numeric(%(max_digits)s, %(decimal_places)s)",
"DurationField": "interval",
"FileField": "varchar(%(max_length)s)",
"FilePathField": "varchar(%(max_length)s)",
"FloatField": "double precision",
"IntegerField": "integer",
"BigIntegerField": "bigint",
"IPAddressField": "inet",
"GenericIPAddressField": "inet",
"JSONField": "jsonb",
"OneToOneField": "integer",
"PositiveBigIntegerField": "bigint",
"PositiveIntegerField": "integer",
"PositiveSmallIntegerField": "smallint",
"SlugField": "varchar(%(max_length)s)",
"SmallAutoField": "smallint",
"SmallIntegerField": "smallint",
"TextField": "text",
"TimeField": "time",
"UUIDField": "uuid",
}
data_type_check_constraints = {
"PositiveBigIntegerField": '"%(column)s" >= 0',
"PositiveIntegerField": '"%(column)s" >= 0',
"PositiveSmallIntegerField": '"%(column)s" >= 0',
}
data_types_suffix = {
"AutoField": "GENERATED BY DEFAULT AS IDENTITY",
"BigAutoField": "GENERATED BY DEFAULT AS IDENTITY",
"SmallAutoField": "GENERATED BY DEFAULT AS IDENTITY",
}
operators = {
"exact": "= %s",
"iexact": "= UPPER(%s)",
"contains": "LIKE %s",
"icontains": "LIKE UPPER(%s)",
"regex": "~ %s",
"iregex": "~* %s",
"gt": "> %s",
"gte": ">= %s",
"lt": "< %s",
"lte": "<= %s",
"startswith": "LIKE %s",
"endswith": "LIKE %s",
"istartswith": "LIKE UPPER(%s)",
"iendswith": "LIKE UPPER(%s)",
}
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
# In those cases, special characters for LIKE operators (e.g. \, %, _)
# should be escaped on the database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = (
r"REPLACE(REPLACE(REPLACE({}, E'\\', E'\\\\'), E'%%', E'\\%%'), E'_', E'\\_')"
)
pattern_ops = {
"contains": "LIKE '%%' || {} || '%%'",
"icontains": "LIKE '%%' || UPPER({}) || '%%'",
"startswith": "LIKE {} || '%%'",
"istartswith": "LIKE UPPER({}) || '%%'",
"endswith": "LIKE '%%' || {}",
"iendswith": "LIKE '%%' || UPPER({})",
}
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
# Classes instantiated in __init__().
client_class = DatabaseClient
creation_class = DatabaseCreation
features_class = DatabaseFeatures
introspection_class = DatabaseIntrospection
ops_class = DatabaseOperations
# PostgreSQL backend-specific attributes.
_named_cursor_idx = 0
def get_database_version(self):
"""
Return a tuple of the database's version.
E.g. for pg_version 120004, return (12, 4).
"""
return divmod(self.pg_version, 10000)
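# Illustrative example: PostgreSQL packs its version into a single
# integer, so divmod() splits it:
#
#     divmod(120004, 10000)  # -> (12, 4), i.e. PostgreSQL 12.4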
def get_connection_params(self):
settings_dict = self.settings_dict
# None may be used to connect to the default 'postgres' db
if settings_dict["NAME"] == "" and not settings_dict.get("OPTIONS", {}).get(
"service"
):
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME or OPTIONS['service'] value."
)
if len(settings_dict["NAME"] or "") > self.ops.max_name_length():
raise ImproperlyConfigured(
"The database name '%s' (%d characters) is longer than "
"PostgreSQL's limit of %d characters. Supply a shorter NAME "
"in settings.DATABASES."
% (
settings_dict["NAME"],
len(settings_dict["NAME"]),
self.ops.max_name_length(),
)
)
conn_params = {"client_encoding": "UTF8"}
if settings_dict["NAME"]:
conn_params = {
"dbname": settings_dict["NAME"],
**settings_dict["OPTIONS"],
}
elif settings_dict["NAME"] is None:
# Connect to the default 'postgres' db.
settings_dict.get("OPTIONS", {}).pop("service", None)
conn_params = {"dbname": "postgres", **settings_dict["OPTIONS"]}
else:
conn_params = {**settings_dict["OPTIONS"]}
conn_params.pop("assume_role", None)
conn_params.pop("isolation_level", None)
if settings_dict["USER"]:
conn_params["user"] = settings_dict["USER"]
if settings_dict["PASSWORD"]:
conn_params["password"] = settings_dict["PASSWORD"]
if settings_dict["HOST"]:
conn_params["host"] = settings_dict["HOST"]
if settings_dict["PORT"]:
conn_params["port"] = settings_dict["PORT"]
if is_psycopg3:
conn_params["context"] = get_adapters_template(
settings.USE_TZ, self.timezone
)
# Disable prepared statements by default to keep connection poolers
# working. Can be reenabled via OPTIONS in the settings dict.
conn_params["prepare_threshold"] = conn_params.pop(
"prepare_threshold", None
)
return conn_params
@async_unsafe
def get_new_connection(self, conn_params):
# self.isolation_level must be set:
# - after connecting to the database in order to obtain the database's
# default when no value is explicitly specified in options.
# - before calling _set_autocommit() because if autocommit is on, that
# will set connection.isolation_level to ISOLATION_LEVEL_AUTOCOMMIT.
options = self.settings_dict["OPTIONS"]
set_isolation_level = False
try:
isolation_level_value = options["isolation_level"]
except KeyError:
self.isolation_level = IsolationLevel.READ_COMMITTED
else:
# Set the isolation level to the value from OPTIONS.
try:
self.isolation_level = IsolationLevel(isolation_level_value)
set_isolation_level = True
except ValueError:
raise ImproperlyConfigured(
f"Invalid transaction isolation level {isolation_level_value} "
f"specified. Use one of the psycopg.IsolationLevel values."
)
connection = self.Database.connect(**conn_params)
if set_isolation_level:
connection.isolation_level = self.isolation_level
if not is_psycopg3:
# Register dummy loads() to avoid a round trip from psycopg2's
# decode to json.dumps() to json.loads(), when using a custom
# decoder in JSONField.
psycopg2.extras.register_default_jsonb(
conn_or_curs=connection, loads=lambda x: x
)
connection.cursor_factory = Cursor
return connection
def ensure_timezone(self):
if self.connection is None:
return False
conn_timezone_name = self.connection.info.parameter_status("TimeZone")
timezone_name = self.timezone_name
if timezone_name and conn_timezone_name != timezone_name:
with self.connection.cursor() as cursor:
cursor.execute(self.ops.set_time_zone_sql(), [timezone_name])
return True
return False
def ensure_role(self):
if self.connection is None:
return False
if new_role := self.settings_dict.get("OPTIONS", {}).get("assume_role"):
with self.connection.cursor() as cursor:
sql = self.ops.compose_sql("SET ROLE %s", [new_role])
cursor.execute(sql)
return True
return False
def init_connection_state(self):
super().init_connection_state()
# Commit after setting the time zone.
commit_tz = self.ensure_timezone()
# Set the role on the connection. This is useful if the credential used
# to log in is not the same as the role that owns database resources,
# as can be the case when using temporary or ephemeral credentials.
commit_role = self.ensure_role()
if (commit_role or commit_tz) and not self.get_autocommit():
self.connection.commit()
@async_unsafe
def create_cursor(self, name=None):
if name:
# In autocommit mode, the cursor will be used outside of a
# transaction, hence use a holdable cursor.
cursor = self.connection.cursor(
name, scrollable=False, withhold=self.connection.autocommit
)
else:
cursor = self.connection.cursor()
if is_psycopg3:
# Register the cursor timezone only if the connection disagrees, to
# avoid copying the adapter map.
tzloader = self.connection.adapters.get_loader(TIMESTAMPTZ_OID, Format.TEXT)
if self.timezone != tzloader.timezone:
register_tzloader(self.timezone, cursor)
else:
cursor.tzinfo_factory = self.tzinfo_factory if settings.USE_TZ else None
return cursor
def tzinfo_factory(self, offset):
return self.timezone
@async_unsafe
def chunked_cursor(self):
self._named_cursor_idx += 1
# Get the current async task
# Note that right now this is behind @async_unsafe, so this is
# unreachable, but in the future we'll start loosening this
# restriction. For now, it's here so that every use of "threading"
# is also async-compatible.
try:
current_task = asyncio.current_task()
except RuntimeError:
current_task = None
# The current task can be None even if the current_task() call didn't error.
if current_task:
task_ident = str(id(current_task))
else:
task_ident = "sync"
# Use that and the thread ident to get a unique name
return self._cursor(
name="_django_curs_%d_%s_%d"
% (
# Avoid reusing name in other threads / tasks
threading.current_thread().ident,
task_ident,
self._named_cursor_idx,
)
)
def _set_autocommit(self, autocommit):
with self.wrap_database_errors:
self.connection.autocommit = autocommit
def check_constraints(self, table_names=None):
"""
Check constraints by setting them to immediate. Return them to deferred
afterward.
"""
with self.cursor() as cursor:
cursor.execute("SET CONSTRAINTS ALL IMMEDIATE")
cursor.execute("SET CONSTRAINTS ALL DEFERRED")
def is_usable(self):
try:
# Use a psycopg cursor directly, bypassing Django's utilities.
with self.connection.cursor() as cursor:
cursor.execute("SELECT 1")
except Database.Error:
return False
else:
return True
@contextmanager
def _nodb_cursor(self):
cursor = None
try:
with super()._nodb_cursor() as cursor:
yield cursor
except (Database.DatabaseError, WrappedDatabaseError):
if cursor is not None:
raise
warnings.warn(
"Normally Django will use a connection to the 'postgres' database "
"to avoid running initialization queries against the production "
"database when it's not needed (for example, when running tests). "
"Django was unable to create a connection to the 'postgres' database "
"and will use the first PostgreSQL database instead.",
RuntimeWarning,
)
for connection in connections.all():
if (
connection.vendor == "postgresql"
and connection.settings_dict["NAME"] != "postgres"
):
conn = self.__class__(
{
**self.settings_dict,
"NAME": connection.settings_dict["NAME"],
},
alias=self.alias,
)
try:
with conn.cursor() as cursor:
yield cursor
finally:
conn.close()
break
else:
raise
@cached_property
def pg_version(self):
with self.temporary_connection():
return self.connection.info.server_version
def make_debug_cursor(self, cursor):
return CursorDebugWrapper(cursor, self)
if is_psycopg3:
class Cursor(Database.Cursor):
"""
A subclass of psycopg cursor implementing callproc.
"""
def callproc(self, name, args=None):
if not isinstance(name, sql.Identifier):
name = sql.Identifier(name)
qparts = [sql.SQL("SELECT * FROM "), name, sql.SQL("(")]
if args:
for item in args:
qparts.append(sql.Literal(item))
qparts.append(sql.SQL(","))
del qparts[-1]
qparts.append(sql.SQL(")"))
stmt = sql.Composed(qparts)
self.execute(stmt)
return args
class CursorDebugWrapper(BaseCursorDebugWrapper):
def copy(self, statement):
with self.debug_sql(statement):
return self.cursor.copy(statement)
else:
Cursor = psycopg2.extensions.cursor
class CursorDebugWrapper(BaseCursorDebugWrapper):
def copy_expert(self, sql, file, *args):
with self.debug_sql(sql):
return self.cursor.copy_expert(sql, file, *args)
def copy_to(self, file, table, *args, **kwargs):
with self.debug_sql(sql="COPY %s TO STDOUT" % table):
return self.cursor.copy_to(file, table, *args, **kwargs)
|
e2a830023746cdc244693056da3bc0fb1246dc43d96ede3d4dd9fd2951959e79 | import json
from functools import lru_cache, partial
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.backends.postgresql.psycopg_any import (
Inet,
Jsonb,
errors,
is_psycopg3,
mogrify,
)
from django.db.backends.utils import split_tzname_delta
from django.db.models.constants import OnConflict
from django.utils.regex_helper import _lazy_re_compile
@lru_cache
def get_json_dumps(encoder):
if encoder is None:
return json.dumps
return partial(json.dumps, cls=encoder)
class DatabaseOperations(BaseDatabaseOperations):
cast_char_field_without_max_length = "varchar"
explain_prefix = "EXPLAIN"
explain_options = frozenset(
[
"ANALYZE",
"BUFFERS",
"COSTS",
"SETTINGS",
"SUMMARY",
"TIMING",
"VERBOSE",
"WAL",
]
)
cast_data_types = {
"AutoField": "integer",
"BigAutoField": "bigint",
"SmallAutoField": "smallint",
}
if is_psycopg3:
from psycopg.types import numeric
integerfield_type_map = {
"SmallIntegerField": numeric.Int2,
"IntegerField": numeric.Int4,
"BigIntegerField": numeric.Int8,
"PositiveSmallIntegerField": numeric.Int2,
"PositiveIntegerField": numeric.Int4,
"PositiveBigIntegerField": numeric.Int8,
}
def unification_cast_sql(self, output_field):
internal_type = output_field.get_internal_type()
if internal_type in (
"GenericIPAddressField",
"IPAddressField",
"TimeField",
"UUIDField",
):
# PostgreSQL will resolve a union as type 'text' if input types are
# 'unknown'.
# https://www.postgresql.org/docs/current/typeconv-union-case.html
# These fields cannot be implicitly cast back in the default
# PostgreSQL configuration so we need to explicitly cast them.
# We must also remove components of the type within brackets:
# varchar(255) -> varchar.
return (
"CAST(%%s AS %s)" % output_field.db_type(self.connection).split("(")[0]
)
return "%s"
# EXTRACT format cannot be passed in parameters.
_extract_format_re = _lazy_re_compile(r"[A-Z_]+")
def date_extract_sql(self, lookup_type, sql, params):
# https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
if lookup_type == "week_day":
# For consistency across backends, we return Sunday=1, Saturday=7.
return f"EXTRACT(DOW FROM {sql}) + 1", params
elif lookup_type == "iso_week_day":
return f"EXTRACT(ISODOW FROM {sql})", params
elif lookup_type == "iso_year":
return f"EXTRACT(ISOYEAR FROM {sql})", params
lookup_type = lookup_type.upper()
if not self._extract_format_re.fullmatch(lookup_type):
raise ValueError(f"Invalid lookup type: {lookup_type!r}")
return f"EXTRACT({lookup_type} FROM {sql})", params
def date_trunc_sql(self, lookup_type, sql, params, tzname=None):
sql, params = self._convert_sql_to_tz(sql, params, tzname)
# https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return f"DATE_TRUNC(%s, {sql})", (lookup_type, *params)
def _prepare_tzname_delta(self, tzname):
tzname, sign, offset = split_tzname_delta(tzname)
if offset:
sign = "-" if sign == "+" else "+"
return f"{tzname}{sign}{offset}"
return tzname
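# Illustrative example: PostgreSQL interprets POSIX-style offsets with the
# opposite sign, so the delta is flipped before being passed to
# AT TIME ZONE:
#
#     self._prepare_tzname_delta("UTC+05:30")    # -> "UTC-05:30"
#     self._prepare_tzname_delta("Europe/Oslo")  # -> "Europe/Oslo"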
def _convert_sql_to_tz(self, sql, params, tzname):
if tzname and settings.USE_TZ:
tzname_param = self._prepare_tzname_delta(tzname)
return f"{sql} AT TIME ZONE %s", (*params, tzname_param)
return sql, params
def datetime_cast_date_sql(self, sql, params, tzname):
sql, params = self._convert_sql_to_tz(sql, params, tzname)
return f"({sql})::date", params
def datetime_cast_time_sql(self, sql, params, tzname):
sql, params = self._convert_sql_to_tz(sql, params, tzname)
return f"({sql})::time", params
def datetime_extract_sql(self, lookup_type, sql, params, tzname):
sql, params = self._convert_sql_to_tz(sql, params, tzname)
if lookup_type == "second":
# Truncate fractional seconds.
return f"EXTRACT(SECOND FROM DATE_TRUNC(%s, {sql}))", ("second", *params)
return self.date_extract_sql(lookup_type, sql, params)
def datetime_trunc_sql(self, lookup_type, sql, params, tzname):
sql, params = self._convert_sql_to_tz(sql, params, tzname)
# https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return f"DATE_TRUNC(%s, {sql})", (lookup_type, *params)
def time_extract_sql(self, lookup_type, sql, params):
if lookup_type == "second":
# Truncate fractional seconds.
return f"EXTRACT(SECOND FROM DATE_TRUNC(%s, {sql}))", ("second", *params)
return self.date_extract_sql(lookup_type, sql, params)
def time_trunc_sql(self, lookup_type, sql, params, tzname=None):
sql, params = self._convert_sql_to_tz(sql, params, tzname)
return f"DATE_TRUNC(%s, {sql})::time", (lookup_type, *params)
def deferrable_sql(self):
return " DEFERRABLE INITIALLY DEFERRED"
def fetch_returned_insert_rows(self, cursor):
"""
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table, return the tuple of returned data.
"""
return cursor.fetchall()
def lookup_cast(self, lookup_type, internal_type=None):
lookup = "%s"
if lookup_type == "isnull" and internal_type in (
"CharField",
"EmailField",
"TextField",
"CICharField",
"CIEmailField",
"CITextField",
):
return "%s::text"
# Cast text lookups to text to allow things like filter(x__contains=4)
if lookup_type in (
"iexact",
"contains",
"icontains",
"startswith",
"istartswith",
"endswith",
"iendswith",
"regex",
"iregex",
):
if internal_type in ("IPAddressField", "GenericIPAddressField"):
lookup = "HOST(%s)"
# RemovedInDjango51Warning.
elif internal_type in ("CICharField", "CIEmailField", "CITextField"):
lookup = "%s::citext"
else:
lookup = "%s::text"
# Use UPPER(x) for case-insensitive lookups; it's faster.
if lookup_type in ("iexact", "icontains", "istartswith", "iendswith"):
lookup = "UPPER(%s)" % lookup
return lookup
def no_limit_value(self):
return None
def prepare_sql_script(self, sql):
return [sql]
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def compose_sql(self, sql, params):
return mogrify(sql, params, self.connection)
def set_time_zone_sql(self):
return "SELECT set_config('TimeZone', %s, false)"
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
if not tables:
return []
# Perform a single SQL 'TRUNCATE x, y, z...;' statement. It allows us
# to truncate tables referenced by a foreign key in any other table.
sql_parts = [
style.SQL_KEYWORD("TRUNCATE"),
", ".join(style.SQL_FIELD(self.quote_name(table)) for table in tables),
]
if reset_sequences:
sql_parts.append(style.SQL_KEYWORD("RESTART IDENTITY"))
if allow_cascade:
sql_parts.append(style.SQL_KEYWORD("CASCADE"))
return ["%s;" % " ".join(sql_parts)]
def sequence_reset_by_name_sql(self, style, sequences):
# 'ALTER SEQUENCE sequence_name RESTART WITH 1;'-style SQL statements
# to reset sequence indices.
sql = []
for sequence_info in sequences:
table_name = sequence_info["table"]
# 'id' will be the case if it's an m2m using an autogenerated
# intermediate table (see BaseDatabaseIntrospection.sequence_list).
column_name = sequence_info["column"] or "id"
sql.append(
"%s setval(pg_get_serial_sequence('%s','%s'), 1, false);"
% (
style.SQL_KEYWORD("SELECT"),
style.SQL_TABLE(self.quote_name(table_name)),
style.SQL_FIELD(column_name),
)
)
return sql
def tablespace_sql(self, tablespace, inline=False):
if inline:
return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace)
else:
return "TABLESPACE %s" % self.quote_name(tablespace)
def sequence_reset_sql(self, style, model_list):
from django.db import models
output = []
qn = self.quote_name
for model in model_list:
# Use `coalesce` to set the sequence for each model to the max pk
# value if there are records, or 1 if there are none. Set the
# `is_called` property (the third argument to `setval`) to true if
# there are records (as the max pk value is already in use),
# otherwise set it to false. Use pg_get_serial_sequence to get the
# underlying sequence name from the table name and column name.
for f in model._meta.local_fields:
if isinstance(f, models.AutoField):
output.append(
"%s setval(pg_get_serial_sequence('%s','%s'), "
"coalesce(max(%s), 1), max(%s) %s null) %s %s;"
% (
style.SQL_KEYWORD("SELECT"),
style.SQL_TABLE(qn(model._meta.db_table)),
style.SQL_FIELD(f.column),
style.SQL_FIELD(qn(f.column)),
style.SQL_FIELD(qn(f.column)),
style.SQL_KEYWORD("IS NOT"),
style.SQL_KEYWORD("FROM"),
style.SQL_TABLE(qn(model._meta.db_table)),
)
)
# Only one AutoField is allowed per model, so don't bother
# continuing.
break
return output
def prep_for_iexact_query(self, x):
return x
def max_name_length(self):
"""
Return the maximum length of an identifier.
The maximum length of an identifier is 63 by default, but can be
changed by recompiling PostgreSQL after editing the NAMEDATALEN
macro in src/include/pg_config_manual.h.
This implementation returns 63, but can be overridden by a custom
database backend that inherits most of its behavior from this one.
"""
return 63
def distinct_sql(self, fields, params):
if fields:
params = [param for param_list in params for param in param_list]
return (["DISTINCT ON (%s)" % ", ".join(fields)], params)
else:
return ["DISTINCT"], []
if is_psycopg3:
def last_executed_query(self, cursor, sql, params):
try:
return self.compose_sql(sql, params)
except errors.DataError:
return None
else:
def last_executed_query(self, cursor, sql, params):
# https://www.psycopg.org/docs/cursor.html#cursor.query
# The query attribute is a Psycopg extension to the DB API 2.0.
if cursor.query is not None:
return cursor.query.decode()
return None
def return_insert_columns(self, fields):
if not fields:
return "", ()
columns = [
"%s.%s"
% (
self.quote_name(field.model._meta.db_table),
self.quote_name(field.column),
)
for field in fields
]
return "RETURNING %s" % ", ".join(columns), ()
def bulk_insert_sql(self, fields, placeholder_rows):
placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
return "VALUES " + values_sql
if is_psycopg3:
def adapt_integerfield_value(self, value, internal_type):
if value is None or hasattr(value, "resolve_expression"):
return value
return self.integerfield_type_map[internal_type](value)
def adapt_datefield_value(self, value):
return value
def adapt_datetimefield_value(self, value):
return value
def adapt_timefield_value(self, value):
return value
def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
return value
def adapt_ipaddressfield_value(self, value):
if value:
return Inet(value)
return None
def adapt_json_value(self, value, encoder):
return Jsonb(value, dumps=get_json_dumps(encoder))
def subtract_temporals(self, internal_type, lhs, rhs):
if internal_type == "DateField":
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
params = (*lhs_params, *rhs_params)
return "(interval '1 day' * (%s - %s))" % (lhs_sql, rhs_sql), params
return super().subtract_temporals(internal_type, lhs, rhs)
def explain_query_prefix(self, format=None, **options):
extra = {}
# Normalize options.
if options:
options = {
name.upper(): "true" if value else "false"
for name, value in options.items()
}
for valid_option in self.explain_options:
value = options.pop(valid_option, None)
if value is not None:
extra[valid_option] = value
prefix = super().explain_query_prefix(format, **options)
if format:
extra["FORMAT"] = format
if extra:
prefix += " (%s)" % ", ".join("%s %s" % i for i in extra.items())
return prefix
def on_conflict_suffix_sql(self, fields, on_conflict, update_fields, unique_fields):
if on_conflict == OnConflict.IGNORE:
return "ON CONFLICT DO NOTHING"
if on_conflict == OnConflict.UPDATE:
return "ON CONFLICT(%s) DO UPDATE SET %s" % (
", ".join(map(self.quote_name, unique_fields)),
", ".join(
[
f"{field} = EXCLUDED.{field}"
for field in map(self.quote_name, update_fields)
]
),
)
return super().on_conflict_suffix_sql(
fields,
on_conflict,
update_fields,
unique_fields,
)
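# Illustrative example (hypothetical columns): an upsert keyed on a unique
# "slug" column that updates "title" appends, e.g.:
#
#     ON CONFLICT("slug") DO UPDATE SET "title" = EXCLUDED."title"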
|
dd562376afd45c13f3d7113e892bbd866f0cd706bd97bee00e6ce047d86baed8 | from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.ddl_references import IndexColumns
from django.db.backends.postgresql.psycopg_any import sql
from django.db.backends.utils import strip_quotes
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
# Setting all constraints to IMMEDIATE to allow changing data in the same
# transaction.
sql_update_with_default = (
"UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL"
"; SET CONSTRAINTS ALL IMMEDIATE"
)
sql_alter_sequence_type = "ALTER SEQUENCE IF EXISTS %(sequence)s AS %(type)s"
sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE"
sql_create_index = (
"CREATE INDEX %(name)s ON %(table)s%(using)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_create_index_concurrently = (
"CREATE INDEX CONCURRENTLY %(name)s ON %(table)s%(using)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_delete_index = "DROP INDEX IF EXISTS %(name)s"
sql_delete_index_concurrently = "DROP INDEX CONCURRENTLY IF EXISTS %(name)s"
# Setting the constraint to IMMEDIATE to allow changing data in the same
# transaction.
sql_create_column_inline_fk = (
"CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s"
"; SET CONSTRAINTS %(namespace)s%(name)s IMMEDIATE"
)
# Setting the constraint to IMMEDIATE runs any deferred checks to allow
# dropping it in the same transaction.
sql_delete_fk = (
"SET CONSTRAINTS %(name)s IMMEDIATE; "
"ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
)
sql_delete_procedure = "DROP FUNCTION %(procedure)s(%(param_types)s)"
def execute(self, sql, params=()):
# Merge the query client-side, as PostgreSQL won't do it server-side.
if params is None:
return super().execute(sql, params)
sql = self.connection.ops.compose_sql(str(sql), params)
# Don't let the superclass touch anything.
return super().execute(sql, None)
sql_add_identity = (
"ALTER TABLE %(table)s ALTER COLUMN %(column)s ADD "
"GENERATED BY DEFAULT AS IDENTITY"
)
sql_drop_identity = (
"ALTER TABLE %(table)s ALTER COLUMN %(column)s DROP IDENTITY IF EXISTS"
)
def quote_value(self, value):
if isinstance(value, str):
value = value.replace("%", "%%")
return sql.quote(value, self.connection.connection)
def _field_indexes_sql(self, model, field):
output = super()._field_indexes_sql(model, field)
like_index_statement = self._create_like_index_sql(model, field)
if like_index_statement is not None:
output.append(like_index_statement)
return output
def _field_data_type(self, field):
if field.is_relation:
return field.rel_db_type(self.connection)
return self.connection.data_types.get(
field.get_internal_type(),
field.db_type(self.connection),
)
def _field_base_data_types(self, field):
# Yield base data types for array fields.
if field.base_field.get_internal_type() == "ArrayField":
yield from self._field_base_data_types(field.base_field)
else:
yield self._field_data_type(field.base_field)
def _create_like_index_sql(self, model, field):
"""
Return the statement to create an index with varchar operator pattern
when the column type is 'varchar' or 'text', otherwise return None.
"""
db_type = field.db_type(connection=self.connection)
if db_type is not None and (field.db_index or field.unique):
# Fields with database column types of `varchar` and `text` need
# a second index that specifies their operator class, which is
# needed when performing correct LIKE queries outside the
# C locale. See #12234.
#
# The same doesn't apply to array fields such as varchar[size]
# and text[size], so skip them.
if "[" in db_type:
return None
# Non-deterministic collations on PostgreSQL don't support indexes
# for the varchar_pattern_ops/text_pattern_ops operator classes.
if getattr(field, "db_collation", None):
return None
if db_type.startswith("varchar"):
return self._create_index_sql(
model,
fields=[field],
suffix="_like",
opclasses=["varchar_pattern_ops"],
)
elif db_type.startswith("text"):
return self._create_index_sql(
model,
fields=[field],
suffix="_like",
opclasses=["text_pattern_ops"],
)
return None
def _using_sql(self, new_field, old_field):
using_sql = " USING %(column)s::%(type)s"
new_internal_type = new_field.get_internal_type()
old_internal_type = old_field.get_internal_type()
if new_internal_type == "ArrayField" and new_internal_type == old_internal_type:
# Compare base data types for array fields.
if list(self._field_base_data_types(old_field)) != list(
self._field_base_data_types(new_field)
):
return using_sql
elif self._field_data_type(old_field) != self._field_data_type(new_field):
return using_sql
return ""
def _get_sequence_name(self, table, column):
with self.connection.cursor() as cursor:
for sequence in self.connection.introspection.get_sequences(cursor, table):
if sequence["column"] == column:
return sequence["name"]
return None
def _alter_column_type_sql(
self, model, old_field, new_field, new_type, old_collation, new_collation
):
# Drop indexes on varchar/text/citext columns that are changing to a
# different type.
old_db_params = old_field.db_parameters(connection=self.connection)
old_type = old_db_params["type"]
if (old_field.db_index or old_field.unique) and (
(old_type.startswith("varchar") and not new_type.startswith("varchar"))
or (old_type.startswith("text") and not new_type.startswith("text"))
or (old_type.startswith("citext") and not new_type.startswith("citext"))
):
index_name = self._create_index_name(
model._meta.db_table, [old_field.column], suffix="_like"
)
self.execute(self._delete_index_sql(model, index_name))
self.sql_alter_column_type = (
"ALTER COLUMN %(column)s TYPE %(type)s%(collation)s"
)
# Cast when data type changed.
if using_sql := self._using_sql(new_field, old_field):
self.sql_alter_column_type += using_sql
new_internal_type = new_field.get_internal_type()
old_internal_type = old_field.get_internal_type()
# Make ALTER TYPE interact correctly with IDENTITY columns.
table = strip_quotes(model._meta.db_table)
auto_field_types = {
"AutoField",
"BigAutoField",
"SmallAutoField",
}
old_is_auto = old_internal_type in auto_field_types
new_is_auto = new_internal_type in auto_field_types
if new_is_auto and not old_is_auto:
column = strip_quotes(new_field.column)
return (
(
self.sql_alter_column_type
% {
"column": self.quote_name(column),
"type": new_type,
"collation": "",
},
[],
),
[
(
self.sql_add_identity
% {
"table": self.quote_name(table),
"column": self.quote_name(column),
},
[],
),
],
)
elif old_is_auto and not new_is_auto:
# Drop the IDENTITY if it exists (pre-Django 4.1 serial columns
# don't have one).
self.execute(
self.sql_drop_identity
% {
"table": self.quote_name(table),
"column": self.quote_name(strip_quotes(new_field.column)),
}
)
column = strip_quotes(new_field.column)
fragment, _ = super()._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
# Drop the sequence if it exists (Django 4.1+ identity columns
# don't have one).
other_actions = []
if sequence_name := self._get_sequence_name(table, column):
other_actions = [
(
self.sql_delete_sequence
% {
"sequence": self.quote_name(sequence_name),
},
[],
)
]
return fragment, other_actions
elif new_is_auto and old_is_auto and old_internal_type != new_internal_type:
fragment, _ = super()._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
column = strip_quotes(new_field.column)
db_types = {
"AutoField": "integer",
"BigAutoField": "bigint",
"SmallAutoField": "smallint",
}
# Alter the sequence type if it exists (Django 4.1+ identity
# columns don't have one).
other_actions = []
if sequence_name := self._get_sequence_name(table, column):
other_actions = [
(
self.sql_alter_sequence_type
% {
"sequence": self.quote_name(sequence_name),
"type": db_types[new_internal_type],
},
[],
),
]
return fragment, other_actions
else:
return super()._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
def _alter_column_collation_sql(
self, model, new_field, new_type, new_collation, old_field
):
sql = self.sql_alter_column_collate
# Cast when data type changed.
if using_sql := self._using_sql(new_field, old_field):
sql += using_sql
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_type,
"collation": " " + self._collate_sql(new_collation)
if new_collation
else "",
},
[],
)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
super()._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict,
)
# Added an index? Create any PostgreSQL-specific indexes.
if (not (old_field.db_index or old_field.unique) and new_field.db_index) or (
not old_field.unique and new_field.unique
):
like_index_statement = self._create_like_index_sql(model, new_field)
if like_index_statement is not None:
self.execute(like_index_statement)
# Removed an index? Drop any PostgreSQL-specific indexes.
if old_field.unique and not (new_field.db_index or new_field.unique):
index_to_remove = self._create_index_name(
model._meta.db_table, [old_field.column], suffix="_like"
)
self.execute(self._delete_index_sql(model, index_to_remove))
def _index_columns(self, table, columns, col_suffixes, opclasses):
if opclasses:
return IndexColumns(
table,
columns,
self.quote_name,
col_suffixes=col_suffixes,
opclasses=opclasses,
)
return super()._index_columns(table, columns, col_suffixes, opclasses)
def add_index(self, model, index, concurrently=False):
self.execute(
index.create_sql(model, self, concurrently=concurrently), params=None
)
def remove_index(self, model, index, concurrently=False):
self.execute(index.remove_sql(model, self, concurrently=concurrently))
def _delete_index_sql(self, model, name, sql=None, concurrently=False):
sql = (
self.sql_delete_index_concurrently
if concurrently
else self.sql_delete_index
)
return super()._delete_index_sql(model, name, sql)
def _create_index_sql(
self,
model,
*,
fields=None,
name=None,
suffix="",
using="",
db_tablespace=None,
col_suffixes=(),
sql=None,
opclasses=(),
condition=None,
concurrently=False,
include=None,
expressions=None,
):
sql = (
self.sql_create_index
if not concurrently
else self.sql_create_index_concurrently
)
return super()._create_index_sql(
model,
fields=fields,
name=name,
suffix=suffix,
using=using,
db_tablespace=db_tablespace,
col_suffixes=col_suffixes,
sql=sql,
opclasses=opclasses,
condition=condition,
include=include,
expressions=expressions,
)
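

# Illustrative sketch (added; not part of Django): _alter_column_type_sql()
# returns a (fragment, other_actions) pair in which every element is an
# ("SQL with placeholders", params) tuple. The stand-alone function below
# reproduces the shape of the ADD IDENTITY branch; the two template strings
# are hypothetical stand-ins for the attributes defined on the schema editor.
def _demo_identity_fragments(table, column, new_type, quote_name=lambda n: '"%s"' % n):
    sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s"
    sql_add_identity = (
        "ALTER TABLE %(table)s ALTER COLUMN %(column)s "
        "ADD GENERATED BY DEFAULT AS IDENTITY"
    )
    fragment = (
        sql_alter_column_type
        % {"column": quote_name(column), "type": new_type, "collation": ""},
        [],
    )
    other_actions = [
        (
            sql_add_identity
            % {"table": quote_name(table), "column": quote_name(column)},
            [],
        )
    ]
    return fragment, other_actions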
|
373fcd4f899c80015e12a1c0cc10681a9c838d85c829fac65630a5826c9c171a | import ipaddress
from functools import lru_cache
try:
from psycopg import ClientCursor, IsolationLevel, adapt, adapters, errors, sql
from psycopg.postgres import types
from psycopg.types.datetime import TimestamptzLoader
from psycopg.types.json import Jsonb
from psycopg.types.range import Range, RangeDumper
from psycopg.types.string import TextLoader
Inet = ipaddress.ip_address
DateRange = DateTimeRange = DateTimeTZRange = NumericRange = Range
RANGE_TYPES = (Range,)
TSRANGE_OID = types["tsrange"].oid
TSTZRANGE_OID = types["tstzrange"].oid
def mogrify(sql, params, connection):
return ClientCursor(connection.connection).mogrify(sql, params)
# Adapters.
class BaseTzLoader(TimestamptzLoader):
"""
        Load a PostgreSQL timestamptz using a specific timezone.
        The timezone can be None too, in which case the tzinfo is stripped.
"""
timezone = None
def load(self, data):
res = super().load(data)
return res.replace(tzinfo=self.timezone)
def register_tzloader(tz, context):
class SpecificTzLoader(BaseTzLoader):
timezone = tz
context.adapters.register_loader("timestamptz", SpecificTzLoader)
class DjangoRangeDumper(RangeDumper):
"""A Range dumper customized for Django."""
def upgrade(self, obj, format):
# Dump ranges containing naive datetimes as tstzrange, because
# Django doesn't use tz-aware ones.
dumper = super().upgrade(obj, format)
if dumper is not self and dumper.oid == TSRANGE_OID:
dumper.oid = TSTZRANGE_OID
return dumper
@lru_cache
def get_adapters_template(use_tz, timezone):
        # Create an adapters map extending the base one.
ctx = adapt.AdaptersMap(adapters)
        # Register a no-op loader to avoid a round trip from psycopg version 3
        # decode to json.dumps() to json.loads(), when using a custom decoder
        # in JSONField.
ctx.register_loader("jsonb", TextLoader)
# Don't convert automatically from PostgreSQL network types to Python
# ipaddress.
ctx.register_loader("inet", TextLoader)
ctx.register_loader("cidr", TextLoader)
ctx.register_dumper(Range, DjangoRangeDumper)
        # Register a timestamptz loader configured for the given timezone.
        # This, however, can be overridden by create_cursor().
register_tzloader(timezone, ctx)
return ctx
is_psycopg3 = True
except ImportError:
from enum import IntEnum
from psycopg2 import errors, extensions, sql # NOQA
from psycopg2.extras import DateRange, DateTimeRange, DateTimeTZRange, Inet # NOQA
from psycopg2.extras import Json as Jsonb # NOQA
from psycopg2.extras import NumericRange, Range # NOQA
RANGE_TYPES = (DateRange, DateTimeRange, DateTimeTZRange, NumericRange)
class IsolationLevel(IntEnum):
READ_UNCOMMITTED = extensions.ISOLATION_LEVEL_READ_UNCOMMITTED
READ_COMMITTED = extensions.ISOLATION_LEVEL_READ_COMMITTED
REPEATABLE_READ = extensions.ISOLATION_LEVEL_REPEATABLE_READ
SERIALIZABLE = extensions.ISOLATION_LEVEL_SERIALIZABLE
def _quote(value, connection=None):
adapted = extensions.adapt(value)
if hasattr(adapted, "encoding"):
adapted.encoding = "utf8"
# getquoted() returns a quoted bytestring of the adapted value.
return adapted.getquoted().decode()
sql.quote = _quote
def mogrify(sql, params, connection):
with connection.cursor() as cursor:
return cursor.mogrify(sql, params).decode()
is_psycopg3 = False
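

# Illustrative usage sketch (added; not part of Django): code importing this
# shim stays driver-agnostic, since the same names resolve to psycopg 3 or
# psycopg2 objects depending on which driver is installed. Assumes a
# configured default PostgreSQL database.
def _demo_driver_shim():
    from django.db import connection

    connection.ensure_connection()
    rendered = mogrify("SELECT %s + %s", [20, 22], connection)
    return is_psycopg3, IsolationLevel.SERIALIZABLE, rendered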
|
bd3706a7a4459c2ff683b46119edc9354808de0ba30109e8cdbd14431d8a0ebe | import sys
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.base.creation import BaseDatabaseCreation
from django.db.backends.postgresql.psycopg_any import errors
from django.db.backends.utils import strip_quotes
class DatabaseCreation(BaseDatabaseCreation):
def _quote_name(self, name):
return self.connection.ops.quote_name(name)
def _get_database_create_suffix(self, encoding=None, template=None):
suffix = ""
if encoding:
suffix += " ENCODING '{}'".format(encoding)
if template:
suffix += " TEMPLATE {}".format(self._quote_name(template))
return suffix and "WITH" + suffix
def sql_table_creation_suffix(self):
test_settings = self.connection.settings_dict["TEST"]
if test_settings.get("COLLATION") is not None:
raise ImproperlyConfigured(
"PostgreSQL does not support collation setting at database "
"creation time."
)
return self._get_database_create_suffix(
encoding=test_settings["CHARSET"],
template=test_settings.get("TEMPLATE"),
)
def _database_exists(self, cursor, database_name):
cursor.execute(
"SELECT 1 FROM pg_catalog.pg_database WHERE datname = %s",
[strip_quotes(database_name)],
)
return cursor.fetchone() is not None
def _execute_create_test_db(self, cursor, parameters, keepdb=False):
try:
if keepdb and self._database_exists(cursor, parameters["dbname"]):
# If the database should be kept and it already exists, don't
# try to create a new one.
return
super()._execute_create_test_db(cursor, parameters, keepdb)
except Exception as e:
cause = e.__cause__
if cause and not isinstance(cause, errors.DuplicateDatabase):
# All errors except "database already exists" cancel tests.
self.log("Got an error creating the test database: %s" % e)
sys.exit(2)
elif not keepdb:
# If the database should be kept, ignore "database already
# exists".
raise
def _clone_test_db(self, suffix, verbosity, keepdb=False):
# CREATE DATABASE ... WITH TEMPLATE ... requires closing connections
# to the template database.
self.connection.close()
source_database_name = self.connection.settings_dict["NAME"]
target_database_name = self.get_test_db_clone_settings(suffix)["NAME"]
test_db_params = {
"dbname": self._quote_name(target_database_name),
"suffix": self._get_database_create_suffix(template=source_database_name),
}
with self._nodb_cursor() as cursor:
try:
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception:
try:
if verbosity >= 1:
self.log(
"Destroying old test database for alias %s..."
% (
self._get_database_display_str(
verbosity, target_database_name
),
)
)
cursor.execute("DROP DATABASE %(dbname)s" % test_db_params)
self._execute_create_test_db(cursor, test_db_params, keepdb)
except Exception as e:
self.log("Got an error cloning the test database: %s" % e)
sys.exit(2)
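

# Illustrative sketch (added; not part of Django): a stand-alone replica of
# _get_database_create_suffix() above, with quote_name mimicking
# connection.ops.quote_name, to show the generated CREATE DATABASE suffix.
def _demo_create_suffix(encoding=None, template=None, quote_name=lambda n: '"%s"' % n):
    suffix = ""
    if encoding:
        suffix += " ENCODING '{}'".format(encoding)
    if template:
        suffix += " TEMPLATE {}".format(quote_name(template))
    return suffix and "WITH" + suffix


# _demo_create_suffix(encoding="UTF8", template="src")
# -> 'WITH ENCODING \'UTF8\' TEMPLATE "src"'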
|
e2e8cc664513081c542aaaea9cd9d920121872951c716394604e14b4f6474895 | import operator
from django.db import transaction
from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.utils import OperationalError
from django.utils.functional import cached_property
from .base import Database
class DatabaseFeatures(BaseDatabaseFeatures):
minimum_database_version = (3, 21)
test_db_allows_multiple_connections = False
supports_unspecified_pk = True
supports_timezones = False
max_query_params = 999
supports_transactions = True
atomic_transactions = False
can_rollback_ddl = True
can_create_inline_fk = False
requires_literal_defaults = True
can_clone_databases = True
supports_temporal_subtraction = True
ignores_table_name_case = True
supports_cast_with_precision = False
time_cast_precision = 3
can_release_savepoints = True
has_case_insensitive_like = True
# Is "ALTER TABLE ... RENAME COLUMN" supported?
can_alter_table_rename_column = Database.sqlite_version_info >= (3, 25, 0)
# Is "ALTER TABLE ... DROP COLUMN" supported?
can_alter_table_drop_column = Database.sqlite_version_info >= (3, 35, 5)
supports_parentheses_in_compound = False
can_defer_constraint_checks = True
supports_over_clause = Database.sqlite_version_info >= (3, 25, 0)
supports_frame_range_fixed_distance = Database.sqlite_version_info >= (3, 28, 0)
supports_aggregate_filter_clause = Database.sqlite_version_info >= (3, 30, 1)
supports_order_by_nulls_modifier = Database.sqlite_version_info >= (3, 30, 0)
# NULLS LAST/FIRST emulation on < 3.30 requires subquery wrapping.
requires_compound_order_by_subquery = Database.sqlite_version_info < (3, 30)
order_by_nulls_first = True
supports_json_field_contains = False
supports_update_conflicts = Database.sqlite_version_info >= (3, 24, 0)
supports_update_conflicts_with_target = supports_update_conflicts
test_collations = {
"ci": "nocase",
"cs": "binary",
"non_default": "nocase",
}
django_test_expected_failures = {
# The django_format_dtdelta() function doesn't properly handle mixed
# Date/DateTime fields and timedeltas.
"expressions.tests.FTimeDeltaTests.test_mixed_comparisons1",
}
create_test_table_with_composite_primary_key = """
CREATE TABLE test_table_composite_pk (
column_1 INTEGER NOT NULL,
column_2 INTEGER NOT NULL,
PRIMARY KEY(column_1, column_2)
)
"""
@cached_property
def django_test_skips(self):
skips = {
"SQLite stores values rounded to 15 significant digits.": {
"model_fields.test_decimalfield.DecimalFieldTests."
"test_fetch_from_db_without_float_rounding",
},
"SQLite naively remakes the table on field alteration.": {
"schema.tests.SchemaTests.test_unique_no_unnecessary_fk_drops",
"schema.tests.SchemaTests.test_unique_and_reverse_m2m",
"schema.tests.SchemaTests."
"test_alter_field_default_doesnt_perform_queries",
"schema.tests.SchemaTests."
"test_rename_column_renames_deferred_sql_references",
},
"SQLite doesn't support negative precision for ROUND().": {
"db_functions.math.test_round.RoundTests."
"test_null_with_negative_precision",
"db_functions.math.test_round.RoundTests."
"test_decimal_with_negative_precision",
"db_functions.math.test_round.RoundTests."
"test_float_with_negative_precision",
"db_functions.math.test_round.RoundTests."
"test_integer_with_negative_precision",
},
}
if Database.sqlite_version_info < (3, 27):
skips.update(
{
"Nondeterministic failure on SQLite < 3.27.": {
"expressions_window.tests.WindowFunctionTests."
"test_subquery_row_range_rank",
},
}
)
if self.connection.is_in_memory_db():
skips.update(
{
"the sqlite backend's close() method is a no-op when using an "
"in-memory database": {
"servers.test_liveserverthread.LiveServerThreadTest."
"test_closes_connections",
"servers.tests.LiveServerTestCloseConnectionTest."
"test_closes_connections",
},
"For SQLite in-memory tests, closing the connection destroys"
"the database.": {
"test_utils.tests.AssertNumQueriesUponConnectionTests."
"test_ignores_connection_configuration_queries",
},
}
)
else:
skips.update(
{
"Only connections to in-memory SQLite databases are passed to the "
"server thread.": {
"servers.tests.LiveServerInMemoryDatabaseLockTest."
"test_in_memory_database_lock",
},
"multiprocessing's start method is checked only for in-memory "
"SQLite databases": {
"backends.sqlite.test_creation.TestDbSignatureTests."
"test_get_test_db_clone_settings_not_supported",
},
}
)
return skips
@cached_property
def supports_atomic_references_rename(self):
return Database.sqlite_version_info >= (3, 26, 0)
@cached_property
def introspected_field_types(self):
return {
**super().introspected_field_types,
"BigAutoField": "AutoField",
"DurationField": "BigIntegerField",
"GenericIPAddressField": "CharField",
"SmallAutoField": "AutoField",
}
@cached_property
def supports_json_field(self):
with self.connection.cursor() as cursor:
try:
with transaction.atomic(self.connection.alias):
cursor.execute('SELECT JSON(\'{"a": "b"}\')')
except OperationalError:
return False
return True
can_introspect_json_field = property(operator.attrgetter("supports_json_field"))
has_json_object_function = property(operator.attrgetter("supports_json_field"))
@cached_property
def can_return_columns_from_insert(self):
return Database.sqlite_version_info >= (3, 35)
can_return_rows_from_bulk_insert = property(
operator.attrgetter("can_return_columns_from_insert")
)
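

# Illustrative sketch (added; not part of Django): the supports_json_field
# probe above can be reproduced with the stdlib driver alone; SELECT JSON(...)
# raises OperationalError when SQLite was compiled without the JSON1
# extension.
def _demo_has_json1():
    import sqlite3

    conn = sqlite3.connect(":memory:")
    try:
        conn.execute('SELECT JSON(\'{"a": "b"}\')')
    except sqlite3.OperationalError:
        return False
    finally:
        conn.close()
    return True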
|
fe8d8de5cfe68ad874f4bac5dd1aa0e3934d876c4f2fd55c7c27452d20cdb4d4 | from collections import namedtuple
import sqlparse
from django.db import DatabaseError
from django.db.backends.base.introspection import BaseDatabaseIntrospection
from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo
from django.db.backends.base.introspection import TableInfo
from django.db.models import Index
from django.utils.regex_helper import _lazy_re_compile
FieldInfo = namedtuple(
"FieldInfo", BaseFieldInfo._fields + ("pk", "has_json_constraint")
)
field_size_re = _lazy_re_compile(r"^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$")
def get_field_size(name):
"""Extract the size number from a "varchar(11)" type name"""
m = field_size_re.search(name)
return int(m[1]) if m else None
# This light wrapper "fakes" a dictionary interface, because some SQLite data
# types include variables in them -- e.g. "varchar(30)" -- and can't be matched
# as a simple dictionary lookup.
class FlexibleFieldLookupDict:
# Maps SQL types to Django Field types. Some of the SQL types have multiple
# entries here because SQLite allows for anything and doesn't normalize the
# field type; it uses whatever was given.
base_data_types_reverse = {
"bool": "BooleanField",
"boolean": "BooleanField",
"smallint": "SmallIntegerField",
"smallint unsigned": "PositiveSmallIntegerField",
"smallinteger": "SmallIntegerField",
"int": "IntegerField",
"integer": "IntegerField",
"bigint": "BigIntegerField",
"integer unsigned": "PositiveIntegerField",
"bigint unsigned": "PositiveBigIntegerField",
"decimal": "DecimalField",
"real": "FloatField",
"text": "TextField",
"char": "CharField",
"varchar": "CharField",
"blob": "BinaryField",
"date": "DateField",
"datetime": "DateTimeField",
"time": "TimeField",
}
def __getitem__(self, key):
key = key.lower().split("(", 1)[0].strip()
return self.base_data_types_reverse[key]
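

# Illustrative usage sketch (added; not part of Django): lookups strip any
# parametrized size and normalize case before consulting the mapping.
def _demo_flexible_lookup():
    lookup = FlexibleFieldLookupDict()
    assert lookup["varchar(30)"] == "CharField"
    assert lookup["INTEGER"] == "IntegerField"
    assert lookup["bigint unsigned"] == "PositiveBigIntegerField"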
class DatabaseIntrospection(BaseDatabaseIntrospection):
data_types_reverse = FlexibleFieldLookupDict()
def get_field_type(self, data_type, description):
field_type = super().get_field_type(data_type, description)
if description.pk and field_type in {
"BigIntegerField",
"IntegerField",
"SmallIntegerField",
}:
# No support for BigAutoField or SmallAutoField as SQLite treats
# all integer primary keys as signed 64-bit integers.
return "AutoField"
if description.has_json_constraint:
return "JSONField"
return field_type
def get_table_list(self, cursor):
"""Return a list of table and view names in the current database."""
# Skip the sqlite_sequence system table used for autoincrement key
# generation.
cursor.execute(
"""
SELECT name, type FROM sqlite_master
WHERE type in ('table', 'view') AND NOT name='sqlite_sequence'
ORDER BY name"""
)
return [TableInfo(row[0], row[1][0]) for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"""
Return a description of the table with the DB-API cursor.description
interface.
"""
cursor.execute(
"PRAGMA table_info(%s)" % self.connection.ops.quote_name(table_name)
)
table_info = cursor.fetchall()
if not table_info:
raise DatabaseError(f"Table {table_name} does not exist (empty pragma).")
collations = self._get_column_collations(cursor, table_name)
json_columns = set()
if self.connection.features.can_introspect_json_field:
for line in table_info:
column = line[1]
json_constraint_sql = '%%json_valid("%s")%%' % column
has_json_constraint = cursor.execute(
"""
SELECT sql
FROM sqlite_master
WHERE
type = 'table' AND
name = %s AND
sql LIKE %s
""",
[table_name, json_constraint_sql],
).fetchone()
if has_json_constraint:
json_columns.add(column)
return [
FieldInfo(
name,
data_type,
get_field_size(data_type),
None,
None,
None,
not notnull,
default,
collations.get(name),
pk == 1,
name in json_columns,
)
for cid, name, data_type, notnull, default, pk in table_info
]
def get_sequences(self, cursor, table_name, table_fields=()):
pk_col = self.get_primary_key_column(cursor, table_name)
return [{"table": table_name, "column": pk_col}]
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {column_name: (ref_column_name, ref_table_name)}
representing all foreign keys in the given table.
"""
cursor.execute(
"PRAGMA foreign_key_list(%s)" % self.connection.ops.quote_name(table_name)
)
return {
column_name: (ref_column_name, ref_table_name)
for (
_,
_,
ref_table_name,
column_name,
ref_column_name,
*_,
) in cursor.fetchall()
}
def get_primary_key_columns(self, cursor, table_name):
cursor.execute(
"PRAGMA table_info(%s)" % self.connection.ops.quote_name(table_name)
)
return [name for _, name, *_, pk in cursor.fetchall() if pk]
def _parse_column_or_constraint_definition(self, tokens, columns):
token = None
is_constraint_definition = None
field_name = None
constraint_name = None
unique = False
unique_columns = []
check = False
check_columns = []
braces_deep = 0
for token in tokens:
if token.match(sqlparse.tokens.Punctuation, "("):
braces_deep += 1
elif token.match(sqlparse.tokens.Punctuation, ")"):
braces_deep -= 1
if braces_deep < 0:
# End of columns and constraints for table definition.
break
elif braces_deep == 0 and token.match(sqlparse.tokens.Punctuation, ","):
# End of current column or constraint definition.
break
# Detect column or constraint definition by first token.
if is_constraint_definition is None:
is_constraint_definition = token.match(
sqlparse.tokens.Keyword, "CONSTRAINT"
)
if is_constraint_definition:
continue
if is_constraint_definition:
# Detect constraint name by second token.
if constraint_name is None:
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
constraint_name = token.value
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
constraint_name = token.value[1:-1]
# Start constraint columns parsing after UNIQUE keyword.
if token.match(sqlparse.tokens.Keyword, "UNIQUE"):
unique = True
unique_braces_deep = braces_deep
elif unique:
if unique_braces_deep == braces_deep:
if unique_columns:
# Stop constraint parsing.
unique = False
continue
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
unique_columns.append(token.value)
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
unique_columns.append(token.value[1:-1])
else:
# Detect field name by first token.
if field_name is None:
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
field_name = token.value
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
field_name = token.value[1:-1]
if token.match(sqlparse.tokens.Keyword, "UNIQUE"):
unique_columns = [field_name]
# Start constraint columns parsing after CHECK keyword.
if token.match(sqlparse.tokens.Keyword, "CHECK"):
check = True
check_braces_deep = braces_deep
elif check:
if check_braces_deep == braces_deep:
if check_columns:
# Stop constraint parsing.
check = False
continue
if token.ttype in (sqlparse.tokens.Name, sqlparse.tokens.Keyword):
if token.value in columns:
check_columns.append(token.value)
elif token.ttype == sqlparse.tokens.Literal.String.Symbol:
if token.value[1:-1] in columns:
check_columns.append(token.value[1:-1])
unique_constraint = (
{
"unique": True,
"columns": unique_columns,
"primary_key": False,
"foreign_key": None,
"check": False,
"index": False,
}
if unique_columns
else None
)
check_constraint = (
{
"check": True,
"columns": check_columns,
"primary_key": False,
"unique": False,
"foreign_key": None,
"index": False,
}
if check_columns
else None
)
return constraint_name, unique_constraint, check_constraint, token
def _parse_table_constraints(self, sql, columns):
        # Check constraint parsing is based on the SQLite syntax diagram.
# https://www.sqlite.org/syntaxdiagrams.html#table-constraint
statement = sqlparse.parse(sql)[0]
constraints = {}
        unnamed_constraints_index = 0
tokens = (token for token in statement.flatten() if not token.is_whitespace)
# Go to columns and constraint definition
for token in tokens:
if token.match(sqlparse.tokens.Punctuation, "("):
break
# Parse columns and constraint definition
while True:
(
constraint_name,
unique,
check,
end_token,
) = self._parse_column_or_constraint_definition(tokens, columns)
if unique:
if constraint_name:
constraints[constraint_name] = unique
else:
                    unnamed_constraints_index += 1
constraints[
"__unnamed_constraint_%s__" % unnamed_constrains_index
] = unique
if check:
if constraint_name:
constraints[constraint_name] = check
else:
                    unnamed_constraints_index += 1
constraints[
"__unnamed_constraint_%s__" % unnamed_constrains_index
] = check
if end_token.match(sqlparse.tokens.Punctuation, ")"):
break
return constraints
def get_constraints(self, cursor, table_name):
"""
Retrieve any constraints or keys (unique, pk, fk, check, index) across
one or more columns.
"""
constraints = {}
# Find inline check constraints.
try:
table_schema = cursor.execute(
"SELECT sql FROM sqlite_master WHERE type='table' and name=%s"
% (self.connection.ops.quote_name(table_name),)
).fetchone()[0]
except TypeError:
# table_name is a view.
pass
else:
columns = {
info.name for info in self.get_table_description(cursor, table_name)
}
constraints.update(self._parse_table_constraints(table_schema, columns))
# Get the index info
cursor.execute(
"PRAGMA index_list(%s)" % self.connection.ops.quote_name(table_name)
)
for row in cursor.fetchall():
# SQLite 3.8.9+ has 5 columns, however older versions only give 3
# columns. Discard last 2 columns if there.
number, index, unique = row[:3]
cursor.execute(
"SELECT sql FROM sqlite_master "
"WHERE type='index' AND name=%s" % self.connection.ops.quote_name(index)
)
# There's at most one row.
(sql,) = cursor.fetchone() or (None,)
# Inline constraints are already detected in
# _parse_table_constraints(). The reasons to avoid fetching inline
# constraints from `PRAGMA index_list` are:
# - Inline constraints can have a different name and information
# than what `PRAGMA index_list` gives.
# - Not all inline constraints may appear in `PRAGMA index_list`.
if not sql:
# An inline constraint
continue
# Get the index info for that index
cursor.execute(
"PRAGMA index_info(%s)" % self.connection.ops.quote_name(index)
)
for index_rank, column_rank, column in cursor.fetchall():
if index not in constraints:
constraints[index] = {
"columns": [],
"primary_key": False,
"unique": bool(unique),
"foreign_key": None,
"check": False,
"index": True,
}
constraints[index]["columns"].append(column)
# Add type and column orders for indexes
if constraints[index]["index"]:
# SQLite doesn't support any index type other than b-tree
constraints[index]["type"] = Index.suffix
orders = self._get_index_columns_orders(sql)
if orders is not None:
constraints[index]["orders"] = orders
# Get the PK
pk_columns = self.get_primary_key_columns(cursor, table_name)
if pk_columns:
# SQLite doesn't actually give a name to the PK constraint,
# so we invent one. This is fine, as the SQLite backend never
# deletes PK constraints by name, as you can't delete constraints
# in SQLite; we remake the table with a new PK instead.
constraints["__primary__"] = {
"columns": pk_columns,
"primary_key": True,
"unique": False, # It's not actually a unique constraint.
"foreign_key": None,
"check": False,
"index": False,
}
relations = enumerate(self.get_relations(cursor, table_name).items())
constraints.update(
{
f"fk_{index}": {
"columns": [column_name],
"primary_key": False,
"unique": False,
"foreign_key": (ref_table_name, ref_column_name),
"check": False,
"index": False,
}
for index, (column_name, (ref_column_name, ref_table_name)) in relations
}
)
return constraints
def _get_index_columns_orders(self, sql):
tokens = sqlparse.parse(sql)[0]
for token in tokens:
if isinstance(token, sqlparse.sql.Parenthesis):
columns = str(token).strip("()").split(", ")
return ["DESC" if info.endswith("DESC") else "ASC" for info in columns]
return None
def _get_column_collations(self, cursor, table_name):
row = cursor.execute(
"""
SELECT sql
FROM sqlite_master
WHERE type = 'table' AND name = %s
""",
[table_name],
).fetchone()
if not row:
return {}
sql = row[0]
columns = str(sqlparse.parse(sql)[0][-1]).strip("()").split(", ")
collations = {}
for column in columns:
tokens = column[1:].split()
column_name = tokens[0].strip('"')
for index, token in enumerate(tokens):
if token == "COLLATE":
collation = tokens[index + 1]
break
else:
collation = None
collations[column_name] = collation
return collations
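

# Illustrative sketch (added; not part of Django): the PRAGMA calls relied on
# above, exercised against a throwaway in-memory database with the stdlib
# driver. Each table_info row is the (cid, name, type, notnull, dflt_value,
# pk) 6-tuple unpacked in get_table_description().
def _demo_pragmas():
    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE t (id INTEGER PRIMARY KEY, name VARCHAR(30) NOT NULL)"
    )
    info = conn.execute("PRAGMA table_info(t)").fetchall()
    conn.close()
    return info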
|
c249f14d4e7a96ea84ab564b8447fa60e322c5d92d82176520fcf7d201e05a60 | """
SQLite backend for the sqlite3 module in the standard library.
"""
import datetime
import decimal
import warnings
from collections.abc import Mapping
from itertools import chain, tee
from sqlite3 import dbapi2 as Database
from django.core.exceptions import ImproperlyConfigured
from django.db import IntegrityError
from django.db.backends.base.base import BaseDatabaseWrapper
from django.utils.asyncio import async_unsafe
from django.utils.dateparse import parse_date, parse_datetime, parse_time
from django.utils.regex_helper import _lazy_re_compile
from ._functions import register as register_functions
from .client import DatabaseClient
from .creation import DatabaseCreation
from .features import DatabaseFeatures
from .introspection import DatabaseIntrospection
from .operations import DatabaseOperations
from .schema import DatabaseSchemaEditor
def decoder(conv_func):
"""
Convert bytestrings from Python's sqlite3 interface to a regular string.
"""
return lambda s: conv_func(s.decode())
def adapt_date(val):
return val.isoformat()
def adapt_datetime(val):
return val.isoformat(" ")
Database.register_converter("bool", b"1".__eq__)
Database.register_converter("date", decoder(parse_date))
Database.register_converter("time", decoder(parse_time))
Database.register_converter("datetime", decoder(parse_datetime))
Database.register_converter("timestamp", decoder(parse_datetime))
Database.register_adapter(decimal.Decimal, str)
Database.register_adapter(datetime.date, adapt_date)
Database.register_adapter(datetime.datetime, adapt_datetime)
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = "sqlite"
display_name = "SQLite"
# SQLite doesn't actually support most of these types, but it "does the right
# thing" given more verbose field definitions, so leave them as is so that
# schema inspection is more useful.
data_types = {
"AutoField": "integer",
"BigAutoField": "integer",
"BinaryField": "BLOB",
"BooleanField": "bool",
"CharField": "varchar(%(max_length)s)",
"DateField": "date",
"DateTimeField": "datetime",
"DecimalField": "decimal",
"DurationField": "bigint",
"FileField": "varchar(%(max_length)s)",
"FilePathField": "varchar(%(max_length)s)",
"FloatField": "real",
"IntegerField": "integer",
"BigIntegerField": "bigint",
"IPAddressField": "char(15)",
"GenericIPAddressField": "char(39)",
"JSONField": "text",
"OneToOneField": "integer",
"PositiveBigIntegerField": "bigint unsigned",
"PositiveIntegerField": "integer unsigned",
"PositiveSmallIntegerField": "smallint unsigned",
"SlugField": "varchar(%(max_length)s)",
"SmallAutoField": "integer",
"SmallIntegerField": "smallint",
"TextField": "text",
"TimeField": "time",
"UUIDField": "char(32)",
}
data_type_check_constraints = {
"PositiveBigIntegerField": '"%(column)s" >= 0',
"JSONField": '(JSON_VALID("%(column)s") OR "%(column)s" IS NULL)',
"PositiveIntegerField": '"%(column)s" >= 0',
"PositiveSmallIntegerField": '"%(column)s" >= 0',
}
data_types_suffix = {
"AutoField": "AUTOINCREMENT",
"BigAutoField": "AUTOINCREMENT",
"SmallAutoField": "AUTOINCREMENT",
}
# SQLite requires LIKE statements to include an ESCAPE clause if the value
# being escaped has a percent or underscore in it.
# See https://www.sqlite.org/lang_expr.html for an explanation.
operators = {
"exact": "= %s",
"iexact": "LIKE %s ESCAPE '\\'",
"contains": "LIKE %s ESCAPE '\\'",
"icontains": "LIKE %s ESCAPE '\\'",
"regex": "REGEXP %s",
"iregex": "REGEXP '(?i)' || %s",
"gt": "> %s",
"gte": ">= %s",
"lt": "< %s",
"lte": "<= %s",
"startswith": "LIKE %s ESCAPE '\\'",
"endswith": "LIKE %s ESCAPE '\\'",
"istartswith": "LIKE %s ESCAPE '\\'",
"iendswith": "LIKE %s ESCAPE '\\'",
}
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
    # In those cases, special characters for LIKE operators (e.g. \, *, _)
    # should be escaped on the database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
pattern_ops = {
"contains": r"LIKE '%%' || {} || '%%' ESCAPE '\'",
"icontains": r"LIKE '%%' || UPPER({}) || '%%' ESCAPE '\'",
"startswith": r"LIKE {} || '%%' ESCAPE '\'",
"istartswith": r"LIKE UPPER({}) || '%%' ESCAPE '\'",
"endswith": r"LIKE '%%' || {} ESCAPE '\'",
"iendswith": r"LIKE '%%' || UPPER({}) ESCAPE '\'",
}
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
# Classes instantiated in __init__().
client_class = DatabaseClient
creation_class = DatabaseCreation
features_class = DatabaseFeatures
introspection_class = DatabaseIntrospection
ops_class = DatabaseOperations
def get_connection_params(self):
settings_dict = self.settings_dict
if not settings_dict["NAME"]:
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME value."
)
kwargs = {
"database": settings_dict["NAME"],
"detect_types": Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
**settings_dict["OPTIONS"],
}
# Always allow the underlying SQLite connection to be shareable
# between multiple threads. The safe-guarding will be handled at a
# higher level by the `BaseDatabaseWrapper.allow_thread_sharing`
# property. This is necessary as the shareability is disabled by
# default in sqlite3 and it cannot be changed once a connection is
# opened.
if "check_same_thread" in kwargs and kwargs["check_same_thread"]:
warnings.warn(
"The `check_same_thread` option was provided and set to "
"True. It will be overridden with False. Use the "
"`DatabaseWrapper.allow_thread_sharing` property instead "
"for controlling thread shareability.",
RuntimeWarning,
)
kwargs.update({"check_same_thread": False, "uri": True})
return kwargs
def get_database_version(self):
return self.Database.sqlite_version_info
@async_unsafe
def get_new_connection(self, conn_params):
conn = Database.connect(**conn_params)
register_functions(conn)
conn.execute("PRAGMA foreign_keys = ON")
        # The SQLite bundled with macOS enables legacy_alter_table by default,
        # which prevents atomic table renames (the
        # supports_atomic_references_rename feature).
conn.execute("PRAGMA legacy_alter_table = OFF")
return conn
def create_cursor(self, name=None):
return self.connection.cursor(factory=SQLiteCursorWrapper)
@async_unsafe
def close(self):
self.validate_thread_sharing()
# If database is in memory, closing the connection destroys the
# database. To prevent accidental data loss, ignore close requests on
# an in-memory db.
if not self.is_in_memory_db():
BaseDatabaseWrapper.close(self)
def _savepoint_allowed(self):
# When 'isolation_level' is not None, sqlite3 commits before each
# savepoint; it's a bug. When it is None, savepoints don't make sense
# because autocommit is enabled. The only exception is inside 'atomic'
# blocks. To work around that bug, on SQLite, 'atomic' starts a
# transaction explicitly rather than simply disable autocommit.
return self.in_atomic_block
def _set_autocommit(self, autocommit):
if autocommit:
level = None
else:
# sqlite3's internal default is ''. It's different from None.
# See Modules/_sqlite/connection.c.
level = ""
# 'isolation_level' is a misleading API.
# SQLite always runs at the SERIALIZABLE isolation level.
with self.wrap_database_errors:
self.connection.isolation_level = level
def disable_constraint_checking(self):
with self.cursor() as cursor:
cursor.execute("PRAGMA foreign_keys = OFF")
# Foreign key constraints cannot be turned off while in a multi-
# statement transaction. Fetch the current state of the pragma
# to determine if constraints are effectively disabled.
enabled = cursor.execute("PRAGMA foreign_keys").fetchone()[0]
return not bool(enabled)
def enable_constraint_checking(self):
with self.cursor() as cursor:
cursor.execute("PRAGMA foreign_keys = ON")
def check_constraints(self, table_names=None):
"""
Check each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
"""
with self.cursor() as cursor:
if table_names is None:
violations = cursor.execute("PRAGMA foreign_key_check").fetchall()
else:
violations = chain.from_iterable(
cursor.execute(
"PRAGMA foreign_key_check(%s)" % self.ops.quote_name(table_name)
).fetchall()
for table_name in table_names
)
# See https://www.sqlite.org/pragma.html#pragma_foreign_key_check
for (
table_name,
rowid,
referenced_table_name,
foreign_key_index,
) in violations:
foreign_key = cursor.execute(
"PRAGMA foreign_key_list(%s)" % self.ops.quote_name(table_name)
).fetchall()[foreign_key_index]
column_name, referenced_column_name = foreign_key[3:5]
primary_key_column_name = self.introspection.get_primary_key_column(
cursor, table_name
)
primary_key_value, bad_value = cursor.execute(
"SELECT %s, %s FROM %s WHERE rowid = %%s"
% (
self.ops.quote_name(primary_key_column_name),
self.ops.quote_name(column_name),
self.ops.quote_name(table_name),
),
(rowid,),
).fetchone()
raise IntegrityError(
"The row in table '%s' with primary key '%s' has an "
"invalid foreign key: %s.%s contains a value '%s' that "
"does not have a corresponding value in %s.%s."
% (
table_name,
primary_key_value,
table_name,
column_name,
bad_value,
referenced_table_name,
referenced_column_name,
)
)
def is_usable(self):
return True
def _start_transaction_under_autocommit(self):
"""
Start a transaction explicitly in autocommit mode.
Staying in autocommit mode works around a bug of sqlite3 that breaks
savepoints when autocommit is disabled.
"""
self.cursor().execute("BEGIN")
def is_in_memory_db(self):
return self.creation.is_in_memory_db(self.settings_dict["NAME"])
FORMAT_QMARK_REGEX = _lazy_re_compile(r"(?<!%)%s")
class SQLiteCursorWrapper(Database.Cursor):
"""
Django uses the "format" and "pyformat" styles, but Python's sqlite3 module
supports neither of these styles.
This wrapper performs the following conversions:
- "format" style to "qmark" style
- "pyformat" style to "named" style
In both cases, if you want to use a literal "%s", you'll need to use "%%s".
"""
def execute(self, query, params=None):
if params is None:
return super().execute(query)
# Extract names if params is a mapping, i.e. "pyformat" style is used.
param_names = list(params) if isinstance(params, Mapping) else None
query = self.convert_query(query, param_names=param_names)
return super().execute(query, params)
def executemany(self, query, param_list):
# Extract names if params is a mapping, i.e. "pyformat" style is used.
# Peek carefully as a generator can be passed instead of a list/tuple.
peekable, param_list = tee(iter(param_list))
if (params := next(peekable, None)) and isinstance(params, Mapping):
param_names = list(params)
else:
param_names = None
query = self.convert_query(query, param_names=param_names)
return super().executemany(query, param_list)
def convert_query(self, query, *, param_names=None):
if param_names is None:
# Convert from "format" style to "qmark" style.
return FORMAT_QMARK_REGEX.sub("?", query).replace("%%", "%")
else:
# Convert from "pyformat" style to "named" style.
return query % {name: f":{name}" for name in param_names}
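

# Illustrative sketch (added; not part of Django): the two conversions
# performed by convert_query(), exercised directly against the regex defined
# above.
def _demo_convert_query():
    # "format" -> "qmark": %s becomes ?, while an escaped %%s becomes a
    # literal %s.
    converted = FORMAT_QMARK_REGEX.sub("?", "SELECT %s, %%s").replace("%%", "%")
    assert converted == "SELECT ?, %s"
    # "pyformat" -> "named": %(name)s becomes :name via string interpolation.
    assert "SELECT %(a)s" % {"a": ":a"} == "SELECT :a"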
|
4a77175d87b70e51b288081bd808ef79dfe8cf7c146654682e3cc5dffbb673e4 | import logging
import sys
import tempfile
import traceback
from asgiref.sync import ThreadSensitiveContext, sync_to_async
from django.conf import settings
from django.core import signals
from django.core.exceptions import RequestAborted, RequestDataTooBig
from django.core.handlers import base
from django.http import (
FileResponse,
HttpRequest,
HttpResponse,
HttpResponseBadRequest,
HttpResponseServerError,
QueryDict,
parse_cookie,
)
from django.urls import set_script_prefix
from django.utils.asyncio import aclosing
from django.utils.functional import cached_property
logger = logging.getLogger("django.request")
class ASGIRequest(HttpRequest):
"""
Custom request subclass that decodes from an ASGI-standard request dict
and wraps request body handling.
"""
# Number of seconds until a Request gives up on trying to read a request
# body and aborts.
body_receive_timeout = 60
def __init__(self, scope, body_file):
self.scope = scope
self._post_parse_error = False
self._read_started = False
self.resolver_match = None
self.script_name = self.scope.get("root_path", "")
if self.script_name and scope["path"].startswith(self.script_name):
# TODO: Better is-prefix checking, slash handling?
self.path_info = scope["path"][len(self.script_name) :]
else:
self.path_info = scope["path"]
        # The Django path differs from the ASGI scope path; it should
        # combine the script name and path info.
if self.script_name:
self.path = "%s/%s" % (
self.script_name.rstrip("/"),
self.path_info.replace("/", "", 1),
)
else:
self.path = scope["path"]
# HTTP basics.
self.method = self.scope["method"].upper()
# Ensure query string is encoded correctly.
query_string = self.scope.get("query_string", "")
if isinstance(query_string, bytes):
query_string = query_string.decode()
self.META = {
"REQUEST_METHOD": self.method,
"QUERY_STRING": query_string,
"SCRIPT_NAME": self.script_name,
"PATH_INFO": self.path_info,
# WSGI-expecting code will need these for a while
"wsgi.multithread": True,
"wsgi.multiprocess": True,
}
if self.scope.get("client"):
self.META["REMOTE_ADDR"] = self.scope["client"][0]
self.META["REMOTE_HOST"] = self.META["REMOTE_ADDR"]
self.META["REMOTE_PORT"] = self.scope["client"][1]
if self.scope.get("server"):
self.META["SERVER_NAME"] = self.scope["server"][0]
self.META["SERVER_PORT"] = str(self.scope["server"][1])
else:
self.META["SERVER_NAME"] = "unknown"
self.META["SERVER_PORT"] = "0"
# Headers go into META.
for name, value in self.scope.get("headers", []):
name = name.decode("latin1")
if name == "content-length":
corrected_name = "CONTENT_LENGTH"
elif name == "content-type":
corrected_name = "CONTENT_TYPE"
else:
corrected_name = "HTTP_%s" % name.upper().replace("-", "_")
            # HTTP/2 says only ASCII chars are allowed in headers, but
            # decode latin1 just in case.
value = value.decode("latin1")
if corrected_name in self.META:
value = self.META[corrected_name] + "," + value
self.META[corrected_name] = value
# Pull out request encoding, if provided.
self._set_content_type_params(self.META)
# Directly assign the body file to be our stream.
self._stream = body_file
# Other bits.
self.resolver_match = None
@cached_property
def GET(self):
return QueryDict(self.META["QUERY_STRING"])
def _get_scheme(self):
return self.scope.get("scheme") or super()._get_scheme()
def _get_post(self):
if not hasattr(self, "_post"):
self._load_post_and_files()
return self._post
def _set_post(self, post):
self._post = post
def _get_files(self):
if not hasattr(self, "_files"):
self._load_post_and_files()
return self._files
POST = property(_get_post, _set_post)
FILES = property(_get_files)
@cached_property
def COOKIES(self):
return parse_cookie(self.META.get("HTTP_COOKIE", ""))
def close(self):
super().close()
self._stream.close()
class ASGIHandler(base.BaseHandler):
"""Handler for ASGI requests."""
request_class = ASGIRequest
# Size to chunk response bodies into for multiple response messages.
chunk_size = 2**16
def __init__(self):
super().__init__()
self.load_middleware(is_async=True)
async def __call__(self, scope, receive, send):
"""
Async entrypoint - parses the request and hands off to get_response.
"""
# Serve only HTTP connections.
# FIXME: Allow to override this.
if scope["type"] != "http":
raise ValueError(
"Django can only handle ASGI/HTTP connections, not %s." % scope["type"]
)
async with ThreadSensitiveContext():
await self.handle(scope, receive, send)
async def handle(self, scope, receive, send):
"""
Handles the ASGI request. Called via the __call__ method.
"""
# Receive the HTTP request body as a stream object.
try:
body_file = await self.read_body(receive)
except RequestAborted:
return
# Request is complete and can be served.
set_script_prefix(self.get_script_prefix(scope))
await sync_to_async(signals.request_started.send, thread_sensitive=True)(
sender=self.__class__, scope=scope
)
# Get the request and check for basic issues.
request, error_response = self.create_request(scope, body_file)
if request is None:
body_file.close()
await self.send_response(error_response, send)
return
# Get the response, using the async mode of BaseHandler.
response = await self.get_response_async(request)
response._handler_class = self.__class__
        # Increase chunk size on file responses (ASGI servers handle
        # low-level chunking).
if isinstance(response, FileResponse):
response.block_size = self.chunk_size
# Send the response.
await self.send_response(response, send)
async def read_body(self, receive):
"""Reads an HTTP body from an ASGI connection."""
        # Use a tempfile that automatically rolls over to a disk file as it
        # fills up.
body_file = tempfile.SpooledTemporaryFile(
max_size=settings.FILE_UPLOAD_MAX_MEMORY_SIZE, mode="w+b"
)
while True:
message = await receive()
if message["type"] == "http.disconnect":
body_file.close()
# Early client disconnect.
raise RequestAborted()
# Add a body chunk from the message, if provided.
if "body" in message:
body_file.write(message["body"])
# Quit out if that's the end.
if not message.get("more_body", False):
break
body_file.seek(0)
return body_file
def create_request(self, scope, body_file):
"""
        Create the Request object and return either (request, None) or
        (None, response) if there is an error response.
"""
try:
return self.request_class(scope, body_file), None
except UnicodeDecodeError:
logger.warning(
"Bad Request (UnicodeDecodeError)",
exc_info=sys.exc_info(),
extra={"status_code": 400},
)
return None, HttpResponseBadRequest()
except RequestDataTooBig:
return None, HttpResponse("413 Payload too large", status=413)
def handle_uncaught_exception(self, request, resolver, exc_info):
"""Last-chance handler for exceptions."""
# There's no WSGI server to catch the exception further up
# if this fails, so translate it into a plain text response.
try:
return super().handle_uncaught_exception(request, resolver, exc_info)
except Exception:
return HttpResponseServerError(
traceback.format_exc() if settings.DEBUG else "Internal Server Error",
content_type="text/plain",
)
async def send_response(self, response, send):
"""Encode and send a response out over ASGI."""
# Collect cookies into headers. Have to preserve header case as there
# are some non-RFC compliant clients that require e.g. Content-Type.
response_headers = []
for header, value in response.items():
if isinstance(header, str):
header = header.encode("ascii")
if isinstance(value, str):
value = value.encode("latin1")
response_headers.append((bytes(header), bytes(value)))
for c in response.cookies.values():
response_headers.append(
(b"Set-Cookie", c.output(header="").encode("ascii").strip())
)
# Initial response message.
await send(
{
"type": "http.response.start",
"status": response.status_code,
"headers": response_headers,
}
)
# Streaming responses need to be pinned to their iterator.
if response.streaming:
# - Consume via `__aiter__` and not `streaming_content` directly, to
# allow mapping of a sync iterator.
# - Use aclosing() when consuming aiter.
# See https://github.com/python/cpython/commit/6e8dcda
async with aclosing(response.__aiter__()) as content:
async for part in content:
for chunk, _ in self.chunk_bytes(part):
await send(
{
"type": "http.response.body",
"body": chunk,
# Ignore "more" as there may be more parts; instead,
# use an empty final closing message with False.
"more_body": True,
}
)
# Final closing message.
await send({"type": "http.response.body"})
# Other responses just need chunking.
else:
# Yield chunks of response.
for chunk, last in self.chunk_bytes(response.content):
await send(
{
"type": "http.response.body",
"body": chunk,
"more_body": not last,
}
)
await sync_to_async(response.close, thread_sensitive=True)()
@classmethod
def chunk_bytes(cls, data):
"""
Chunks some data up so it can be sent in reasonable size messages.
Yields (chunk, last_chunk) tuples.
"""
position = 0
if not data:
yield data, True
return
while position < len(data):
yield (
data[position : position + cls.chunk_size],
(position + cls.chunk_size) >= len(data),
)
position += cls.chunk_size
def get_script_prefix(self, scope):
"""
Return the script prefix to use from either the scope or a setting.
"""
if settings.FORCE_SCRIPT_NAME:
return settings.FORCE_SCRIPT_NAME
return scope.get("root_path", "") or ""
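

# Illustrative sketch (added; not part of Django): chunk_bytes() yields
# (chunk, last_chunk) pairs sized to ASGIHandler.chunk_size (64 KiB), with
# only the final pair flagged as last.
def _demo_chunk_bytes():
    data = b"x" * 150_000
    chunks = list(ASGIHandler.chunk_bytes(data))
    assert [len(chunk) for chunk, _ in chunks] == [65536, 65536, 18928]
    assert [last for _, last in chunks] == [False, False, True]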
|
bb22a13f4d362838c493a3dbef14b703deae4c10741bade08049a701fba762e2 | import logging
import sys
from functools import wraps
from asgiref.sync import iscoroutinefunction, sync_to_async
from django.conf import settings
from django.core import signals
from django.core.exceptions import (
BadRequest,
PermissionDenied,
RequestDataTooBig,
SuspiciousOperation,
TooManyFieldsSent,
)
from django.http import Http404
from django.http.multipartparser import MultiPartParserError
from django.urls import get_resolver, get_urlconf
from django.utils.log import log_response
from django.views import debug
def convert_exception_to_response(get_response):
"""
Wrap the given get_response callable in exception-to-response conversion.
All exceptions will be converted. All known 4xx exceptions (Http404,
PermissionDenied, MultiPartParserError, SuspiciousOperation) will be
converted to the appropriate response, and all other exceptions will be
converted to 500 responses.
This decorator is automatically applied to all middleware to ensure that
no middleware leaks an exception and that the next middleware in the stack
can rely on getting a response instead of an exception.
"""
if iscoroutinefunction(get_response):
@wraps(get_response)
async def inner(request):
try:
response = await get_response(request)
except Exception as exc:
response = await sync_to_async(
response_for_exception, thread_sensitive=False
)(request, exc)
return response
return inner
else:
@wraps(get_response)
def inner(request):
try:
response = get_response(request)
except Exception as exc:
response = response_for_exception(request, exc)
return response
return inner
def response_for_exception(request, exc):
if isinstance(exc, Http404):
if settings.DEBUG:
response = debug.technical_404_response(request, exc)
else:
response = get_exception_response(
request, get_resolver(get_urlconf()), 404, exc
)
elif isinstance(exc, PermissionDenied):
response = get_exception_response(
request, get_resolver(get_urlconf()), 403, exc
)
log_response(
"Forbidden (Permission denied): %s",
request.path,
response=response,
request=request,
exception=exc,
)
elif isinstance(exc, MultiPartParserError):
response = get_exception_response(
request, get_resolver(get_urlconf()), 400, exc
)
log_response(
"Bad request (Unable to parse request body): %s",
request.path,
response=response,
request=request,
exception=exc,
)
elif isinstance(exc, BadRequest):
if settings.DEBUG:
response = debug.technical_500_response(
request, *sys.exc_info(), status_code=400
)
else:
response = get_exception_response(
request, get_resolver(get_urlconf()), 400, exc
)
log_response(
"%s: %s",
str(exc),
request.path,
response=response,
request=request,
exception=exc,
)
elif isinstance(exc, SuspiciousOperation):
if isinstance(exc, (RequestDataTooBig, TooManyFieldsSent)):
# POST data can't be accessed again, otherwise the original
# exception would be raised.
request._mark_post_parse_error()
# The request logger receives events for any problematic request
# The security logger receives events for all SuspiciousOperations
security_logger = logging.getLogger(
"django.security.%s" % exc.__class__.__name__
)
security_logger.error(
str(exc),
exc_info=exc,
extra={"status_code": 400, "request": request},
)
if settings.DEBUG:
response = debug.technical_500_response(
request, *sys.exc_info(), status_code=400
)
else:
response = get_exception_response(
request, get_resolver(get_urlconf()), 400, exc
)
else:
signals.got_request_exception.send(sender=None, request=request)
response = handle_uncaught_exception(
request, get_resolver(get_urlconf()), sys.exc_info()
)
log_response(
"%s: %s",
response.reason_phrase,
request.path,
response=response,
request=request,
exception=exc,
)
# Force a TemplateResponse to be rendered.
if not getattr(response, "is_rendered", True) and callable(
getattr(response, "render", None)
):
response = response.render()
return response
def get_exception_response(request, resolver, status_code, exception):
try:
callback = resolver.resolve_error_handler(status_code)
response = callback(request, exception=exception)
except Exception:
signals.got_request_exception.send(sender=None, request=request)
response = handle_uncaught_exception(request, resolver, sys.exc_info())
return response
def handle_uncaught_exception(request, resolver, exc_info):
"""
Processing for any otherwise uncaught exceptions (those that will
generate HTTP 500 responses).
"""
if settings.DEBUG_PROPAGATE_EXCEPTIONS:
raise
if settings.DEBUG:
return debug.technical_500_response(request, *exc_info)
# Return an HttpResponse that displays a friendly error message.
callback = resolver.resolve_error_handler(500)
return callback(request)
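

# Illustrative usage sketch (added; not part of Django): how the decorator
# above is typically applied. With DEBUG=False, a configured URLconf, and a
# request object in hand, the wrapped callable returns the 404 handler's
# response instead of letting Http404 propagate.
#
#     def get_response(request):
#         raise Http404("nothing here")
#
#     wrapped = convert_exception_to_response(get_response)
#     response = wrapped(request)  # an HttpResponseNotFound, not an exception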
|
8fb49c21b30b2986b9d87ab01d88487cc596006ef8f676ad72c06c550b2f0637 | import asyncio
import logging
import types
from asgiref.sync import async_to_sync, iscoroutinefunction, sync_to_async
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, MiddlewareNotUsed
from django.core.signals import request_finished
from django.db import connections, transaction
from django.urls import get_resolver, set_urlconf
from django.utils.log import log_response
from django.utils.module_loading import import_string
from .exception import convert_exception_to_response
logger = logging.getLogger("django.request")
class BaseHandler:
_view_middleware = None
_template_response_middleware = None
_exception_middleware = None
_middleware_chain = None
def load_middleware(self, is_async=False):
"""
Populate middleware lists from settings.MIDDLEWARE.
Must be called after the environment is fixed (see __call__ in subclasses).
"""
self._view_middleware = []
self._template_response_middleware = []
self._exception_middleware = []
get_response = self._get_response_async if is_async else self._get_response
handler = convert_exception_to_response(get_response)
handler_is_async = is_async
for middleware_path in reversed(settings.MIDDLEWARE):
middleware = import_string(middleware_path)
middleware_can_sync = getattr(middleware, "sync_capable", True)
middleware_can_async = getattr(middleware, "async_capable", False)
if not middleware_can_sync and not middleware_can_async:
raise RuntimeError(
"Middleware %s must have at least one of "
"sync_capable/async_capable set to True." % middleware_path
)
elif not handler_is_async and middleware_can_sync:
middleware_is_async = False
else:
middleware_is_async = middleware_can_async
try:
# Adapt handler, if needed.
adapted_handler = self.adapt_method_mode(
middleware_is_async,
handler,
handler_is_async,
debug=settings.DEBUG,
name="middleware %s" % middleware_path,
)
mw_instance = middleware(adapted_handler)
except MiddlewareNotUsed as exc:
if settings.DEBUG:
if str(exc):
logger.debug("MiddlewareNotUsed(%r): %s", middleware_path, exc)
else:
logger.debug("MiddlewareNotUsed: %r", middleware_path)
continue
else:
handler = adapted_handler
if mw_instance is None:
raise ImproperlyConfigured(
"Middleware factory %s returned None." % middleware_path
)
if hasattr(mw_instance, "process_view"):
self._view_middleware.insert(
0,
self.adapt_method_mode(is_async, mw_instance.process_view),
)
if hasattr(mw_instance, "process_template_response"):
self._template_response_middleware.append(
self.adapt_method_mode(
is_async, mw_instance.process_template_response
),
)
if hasattr(mw_instance, "process_exception"):
# The exception-handling stack is still always synchronous for
# now, so adapt that way.
self._exception_middleware.append(
self.adapt_method_mode(False, mw_instance.process_exception),
)
handler = convert_exception_to_response(mw_instance)
handler_is_async = middleware_is_async
# Adapt the top of the stack, if needed.
handler = self.adapt_method_mode(is_async, handler, handler_is_async)
# We only assign to this when initialization is complete as it is used
# as a flag for initialization being complete.
self._middleware_chain = handler
def adapt_method_mode(
self,
is_async,
method,
method_is_async=None,
debug=False,
name=None,
):
"""
Adapt a method to be in the correct "mode":
- If is_async is False:
- Synchronous methods are left alone
- Asynchronous methods are wrapped with async_to_sync
- If is_async is True:
- Synchronous methods are wrapped with sync_to_async()
- Asynchronous methods are left alone
"""
if method_is_async is None:
method_is_async = iscoroutinefunction(method)
if debug and not name:
name = name or "method %s()" % method.__qualname__
if is_async:
if not method_is_async:
if debug:
logger.debug("Synchronous handler adapted for %s.", name)
return sync_to_async(method, thread_sensitive=True)
elif method_is_async:
if debug:
logger.debug("Asynchronous handler adapted for %s.", name)
return async_to_sync(method)
return method
def get_response(self, request):
"""Return an HttpResponse object for the given HttpRequest."""
# Setup default url resolver for this thread
set_urlconf(settings.ROOT_URLCONF)
response = self._middleware_chain(request)
response._resource_closers.append(request.close)
if response.status_code >= 400:
log_response(
"%s: %s",
response.reason_phrase,
request.path,
response=response,
request=request,
)
return response
async def get_response_async(self, request):
"""
Asynchronous version of get_response.
Funneling everything, including WSGI, into a single async
get_response() is too slow. Avoid the context switch by using
a separate async response path.
"""
# Setup default url resolver for this thread.
set_urlconf(settings.ROOT_URLCONF)
response = await self._middleware_chain(request)
response._resource_closers.append(request.close)
if response.status_code >= 400:
await sync_to_async(log_response, thread_sensitive=False)(
"%s: %s",
response.reason_phrase,
request.path,
response=response,
request=request,
)
return response
def _get_response(self, request):
"""
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
"""
response = None
callback, callback_args, callback_kwargs = self.resolve_request(request)
# Apply view middleware
for middleware_method in self._view_middleware:
response = middleware_method(
request, callback, callback_args, callback_kwargs
)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
# If it is an asynchronous view, run it in a subthread.
if iscoroutinefunction(wrapped_callback):
wrapped_callback = async_to_sync(wrapped_callback)
try:
response = wrapped_callback(request, *callback_args, **callback_kwargs)
except Exception as e:
response = self.process_exception_by_middleware(e, request)
if response is None:
raise
# Complain if the view returned None (a common error).
self.check_response(response, callback)
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, "render") and callable(response.render):
for middleware_method in self._template_response_middleware:
response = middleware_method(request, response)
# Complain if the template response middleware returned None
# (a common error).
self.check_response(
response,
middleware_method,
name="%s.process_template_response"
% (middleware_method.__self__.__class__.__name__,),
)
try:
response = response.render()
except Exception as e:
response = self.process_exception_by_middleware(e, request)
if response is None:
raise
return response
async def _get_response_async(self, request):
"""
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
"""
response = None
callback, callback_args, callback_kwargs = self.resolve_request(request)
# Apply view middleware.
for middleware_method in self._view_middleware:
response = await middleware_method(
request, callback, callback_args, callback_kwargs
)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
# If it is a synchronous view, run it in a subthread
if not iscoroutinefunction(wrapped_callback):
wrapped_callback = sync_to_async(
wrapped_callback, thread_sensitive=True
)
try:
response = await wrapped_callback(
request, *callback_args, **callback_kwargs
)
except Exception as e:
response = await sync_to_async(
self.process_exception_by_middleware,
thread_sensitive=True,
)(e, request)
if response is None:
raise
# Complain if the view returned None or an uncalled coroutine.
self.check_response(response, callback)
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, "render") and callable(response.render):
for middleware_method in self._template_response_middleware:
response = await middleware_method(request, response)
# Complain if the template response middleware returned None or
# an uncalled coroutine.
self.check_response(
response,
middleware_method,
name="%s.process_template_response"
% (middleware_method.__self__.__class__.__name__,),
)
try:
if iscoroutinefunction(response.render):
response = await response.render()
else:
response = await sync_to_async(
response.render, thread_sensitive=True
)()
except Exception as e:
response = await sync_to_async(
self.process_exception_by_middleware,
thread_sensitive=True,
)(e, request)
if response is None:
raise
# Make sure the response is not a coroutine
if asyncio.iscoroutine(response):
raise RuntimeError("Response is still a coroutine.")
return response
def resolve_request(self, request):
"""
Retrieve/set the urlconf for the request. Return the view resolved,
with its args and kwargs.
"""
# Work out the resolver.
if hasattr(request, "urlconf"):
urlconf = request.urlconf
set_urlconf(urlconf)
resolver = get_resolver(urlconf)
else:
resolver = get_resolver()
# Resolve the view, and assign the match object back to the request.
resolver_match = resolver.resolve(request.path_info)
request.resolver_match = resolver_match
return resolver_match
def check_response(self, response, callback, name=None):
"""
Raise an error if the view returned None or an uncalled coroutine.
"""
if not (response is None or asyncio.iscoroutine(response)):
return
if not name:
if isinstance(callback, types.FunctionType): # FBV
name = "The view %s.%s" % (callback.__module__, callback.__name__)
else: # CBV
name = "The view %s.%s.__call__" % (
callback.__module__,
callback.__class__.__name__,
)
if response is None:
raise ValueError(
"%s didn't return an HttpResponse object. It returned None "
"instead." % name
)
elif asyncio.iscoroutine(response):
raise ValueError(
"%s didn't return an HttpResponse object. It returned an "
"unawaited coroutine instead. You may need to add an 'await' "
"into your view." % name
)
# Other utility methods.
def make_view_atomic(self, view):
non_atomic_requests = getattr(view, "_non_atomic_requests", set())
for alias, settings_dict in connections.settings.items():
if settings_dict["ATOMIC_REQUESTS"] and alias not in non_atomic_requests:
if iscoroutinefunction(view):
raise RuntimeError(
"You cannot use ATOMIC_REQUESTS with async views."
)
view = transaction.atomic(using=alias)(view)
return view
def process_exception_by_middleware(self, exception, request):
"""
Pass the exception to the exception middleware. If no middleware
returns a response for this exception, return None.
"""
for middleware_method in self._exception_middleware:
response = middleware_method(request, exception)
if response:
return response
return None
def reset_urlconf(sender, **kwargs):
"""Reset the URLconf after each request is finished."""
set_urlconf(None)
request_finished.connect(reset_urlconf)
|
0fb8e2bc40654b6bebbd213fdba7f1515b32f37dd55f427bdea3db60d5945b6b | import functools
import glob
import gzip
import os
import sys
import warnings
import zipfile
from itertools import product
from django.apps import apps
from django.conf import settings
from django.core import serializers
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import no_style
from django.core.management.utils import parse_apps_and_model_labels
from django.db import (
DEFAULT_DB_ALIAS,
DatabaseError,
IntegrityError,
connections,
router,
transaction,
)
from django.utils.functional import cached_property
try:
import bz2
has_bz2 = True
except ImportError:
has_bz2 = False
try:
import lzma
has_lzma = True
except ImportError:
has_lzma = False
READ_STDIN = "-"
class Command(BaseCommand):
help = "Installs the named fixture(s) in the database."
missing_args_message = (
"No database fixture specified. Please provide the path of at least "
"one fixture in the command line."
)
def add_arguments(self, parser):
parser.add_argument(
"args", metavar="fixture", nargs="+", help="Fixture labels."
)
parser.add_argument(
"--database",
default=DEFAULT_DB_ALIAS,
help=(
"Nominates a specific database to load fixtures into. Defaults to the "
'"default" database.'
),
)
parser.add_argument(
"--app",
dest="app_label",
help="Only look for fixtures in the specified app.",
)
parser.add_argument(
"--ignorenonexistent",
"-i",
action="store_true",
dest="ignore",
help="Ignores entries in the serialized data for fields that do not "
"currently exist on the model.",
)
parser.add_argument(
"-e",
"--exclude",
action="append",
default=[],
help=(
"An app_label or app_label.ModelName to exclude. Can be used multiple "
"times."
),
)
parser.add_argument(
"--format",
help="Format of serialized data when reading from stdin.",
)
def handle(self, *fixture_labels, **options):
self.ignore = options["ignore"]
self.using = options["database"]
self.app_label = options["app_label"]
self.verbosity = options["verbosity"]
self.excluded_models, self.excluded_apps = parse_apps_and_model_labels(
options["exclude"]
)
self.format = options["format"]
with transaction.atomic(using=self.using):
self.loaddata(fixture_labels)
# Close the DB connection -- unless we're still in a transaction. This
# is required as a workaround for an edge case in MySQL: if the same
# connection is used to create tables, load data, and query, the query
# can return incorrect results. See Django #7572, MySQL #37735.
if transaction.get_autocommit(self.using):
connections[self.using].close()
@cached_property
def compression_formats(self):
"""A dict mapping format names to (open function, mode arg) tuples."""
# Forcing binary mode may be revisited after dropping Python 2 support
# (see #22399).
compression_formats = {
None: (open, "rb"),
"gz": (gzip.GzipFile, "rb"),
"zip": (SingleZipReader, "r"),
"stdin": (lambda *args: sys.stdin, None),
}
if has_bz2:
compression_formats["bz2"] = (bz2.BZ2File, "r")
if has_lzma:
compression_formats["lzma"] = (lzma.LZMAFile, "r")
compression_formats["xz"] = (lzma.LZMAFile, "r")
return compression_formats
def reset_sequences(self, connection, models):
"""Reset database sequences for the given connection and models."""
sequence_sql = connection.ops.sequence_reset_sql(no_style(), models)
if sequence_sql:
if self.verbosity >= 2:
self.stdout.write("Resetting sequences")
with connection.cursor() as cursor:
for line in sequence_sql:
cursor.execute(line)
def loaddata(self, fixture_labels):
connection = connections[self.using]
# Keep a count of the installed objects and fixtures
self.fixture_count = 0
self.loaded_object_count = 0
self.fixture_object_count = 0
self.models = set()
self.serialization_formats = serializers.get_public_serializer_formats()
# Django's test suite repeatedly tries to load initial_data fixtures
# from apps that don't have any fixtures. Because disabling constraint
# checks can be expensive on some databases (especially MSSQL), bail
# out early if no fixtures are found.
for fixture_label in fixture_labels:
if self.find_fixtures(fixture_label):
break
else:
return
self.objs_with_deferred_fields = []
with connection.constraint_checks_disabled():
for fixture_label in fixture_labels:
self.load_label(fixture_label)
for obj in self.objs_with_deferred_fields:
obj.save_deferred_fields(using=self.using)
# Since we disabled constraint checks, we must manually check for
# any invalid keys that might have been added
table_names = [model._meta.db_table for model in self.models]
try:
connection.check_constraints(table_names=table_names)
except Exception as e:
e.args = ("Problem installing fixtures: %s" % e,)
raise
# If we found even one object in a fixture, we need to reset the
# database sequences.
if self.loaded_object_count > 0:
self.reset_sequences(connection, self.models)
if self.verbosity >= 1:
if self.fixture_object_count == self.loaded_object_count:
self.stdout.write(
"Installed %d object(s) from %d fixture(s)"
% (self.loaded_object_count, self.fixture_count)
)
else:
self.stdout.write(
"Installed %d object(s) (of %d) from %d fixture(s)"
% (
self.loaded_object_count,
self.fixture_object_count,
self.fixture_count,
)
)
def save_obj(self, obj):
"""Save an object if permitted."""
if (
obj.object._meta.app_config in self.excluded_apps
or type(obj.object) in self.excluded_models
):
return False
saved = False
if router.allow_migrate_model(self.using, obj.object.__class__):
saved = True
self.models.add(obj.object.__class__)
try:
obj.save(using=self.using)
# psycopg raises ValueError if data contains NUL chars.
except (DatabaseError, IntegrityError, ValueError) as e:
e.args = (
"Could not load %(object_label)s(pk=%(pk)s): %(error_msg)s"
% {
"object_label": obj.object._meta.label,
"pk": obj.object.pk,
"error_msg": e,
},
)
raise
if obj.deferred_fields:
self.objs_with_deferred_fields.append(obj)
return saved
def load_label(self, fixture_label):
"""Load fixtures files for a given label."""
show_progress = self.verbosity >= 3
for fixture_file, fixture_dir, fixture_name in self.find_fixtures(
fixture_label
):
_, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
open_method, mode = self.compression_formats[cmp_fmt]
fixture = open_method(fixture_file, mode)
self.fixture_count += 1
objects_in_fixture = 0
loaded_objects_in_fixture = 0
if self.verbosity >= 2:
self.stdout.write(
"Installing %s fixture '%s' from %s."
% (ser_fmt, fixture_name, humanize(fixture_dir))
)
try:
objects = serializers.deserialize(
ser_fmt,
fixture,
using=self.using,
ignorenonexistent=self.ignore,
handle_forward_references=True,
)
for obj in objects:
objects_in_fixture += 1
if self.save_obj(obj):
loaded_objects_in_fixture += 1
if show_progress:
self.stdout.write(
"\rProcessed %i object(s)." % loaded_objects_in_fixture,
ending="",
)
except Exception as e:
if not isinstance(e, CommandError):
e.args = (
"Problem installing fixture '%s': %s" % (fixture_file, e),
)
raise
finally:
fixture.close()
if objects_in_fixture and show_progress:
self.stdout.write() # Add a newline after progress indicator.
self.loaded_object_count += loaded_objects_in_fixture
self.fixture_object_count += objects_in_fixture
# Warn if the fixture we loaded contains 0 objects.
if objects_in_fixture == 0:
warnings.warn(
"No fixture data found for '%s'. (File format may be "
"invalid.)" % fixture_name,
RuntimeWarning,
)
def get_fixture_name_and_dirs(self, fixture_name):
dirname, basename = os.path.split(fixture_name)
if os.path.isabs(fixture_name):
fixture_dirs = [dirname]
else:
fixture_dirs = self.fixture_dirs
if os.path.sep in os.path.normpath(fixture_name):
fixture_dirs = [os.path.join(dir_, dirname) for dir_ in fixture_dirs]
return basename, fixture_dirs
def get_targets(self, fixture_name, ser_fmt, cmp_fmt):
databases = [self.using, None]
cmp_fmts = self.compression_formats if cmp_fmt is None else [cmp_fmt]
ser_fmts = self.serialization_formats if ser_fmt is None else [ser_fmt]
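# Build every candidate filename for the label; e.g. (illustrative) the
# label "books" with ser_fmt "json" and cmp_fmt "gz" on the "default"
# database yields {"books.json.gz", "books.default.json.gz"}.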
return {
"%s.%s"
% (
fixture_name,
".".join([ext for ext in combo if ext]),
)
for combo in product(databases, ser_fmts, cmp_fmts)
}
def find_fixture_files_in_dir(self, fixture_dir, fixture_name, targets):
fixture_files_in_dir = []
path = os.path.join(fixture_dir, fixture_name)
for candidate in glob.iglob(glob.escape(path) + "*"):
if os.path.basename(candidate) in targets:
# Save the fixture_dir and fixture_name for future error
# messages.
fixture_files_in_dir.append((candidate, fixture_dir, fixture_name))
return fixture_files_in_dir
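# Cached because each label is resolved twice: loaddata() probes every
# label up front to bail out early when nothing matches, and
# load_label() resolves it again to actually open the files.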
@functools.lru_cache(maxsize=None)
def find_fixtures(self, fixture_label):
"""Find fixture files for a given label."""
if fixture_label == READ_STDIN:
return [(READ_STDIN, None, READ_STDIN)]
fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label)
if self.verbosity >= 2:
self.stdout.write("Loading '%s' fixtures..." % fixture_name)
fixture_name, fixture_dirs = self.get_fixture_name_and_dirs(fixture_name)
targets = self.get_targets(fixture_name, ser_fmt, cmp_fmt)
fixture_files = []
for fixture_dir in fixture_dirs:
if self.verbosity >= 2:
self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))
fixture_files_in_dir = self.find_fixture_files_in_dir(
fixture_dir,
fixture_name,
targets,
)
if self.verbosity >= 2 and not fixture_files_in_dir:
self.stdout.write(
"No fixture '%s' in %s." % (fixture_name, humanize(fixture_dir))
)
# Check kept for backwards-compatibility; it isn't clear why
# duplicates are only allowed in different directories.
if len(fixture_files_in_dir) > 1:
raise CommandError(
"Multiple fixtures named '%s' in %s. Aborting."
% (fixture_name, humanize(fixture_dir))
)
fixture_files.extend(fixture_files_in_dir)
if not fixture_files:
raise CommandError("No fixture named '%s' found." % fixture_name)
return fixture_files
@cached_property
def fixture_dirs(self):
"""
Return a list of fixture directories.
The list contains the 'fixtures' subdirectory of each installed
application, if it exists, the directories in FIXTURE_DIRS, and the
current directory.
"""
dirs = []
fixture_dirs = settings.FIXTURE_DIRS
if len(fixture_dirs) != len(set(fixture_dirs)):
raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.")
for app_config in apps.get_app_configs():
app_label = app_config.label
app_dir = os.path.join(app_config.path, "fixtures")
if app_dir in [str(d) for d in fixture_dirs]:
raise ImproperlyConfigured(
"'%s' is a default fixture directory for the '%s' app "
"and cannot be listed in settings.FIXTURE_DIRS."
% (app_dir, app_label)
)
if self.app_label and app_label != self.app_label:
continue
if os.path.isdir(app_dir):
dirs.append(app_dir)
dirs.extend(fixture_dirs)
dirs.append("")
return [os.path.realpath(d) for d in dirs]
def parse_name(self, fixture_name):
"""
Split a fixture name into name, serialization format, and compression format.
"""
if fixture_name == READ_STDIN:
if not self.format:
raise CommandError(
"--format must be specified when reading from stdin."
)
return READ_STDIN, self.format, "stdin"
parts = fixture_name.rsplit(".", 2)
if len(parts) > 1 and parts[-1] in self.compression_formats:
cmp_fmt = parts[-1]
parts = parts[:-1]
else:
cmp_fmt = None
if len(parts) > 1:
if parts[-1] in self.serialization_formats:
ser_fmt = parts[-1]
parts = parts[:-1]
else:
raise CommandError(
"Problem installing fixture '%s': %s is not a known "
"serialization format." % (".".join(parts[:-1]), parts[-1])
)
else:
ser_fmt = None
name = ".".join(parts)
return name, ser_fmt, cmp_fmt
class SingleZipReader(zipfile.ZipFile):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if len(self.namelist()) != 1:
raise ValueError("Zip-compressed fixtures must contain one file.")
def read(self):
return zipfile.ZipFile.read(self, self.namelist()[0])
def humanize(dirname):
return "'%s'" % dirname if dirname else "absolute path"
|
0c4e98551bd3dfc886c4b850908605c6202a103a05b057934f4b9022b1452db7 | import keyword
import re
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
from django.db.models.constants import LOOKUP_SEP
class Command(BaseCommand):
help = (
"Introspects the database tables in the given database and outputs a Django "
"model module."
)
requires_system_checks = []
stealth_options = ("table_name_filter",)
db_module = "django.db"
def add_arguments(self, parser):
parser.add_argument(
"table",
nargs="*",
type=str,
help="Selects what tables or views should be introspected.",
)
parser.add_argument(
"--database",
default=DEFAULT_DB_ALIAS,
help=(
'Nominates a database to introspect. Defaults to using the "default" '
"database."
),
)
parser.add_argument(
"--include-partitions",
action="store_true",
help="Also output models for partition tables.",
)
parser.add_argument(
"--include-views",
action="store_true",
help="Also output models for database views.",
)
def handle(self, **options):
try:
for line in self.handle_inspection(options):
self.stdout.write(line)
except NotImplementedError:
raise CommandError(
"Database inspection isn't supported for the currently selected "
"database backend."
)
def handle_inspection(self, options):
connection = connections[options["database"]]
# 'table_name_filter' is a stealth option
table_name_filter = options.get("table_name_filter")
def table2model(table_name):
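# e.g. "auth_user_groups" -> "AuthUserGroups".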
return re.sub(r"[^a-zA-Z0-9]", "", table_name.title())
with connection.cursor() as cursor:
yield "# This is an auto-generated Django model module."
yield "# You'll have to do the following manually to clean this up:"
yield "# * Rearrange models' order"
yield "# * Make sure each model has one field with primary_key=True"
yield (
"# * Make sure each ForeignKey and OneToOneField has `on_delete` set "
"to the desired behavior"
)
yield (
"# * Remove `managed = False` lines if you wish to allow "
"Django to create, modify, and delete the table"
)
yield (
"# Feel free to rename the models, but don't rename db_table values or "
"field names."
)
yield "from %s import models" % self.db_module
known_models = []
# Determine types of tables and/or views to be introspected.
types = {"t"}
if options["include_partitions"]:
types.add("p")
if options["include_views"]:
types.add("v")
table_info = connection.introspection.get_table_list(cursor)
table_info = {info.name: info for info in table_info if info.type in types}
for table_name in options["table"] or sorted(name for name in table_info):
if table_name_filter is not None and callable(table_name_filter):
if not table_name_filter(table_name):
continue
try:
try:
relations = connection.introspection.get_relations(
cursor, table_name
)
except NotImplementedError:
relations = {}
try:
constraints = connection.introspection.get_constraints(
cursor, table_name
)
except NotImplementedError:
constraints = {}
primary_key_columns = (
connection.introspection.get_primary_key_columns(
cursor, table_name
)
)
primary_key_column = (
primary_key_columns[0] if primary_key_columns else None
)
unique_columns = [
c["columns"][0]
for c in constraints.values()
if c["unique"] and len(c["columns"]) == 1
]
table_description = connection.introspection.get_table_description(
cursor, table_name
)
except Exception as e:
yield "# Unable to inspect table '%s'" % table_name
yield "# The error was: %s" % e
continue
model_name = table2model(table_name)
yield ""
yield ""
yield "class %s(models.Model):" % model_name
known_models.append(model_name)
used_column_names = [] # Holds column names used in the table so far
column_to_field_name = {} # Maps column names to names of model fields
used_relations = set() # Holds foreign relations used in the table.
for row in table_description:
comment_notes = (
[]
) # Holds Field notes, to be displayed in a Python comment.
extra_params = {} # Holds Field parameters such as 'db_column'.
column_name = row.name
is_relation = column_name in relations
att_name, params, notes = self.normalize_col_name(
column_name, used_column_names, is_relation
)
extra_params.update(params)
comment_notes.extend(notes)
used_column_names.append(att_name)
column_to_field_name[column_name] = att_name
# Add primary_key and unique, if necessary.
if column_name == primary_key_column:
extra_params["primary_key"] = True
if len(primary_key_columns) > 1:
comment_notes.append(
"The composite primary key (%s) found, that is not "
"supported. The first column is selected."
% ", ".join(primary_key_columns)
)
elif column_name in unique_columns:
extra_params["unique"] = True
if is_relation:
ref_db_column, ref_db_table = relations[column_name]
if extra_params.pop("unique", False) or extra_params.get(
"primary_key"
):
rel_type = "OneToOneField"
else:
rel_type = "ForeignKey"
ref_pk_column = (
connection.introspection.get_primary_key_column(
cursor, ref_db_table
)
)
if ref_pk_column and ref_pk_column != ref_db_column:
extra_params["to_field"] = ref_db_column
rel_to = (
"self"
if ref_db_table == table_name
else table2model(ref_db_table)
)
if rel_to in known_models:
field_type = "%s(%s" % (rel_type, rel_to)
else:
field_type = "%s('%s'" % (rel_type, rel_to)
if rel_to in used_relations:
extra_params["related_name"] = "%s_%s_set" % (
model_name.lower(),
att_name,
)
used_relations.add(rel_to)
else:
# Calling `get_field_type` to get the field type string and any
# additional parameters and notes.
field_type, field_params, field_notes = self.get_field_type(
connection, table_name, row
)
extra_params.update(field_params)
comment_notes.extend(field_notes)
field_type += "("
# Don't output 'id = models.AutoField(primary_key=True)', because
# that's assumed if it doesn't exist.
if att_name == "id" and extra_params == {"primary_key": True}:
if field_type == "AutoField(":
continue
elif (
field_type
== connection.features.introspected_field_types["AutoField"]
+ "("
):
comment_notes.append("AutoField?")
# Add 'null' and 'blank', if the 'null_ok' flag was present in the
# table description.
if row.null_ok: # If it's NULL...
extra_params["blank"] = True
extra_params["null"] = True
field_desc = "%s = %s%s" % (
att_name,
# Custom fields will have a dotted path
"" if "." in field_type else "models.",
field_type,
)
if field_type.startswith(("ForeignKey(", "OneToOneField(")):
field_desc += ", models.DO_NOTHING"
# Add comment.
if connection.features.supports_comments and row.comment:
extra_params["db_comment"] = row.comment
if extra_params:
if not field_desc.endswith("("):
field_desc += ", "
field_desc += ", ".join(
"%s=%r" % (k, v) for k, v in extra_params.items()
)
field_desc += ")"
if comment_notes:
field_desc += " # " + " ".join(comment_notes)
yield " %s" % field_desc
comment = None
if info := table_info.get(table_name):
is_view = info.type == "v"
is_partition = info.type == "p"
if connection.features.supports_comments:
comment = info.comment
else:
is_view = False
is_partition = False
yield from self.get_meta(
table_name,
constraints,
column_to_field_name,
is_view,
is_partition,
comment,
)
def normalize_col_name(self, col_name, used_column_names, is_relation):
"""
Modify the column name to make it Python-compatible as a field name.
"""
field_params = {}
field_notes = []
new_name = col_name.lower()
if new_name != col_name:
field_notes.append("Field name made lowercase.")
if is_relation:
if new_name.endswith("_id"):
new_name = new_name[:-3]
else:
field_params["db_column"] = col_name
new_name, num_repl = re.subn(r"\W", "_", new_name)
if num_repl > 0:
field_notes.append("Field renamed to remove unsuitable characters.")
if new_name.find(LOOKUP_SEP) >= 0:
while new_name.find(LOOKUP_SEP) >= 0:
new_name = new_name.replace(LOOKUP_SEP, "_")
if col_name.lower().find(LOOKUP_SEP) >= 0:
# Only add the comment if the double underscore was in the original name
field_notes.append(
"Field renamed because it contained more than one '_' in a row."
)
if new_name.startswith("_"):
new_name = "field%s" % new_name
field_notes.append("Field renamed because it started with '_'.")
if new_name.endswith("_"):
new_name = "%sfield" % new_name
field_notes.append("Field renamed because it ended with '_'.")
if keyword.iskeyword(new_name):
new_name += "_field"
field_notes.append("Field renamed because it was a Python reserved word.")
if new_name[0].isdigit():
new_name = "number_%s" % new_name
field_notes.append(
"Field renamed because it wasn't a valid Python identifier."
)
if new_name in used_column_names:
num = 0
while "%s_%d" % (new_name, num) in used_column_names:
num += 1
new_name = "%s_%d" % (new_name, num)
field_notes.append("Field renamed because of name conflict.")
if col_name != new_name and field_notes:
field_params["db_column"] = col_name
return new_name, field_params, field_notes
def get_field_type(self, connection, table_name, row):
"""
Given the database connection, the table name, and the cursor row
description, this routine will return the given field type name, as
well as any additional keyword parameters and notes for the field.
"""
field_params = {}
field_notes = []
try:
field_type = connection.introspection.get_field_type(row.type_code, row)
except KeyError:
field_type = "TextField"
field_notes.append("This field type is a guess.")
# Add max_length for all CharFields.
if field_type == "CharField" and row.display_size:
if (size := int(row.display_size)) and size > 0:
field_params["max_length"] = size
if field_type in {"CharField", "TextField"} and row.collation:
field_params["db_collation"] = row.collation
if field_type == "DecimalField":
if row.precision is None or row.scale is None:
field_notes.append(
"max_digits and decimal_places have been guessed, as this "
"database handles decimal fields as float"
)
field_params["max_digits"] = (
row.precision if row.precision is not None else 10
)
field_params["decimal_places"] = (
row.scale if row.scale is not None else 5
)
else:
field_params["max_digits"] = row.precision
field_params["decimal_places"] = row.scale
return field_type, field_params, field_notes
def get_meta(
self,
table_name,
constraints,
column_to_field_name,
is_view,
is_partition,
comment,
):
"""
Return a sequence comprising the lines of code necessary
to construct the inner Meta class for the model corresponding
to the given database table name.
"""
unique_together = []
has_unsupported_constraint = False
for params in constraints.values():
if params["unique"]:
columns = params["columns"]
if None in columns:
has_unsupported_constraint = True
columns = [
x for x in columns if x is not None and x in column_to_field_name
]
if len(columns) > 1:
unique_together.append(
str(tuple(column_to_field_name[c] for c in columns))
)
if is_view:
managed_comment = " # Created from a view. Don't remove."
elif is_partition:
managed_comment = " # Created from a partition. Don't remove."
else:
managed_comment = ""
meta = [""]
if has_unsupported_constraint:
meta.append(" # A unique constraint could not be introspected.")
meta += [
" class Meta:",
" managed = False%s" % managed_comment,
" db_table = %r" % table_name,
]
if unique_together:
tup = "(" + ", ".join(unique_together) + ",)"
meta += [" unique_together = %s" % tup]
if comment:
meta += [f" db_table_comment = {comment!r}"]
return meta
|
ae511bd1ed88115bf858f93ebc3c730c0bf26e42753ac367d7e73b6d4ac06156 | "File-based cache backend"
import glob
import os
import pickle
import random
import tempfile
import time
import zlib
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.core.files import locks
from django.core.files.move import file_move_safe
from django.utils.crypto import md5
class FileBasedCache(BaseCache):
cache_suffix = ".djcache"
pickle_protocol = pickle.HIGHEST_PROTOCOL
def __init__(self, dir, params):
super().__init__(params)
self._dir = os.path.abspath(dir)
self._createdir()
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
if self.has_key(key, version):
return False
self.set(key, value, timeout, version)
return True
def get(self, key, default=None, version=None):
fname = self._key_to_file(key, version)
try:
with open(fname, "rb") as f:
if not self._is_expired(f):
return pickle.loads(zlib.decompress(f.read()))
except FileNotFoundError:
pass
return default
def _write_content(self, file, timeout, value):
expiry = self.get_backend_timeout(timeout)
file.write(pickle.dumps(expiry, self.pickle_protocol))
file.write(zlib.compress(pickle.dumps(value, self.pickle_protocol)))
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
self._createdir() # Cache dir can be deleted at any time.
fname = self._key_to_file(key, version)
self._cull() # make some room if necessary
fd, tmp_path = tempfile.mkstemp(dir=self._dir)
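# Write to a temporary file in the cache dir first, then move it into
# place (an atomic rename when possible) so concurrent readers never
# see a partially written entry.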
renamed = False
try:
with open(fd, "wb") as f:
self._write_content(f, timeout, value)
file_move_safe(tmp_path, fname, allow_overwrite=True)
renamed = True
finally:
if not renamed:
os.remove(tmp_path)
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
try:
with open(self._key_to_file(key, version), "r+b") as f:
try:
locks.lock(f, locks.LOCK_EX)
if self._is_expired(f):
return False
else:
previous_value = pickle.loads(zlib.decompress(f.read()))
f.seek(0)
self._write_content(f, timeout, previous_value)
return True
finally:
locks.unlock(f)
except FileNotFoundError:
return False
def delete(self, key, version=None):
return self._delete(self._key_to_file(key, version))
def _delete(self, fname):
if not fname.startswith(self._dir) or not os.path.exists(fname):
return False
try:
os.remove(fname)
except FileNotFoundError:
# The file may have been removed by another process.
return False
return True
def has_key(self, key, version=None):
fname = self._key_to_file(key, version)
try:
with open(fname, "rb") as f:
return not self._is_expired(f)
except FileNotFoundError:
return False
def _cull(self):
"""
Remove random cache entries if max_entries is reached at a ratio
of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means
that the entire cache will be purged.
"""
filelist = self._list_cache_files()
num_entries = len(filelist)
if num_entries < self._max_entries:
return # return early if no culling is required
if self._cull_frequency == 0:
return self.clear() # Clear the cache when CULL_FREQUENCY = 0
# Delete a random selection of entries
filelist = random.sample(filelist, int(num_entries / self._cull_frequency))
for fname in filelist:
self._delete(fname)
def _createdir(self):
# Set the umask because os.makedirs() doesn't apply the "mode" argument
# to intermediate-level directories.
old_umask = os.umask(0o077)
try:
os.makedirs(self._dir, 0o700, exist_ok=True)
finally:
os.umask(old_umask)
def _key_to_file(self, key, version=None):
"""
Convert a key into a cache file path. Basically this is the
root cache path joined with the md5sum of the key and a suffix.
"""
key = self.make_and_validate_key(key, version=version)
return os.path.join(
self._dir,
"".join(
[
md5(key.encode(), usedforsecurity=False).hexdigest(),
self.cache_suffix,
]
),
)
def clear(self):
"""
Remove all the cache files.
"""
for fname in self._list_cache_files():
self._delete(fname)
def _is_expired(self, f):
"""
Take an open cache file `f` and delete it if it's expired.
"""
try:
exp = pickle.load(f)
except EOFError:
exp = 0 # An empty file is considered expired.
if exp is not None and exp < time.time():
f.close() # On Windows a file has to be closed before deleting
self._delete(f.name)
return True
return False
def _list_cache_files(self):
"""
Get a list of paths to all the cache files. These are all the files
in the root cache dir that end with the cache_suffix.
"""
return [
os.path.join(self._dir, fname)
for fname in glob.glob1(self._dir, "*%s" % self.cache_suffix)
]
|
4c1d5bc3524aee399430b96efa7ceeb8c86d529d095c1c7314e5f5e3d4557857 | """Redis cache backend."""
import pickle
import random
import re
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
class RedisSerializer:
def __init__(self, protocol=None):
self.protocol = pickle.HIGHEST_PROTOCOL if protocol is None else protocol
def dumps(self, obj):
# Only skip pickling for exact integers; int subclasses such as bool
# must still be pickled.
if type(obj) is int:
return obj
return pickle.dumps(obj, self.protocol)
def loads(self, data):
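# Integers are stored raw (see dumps()), so they come back as bytes
# like b"42"; anything that fails int() is a pickle payload.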
try:
return int(data)
except ValueError:
return pickle.loads(data)
class RedisCacheClient:
def __init__(
self,
servers,
serializer=None,
pool_class=None,
parser_class=None,
**options,
):
import redis
self._lib = redis
self._servers = servers
self._pools = {}
self._client = self._lib.Redis
if isinstance(pool_class, str):
pool_class = import_string(pool_class)
self._pool_class = pool_class or self._lib.ConnectionPool
if isinstance(serializer, str):
serializer = import_string(serializer)
if callable(serializer):
serializer = serializer()
self._serializer = serializer or RedisSerializer()
if isinstance(parser_class, str):
parser_class = import_string(parser_class)
parser_class = parser_class or self._lib.connection.DefaultParser
self._pool_options = {"parser_class": parser_class, **options}
def _get_connection_pool_index(self, write):
# Write to the first server. Read from other servers if there are more,
# otherwise read from the first server.
if write or len(self._servers) == 1:
return 0
return random.randint(1, len(self._servers) - 1)
def _get_connection_pool(self, write):
index = self._get_connection_pool_index(write)
if index not in self._pools:
self._pools[index] = self._pool_class.from_url(
self._servers[index],
**self._pool_options,
)
return self._pools[index]
def get_client(self, key=None, *, write=False):
# The key argument is unused here, but keeping it in the signature lets
# a custom cache client use the key to select a server, e.g. for
# sharding.
pool = self._get_connection_pool(write)
return self._client(connection_pool=pool)
def add(self, key, value, timeout):
client = self.get_client(key, write=True)
value = self._serializer.dumps(value)
if timeout == 0:
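# A timeout of 0 means "expire immediately": emulate add() with
# SET NX followed by DELETE so the return value still reflects
# whether the key was previously absent.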
if ret := bool(client.set(key, value, nx=True)):
client.delete(key)
return ret
else:
return bool(client.set(key, value, ex=timeout, nx=True))
def get(self, key, default):
client = self.get_client(key)
value = client.get(key)
return default if value is None else self._serializer.loads(value)
def set(self, key, value, timeout):
client = self.get_client(key, write=True)
value = self._serializer.dumps(value)
if timeout == 0:
client.delete(key)
else:
client.set(key, value, ex=timeout)
def touch(self, key, timeout):
client = self.get_client(key, write=True)
if timeout is None:
return bool(client.persist(key))
else:
return bool(client.expire(key, timeout))
def delete(self, key):
client = self.get_client(key, write=True)
return bool(client.delete(key))
def get_many(self, keys):
client = self.get_client(None)
ret = client.mget(keys)
return {
k: self._serializer.loads(v) for k, v in zip(keys, ret) if v is not None
}
def has_key(self, key):
client = self.get_client(key)
return bool(client.exists(key))
def incr(self, key, delta):
client = self.get_client(key, write=True)
if not client.exists(key):
raise ValueError("Key '%s' not found." % key)
return client.incr(key, delta)
def set_many(self, data, timeout):
client = self.get_client(None, write=True)
pipeline = client.pipeline()
pipeline.mset({k: self._serializer.dumps(v) for k, v in data.items()})
if timeout is not None:
# Setting timeout for each key as redis does not support timeout
# with mset().
for key in data:
pipeline.expire(key, timeout)
pipeline.execute()
def delete_many(self, keys):
client = self.get_client(None, write=True)
client.delete(*keys)
def clear(self):
client = self.get_client(None, write=True)
return bool(client.flushdb())
class RedisCache(BaseCache):
def __init__(self, server, params):
super().__init__(params)
if isinstance(server, str):
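# A string may name several servers separated by commas or semicolons,
# e.g. "redis://h1,redis://h2" (illustrative); the first entry is the
# writer, the others serve reads.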
self._servers = re.split("[;,]", server)
else:
self._servers = server
self._class = RedisCacheClient
self._options = params.get("OPTIONS", {})
@cached_property
def _cache(self):
return self._class(self._servers, **self._options)
def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
if timeout == DEFAULT_TIMEOUT:
timeout = self.default_timeout
# The key will be made persistent if None is used as a timeout.
# Non-positive values will cause the key to be deleted.
return None if timeout is None else max(0, int(timeout))
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.add(key, value, self.get_backend_timeout(timeout))
def get(self, key, default=None, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.get(key, default)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
self._cache.set(key, value, self.get_backend_timeout(timeout))
def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.touch(key, self.get_backend_timeout(timeout))
def delete(self, key, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.delete(key)
def get_many(self, keys, version=None):
key_map = {
self.make_and_validate_key(key, version=version): key for key in keys
}
ret = self._cache.get_many(key_map.keys())
return {key_map[k]: v for k, v in ret.items()}
def has_key(self, key, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.has_key(key)
def incr(self, key, delta=1, version=None):
key = self.make_and_validate_key(key, version=version)
return self._cache.incr(key, delta)
def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None):
if not data:
return []
safe_data = {}
for key, value in data.items():
key = self.make_and_validate_key(key, version=version)
safe_data[key] = value
self._cache.set_many(safe_data, self.get_backend_timeout(timeout))
return []
def delete_many(self, keys, version=None):
if not keys:
return
safe_keys = [self.make_and_validate_key(key, version=version) for key in keys]
self._cache.delete_many(safe_keys)
def clear(self):
return self._cache.clear()
|
9e0e66ba24c3d00278da340afbb16a83676d8063376418006bc84025676359f6 | import warnings
from urllib.parse import urlencode
from urllib.request import urlopen
from django.apps import apps as django_apps
from django.conf import settings
from django.core import paginator
from django.core.exceptions import ImproperlyConfigured
from django.urls import NoReverseMatch, reverse
from django.utils import translation
from django.utils.deprecation import RemovedInDjango50Warning
PING_URL = "https://www.google.com/webmasters/tools/ping"
class SitemapNotFound(Exception):
pass
def ping_google(sitemap_url=None, ping_url=PING_URL, sitemap_uses_https=True):
"""
Alert Google that the sitemap for the current site has been updated.
If sitemap_url is provided, it should be an absolute path to the sitemap
for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this
function will attempt to deduce it by using urls.reverse().
"""
sitemap_full_url = _get_sitemap_full_url(sitemap_url, sitemap_uses_https)
params = urlencode({"sitemap": sitemap_full_url})
urlopen("%s?%s" % (ping_url, params))
def _get_sitemap_full_url(sitemap_url, sitemap_uses_https=True):
if not django_apps.is_installed("django.contrib.sites"):
raise ImproperlyConfigured(
"ping_google requires django.contrib.sites, which isn't installed."
)
if sitemap_url is None:
try:
# First, try to get the "index" sitemap URL.
sitemap_url = reverse("django.contrib.sitemaps.views.index")
except NoReverseMatch:
try:
# Next, try for the "global" sitemap URL.
sitemap_url = reverse("django.contrib.sitemaps.views.sitemap")
except NoReverseMatch:
pass
if sitemap_url is None:
raise SitemapNotFound(
"You didn't provide a sitemap_url, and the sitemap URL couldn't be "
"auto-detected."
)
Site = django_apps.get_model("sites.Site")
current_site = Site.objects.get_current()
scheme = "https" if sitemap_uses_https else "http"
return "%s://%s%s" % (scheme, current_site.domain, sitemap_url)
class Sitemap:
# This limit is defined by Google. See the index documentation at
# https://www.sitemaps.org/protocol.html#index.
limit = 50000
# If protocol is None, the URLs in the sitemap will use the protocol
# with which the sitemap was requested.
protocol = None
# Enables generating URLs for all languages.
i18n = False
# Override list of languages to use.
languages = None
# Enables generating alternate/hreflang links.
alternates = False
# Add an alternate/hreflang link with value 'x-default'.
x_default = False
def _get(self, name, item, default=None):
try:
attr = getattr(self, name)
except AttributeError:
return default
if callable(attr):
if self.i18n:
# Split the (item, lang_code) tuples again for the location,
# priority, lastmod and changefreq method calls.
item, lang_code = item
return attr(item)
return attr
def get_languages_for_item(self, item):
"""Languages for which this item is displayed."""
return self._languages()
def _languages(self):
if self.languages is not None:
return self.languages
return [lang_code for lang_code, _ in settings.LANGUAGES]
def _items(self):
if self.i18n:
# Create (item, lang_code) tuples for all items and languages.
# This is necessary to paginate with all languages already considered.
items = [
(item, lang_code)
for item in self.items()
for lang_code in self.get_languages_for_item(item)
]
return items
return self.items()
def _location(self, item, force_lang_code=None):
if self.i18n:
obj, lang_code = item
# Activate the language from the item tuple or the forced one before calling location().
with translation.override(force_lang_code or lang_code):
return self._get("location", item)
return self._get("location", item)
@property
def paginator(self):
return paginator.Paginator(self._items(), self.limit)
def items(self):
return []
def location(self, item):
return item.get_absolute_url()
def get_protocol(self, protocol=None):
# Determine protocol
if self.protocol is None and protocol is None:
warnings.warn(
"The default sitemap protocol will be changed from 'http' to "
"'https' in Django 5.0. Set Sitemap.protocol to silence this "
"warning.",
category=RemovedInDjango50Warning,
stacklevel=2,
)
# RemovedInDjango50Warning: when the deprecation ends, replace 'http'
# with 'https'.
return self.protocol or protocol or "http"
def get_domain(self, site=None):
# Determine domain
if site is None:
if django_apps.is_installed("django.contrib.sites"):
Site = django_apps.get_model("sites.Site")
try:
site = Site.objects.get_current()
except Site.DoesNotExist:
pass
if site is None:
raise ImproperlyConfigured(
"To use sitemaps, either enable the sites framework or pass "
"a Site/RequestSite object in your view."
)
return site.domain
def get_urls(self, page=1, site=None, protocol=None):
protocol = self.get_protocol(protocol)
domain = self.get_domain(site)
return self._urls(page, protocol, domain)
def get_latest_lastmod(self):
if not hasattr(self, "lastmod"):
return None
if callable(self.lastmod):
try:
return max([self.lastmod(item) for item in self.items()], default=None)
except TypeError:
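# The lastmod values were not mutually comparable (e.g. a mix of
# naive and aware datetimes), so no latest value can be computed.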
return None
else:
return self.lastmod
def _urls(self, page, protocol, domain):
urls = []
latest_lastmod = None
all_items_lastmod = True # track if all items have a lastmod
paginator_page = self.paginator.page(page)
for item in paginator_page.object_list:
loc = f"{protocol}://{domain}{self._location(item)}"
priority = self._get("priority", item)
lastmod = self._get("lastmod", item)
if all_items_lastmod:
all_items_lastmod = lastmod is not None
if all_items_lastmod and (
latest_lastmod is None or lastmod > latest_lastmod
):
latest_lastmod = lastmod
url_info = {
"item": item,
"location": loc,
"lastmod": lastmod,
"changefreq": self._get("changefreq", item),
"priority": str(priority if priority is not None else ""),
"alternates": [],
}
if self.i18n and self.alternates:
item_languages = self.get_languages_for_item(item[0])
for lang_code in item_languages:
loc = f"{protocol}://{domain}{self._location(item, lang_code)}"
url_info["alternates"].append(
{
"location": loc,
"lang_code": lang_code,
}
)
if self.x_default and settings.LANGUAGE_CODE in item_languages:
lang_code = settings.LANGUAGE_CODE
loc = f"{protocol}://{domain}{self._location(item, lang_code)}"
loc = loc.replace(f"/{lang_code}/", "/", 1)
url_info["alternates"].append(
{
"location": loc,
"lang_code": "x-default",
}
)
urls.append(url_info)
if all_items_lastmod and latest_lastmod:
self.latest_lastmod = latest_lastmod
return urls
class GenericSitemap(Sitemap):
priority = None
changefreq = None
def __init__(self, info_dict, priority=None, changefreq=None, protocol=None):
self.queryset = info_dict["queryset"]
self.date_field = info_dict.get("date_field")
self.priority = self.priority or priority
self.changefreq = self.changefreq or changefreq
self.protocol = self.protocol or protocol
def items(self):
# Make sure to return a clone; we don't want premature evaluation.
return self.queryset.filter()
def lastmod(self, item):
if self.date_field is not None:
return getattr(item, self.date_field)
return None
def get_latest_lastmod(self):
if self.date_field is not None:
return (
self.queryset.order_by("-" + self.date_field)
.values_list(self.date_field, flat=True)
.first()
)
return None
|
c73fec3a3ddde599c877f15e9cfabfe586effadfdba5df5bf033b4303e591c57 | from django.db.models import (
CharField,
Expression,
Field,
FloatField,
Func,
Lookup,
TextField,
Value,
)
from django.db.models.expressions import CombinedExpression, register_combinable_fields
from django.db.models.functions import Cast, Coalesce
class SearchVectorExact(Lookup):
lookup_name = "exact"
def process_rhs(self, qn, connection):
if not isinstance(self.rhs, (SearchQuery, CombinedSearchQuery)):
config = getattr(self.lhs, "config", None)
self.rhs = SearchQuery(self.rhs, config=config)
rhs, rhs_params = super().process_rhs(qn, connection)
return rhs, rhs_params
def as_sql(self, qn, connection):
lhs, lhs_params = self.process_lhs(qn, connection)
rhs, rhs_params = self.process_rhs(qn, connection)
params = lhs_params + rhs_params
return "%s @@ %s" % (lhs, rhs), params
class SearchVectorField(Field):
def db_type(self, connection):
return "tsvector"
class SearchQueryField(Field):
def db_type(self, connection):
return "tsquery"
class _Float4Field(Field):
def db_type(self, connection):
return "float4"
class SearchConfig(Expression):
def __init__(self, config):
super().__init__()
if not hasattr(config, "resolve_expression"):
config = Value(config)
self.config = config
@classmethod
def from_parameter(cls, config):
if config is None or isinstance(config, cls):
return config
return cls(config)
def get_source_expressions(self):
return [self.config]
def set_source_expressions(self, exprs):
(self.config,) = exprs
def as_sql(self, compiler, connection):
sql, params = compiler.compile(self.config)
return "%s::regconfig" % sql, params
class SearchVectorCombinable:
ADD = "||"
def _combine(self, other, connector, reversed):
if not isinstance(other, SearchVectorCombinable):
raise TypeError(
"SearchVector can only be combined with other SearchVector "
"instances, got %s." % type(other).__name__
)
if reversed:
return CombinedSearchVector(other, connector, self, self.config)
return CombinedSearchVector(self, connector, other, self.config)
register_combinable_fields(
SearchVectorField, SearchVectorCombinable.ADD, SearchVectorField, SearchVectorField
)
class SearchVector(SearchVectorCombinable, Func):
function = "to_tsvector"
arg_joiner = " || ' ' || "
output_field = SearchVectorField()
def __init__(self, *expressions, config=None, weight=None):
super().__init__(*expressions)
self.config = SearchConfig.from_parameter(config)
if weight is not None and not hasattr(weight, "resolve_expression"):
weight = Value(weight)
self.weight = weight
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
resolved = super().resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
if self.config:
resolved.config = self.config.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
return resolved
def as_sql(self, compiler, connection, function=None, template=None):
clone = self.copy()
clone.set_source_expressions(
[
Coalesce(
expression
if isinstance(expression.output_field, (CharField, TextField))
else Cast(expression, TextField()),
Value(""),
)
for expression in clone.get_source_expressions()
]
)
config_sql = None
config_params = []
if template is None:
if clone.config:
config_sql, config_params = compiler.compile(clone.config)
template = "%(function)s(%(config)s, %(expressions)s)"
else:
template = clone.template
sql, params = super(SearchVector, clone).as_sql(
compiler,
connection,
function=function,
template=template,
config=config_sql,
)
extra_params = []
if clone.weight:
weight_sql, extra_params = compiler.compile(clone.weight)
sql = "setweight({}, {})".format(sql, weight_sql)
# These parameters must be bound on the client side because we may
# want to create an index on this expression.
sql = connection.ops.compose_sql(sql, config_params + params + extra_params)
return sql, []
class CombinedSearchVector(SearchVectorCombinable, CombinedExpression):
def __init__(self, lhs, connector, rhs, config, output_field=None):
self.config = config
super().__init__(lhs, connector, rhs, output_field)
class SearchQueryCombinable:
BITAND = "&&"
BITOR = "||"
def _combine(self, other, connector, reversed):
if not isinstance(other, SearchQueryCombinable):
raise TypeError(
"SearchQuery can only be combined with other SearchQuery "
"instances, got %s." % type(other).__name__
)
if reversed:
return CombinedSearchQuery(other, connector, self, self.config)
return CombinedSearchQuery(self, connector, other, self.config)
# On Combinable, these are not implemented to reduce confusion with Q. In
# this case we are actually (ab)using them to do logical combination so
# it's consistent with other usage in Django.
def __or__(self, other):
return self._combine(other, self.BITOR, False)
def __ror__(self, other):
return self._combine(other, self.BITOR, True)
def __and__(self, other):
return self._combine(other, self.BITAND, False)
def __rand__(self, other):
return self._combine(other, self.BITAND, True)
class SearchQuery(SearchQueryCombinable, Func):
output_field = SearchQueryField()
SEARCH_TYPES = {
"plain": "plainto_tsquery",
"phrase": "phraseto_tsquery",
"raw": "to_tsquery",
"websearch": "websearch_to_tsquery",
}
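# Each search_type selects the PostgreSQL parser of the same name; e.g.
# SearchQuery("red tomato", search_type="websearch") compiles to roughly
# websearch_to_tsquery('red tomato').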
def __init__(
self,
value,
output_field=None,
*,
config=None,
invert=False,
search_type="plain",
):
self.function = self.SEARCH_TYPES.get(search_type)
if self.function is None:
raise ValueError("Unknown search_type argument '%s'." % search_type)
if not hasattr(value, "resolve_expression"):
value = Value(value)
expressions = (value,)
self.config = SearchConfig.from_parameter(config)
if self.config is not None:
expressions = (self.config,) + expressions
self.invert = invert
super().__init__(*expressions, output_field=output_field)
def as_sql(self, compiler, connection, function=None, template=None):
sql, params = super().as_sql(compiler, connection, function, template)
if self.invert:
sql = "!!(%s)" % sql
return sql, params
def __invert__(self):
clone = self.copy()
clone.invert = not self.invert
return clone
def __str__(self):
result = super().__str__()
return ("~%s" % result) if self.invert else result
class CombinedSearchQuery(SearchQueryCombinable, CombinedExpression):
def __init__(self, lhs, connector, rhs, config, output_field=None):
self.config = config
super().__init__(lhs, connector, rhs, output_field)
def __str__(self):
return "(%s)" % super().__str__()
class SearchRank(Func):
function = "ts_rank"
output_field = FloatField()
def __init__(
self,
vector,
query,
weights=None,
normalization=None,
cover_density=False,
):
from .fields.array import ArrayField
if not hasattr(vector, "resolve_expression"):
vector = SearchVector(vector)
if not hasattr(query, "resolve_expression"):
query = SearchQuery(query)
expressions = (vector, query)
if weights is not None:
if not hasattr(weights, "resolve_expression"):
weights = Value(weights)
weights = Cast(weights, ArrayField(_Float4Field()))
expressions = (weights,) + expressions
if normalization is not None:
if not hasattr(normalization, "resolve_expression"):
normalization = Value(normalization)
expressions += (normalization,)
if cover_density:
self.function = "ts_rank_cd"
super().__init__(*expressions)
class SearchHeadline(Func):
function = "ts_headline"
template = "%(function)s(%(expressions)s%(options)s)"
output_field = TextField()
def __init__(
self,
expression,
query,
*,
config=None,
start_sel=None,
stop_sel=None,
max_words=None,
min_words=None,
short_word=None,
highlight_all=None,
max_fragments=None,
fragment_delimiter=None,
):
if not hasattr(query, "resolve_expression"):
query = SearchQuery(query)
options = {
"StartSel": start_sel,
"StopSel": stop_sel,
"MaxWords": max_words,
"MinWords": min_words,
"ShortWord": short_word,
"HighlightAll": highlight_all,
"MaxFragments": max_fragments,
"FragmentDelimiter": fragment_delimiter,
}
self.options = {
option: value for option, value in options.items() if value is not None
}
expressions = (expression, query)
if config is not None:
config = SearchConfig.from_parameter(config)
expressions = (config,) + expressions
super().__init__(*expressions)
def as_sql(self, compiler, connection, function=None, template=None):
options_sql = ""
options_params = []
if self.options:
options_params.append(
", ".join(
connection.ops.compose_sql(f"{option}=%s", [value])
for option, value in self.options.items()
)
)
options_sql = ", %s"
sql, params = super().as_sql(
compiler,
connection,
function=function,
template=template,
options=options_sql,
)
return sql, params + options_params
SearchVectorField.register_lookup(SearchVectorExact)
class TrigramBase(Func):
output_field = FloatField()
def __init__(self, expression, string, **extra):
if not hasattr(string, "resolve_expression"):
string = Value(string)
super().__init__(expression, string, **extra)
class TrigramWordBase(Func):
output_field = FloatField()
def __init__(self, string, expression, **extra):
if not hasattr(string, "resolve_expression"):
string = Value(string)
super().__init__(string, expression, **extra)
class TrigramSimilarity(TrigramBase):
function = "SIMILARITY"
class TrigramDistance(TrigramBase):
function = ""
arg_joiner = " <-> "
class TrigramWordDistance(TrigramWordBase):
function = ""
arg_joiner = " <<-> "
class TrigramStrictWordDistance(TrigramWordBase):
function = ""
arg_joiner = " <<<-> "
class TrigramWordSimilarity(TrigramWordBase):
function = "WORD_SIMILARITY"
class TrigramStrictWordSimilarity(TrigramWordBase):
function = "STRICT_WORD_SIMILARITY"
|
b1f8e82fed9526b173aefd020b74b85865996e5cd1ff80701439bd6c41ecf01d | from django.apps import AppConfig
from django.core.signals import setting_changed
from django.db import connections
from django.db.backends.postgresql.psycopg_any import RANGE_TYPES
from django.db.backends.signals import connection_created
from django.db.migrations.writer import MigrationWriter
from django.db.models import CharField, OrderBy, TextField
from django.db.models.functions import Collate
from django.db.models.indexes import IndexExpression
from django.utils.translation import gettext_lazy as _
from .indexes import OpClass
from .lookups import (
SearchLookup,
TrigramSimilar,
TrigramStrictWordSimilar,
TrigramWordSimilar,
Unaccent,
)
from .serializers import RangeSerializer
from .signals import register_type_handlers
def uninstall_if_needed(setting, value, enter, **kwargs):
"""
Undo the effects of PostgresConfig.ready() when django.contrib.postgres
is "uninstalled" by override_settings().
"""
if (
not enter
and setting == "INSTALLED_APPS"
and "django.contrib.postgres" not in set(value)
):
connection_created.disconnect(register_type_handlers)
CharField._unregister_lookup(Unaccent)
TextField._unregister_lookup(Unaccent)
CharField._unregister_lookup(SearchLookup)
TextField._unregister_lookup(SearchLookup)
CharField._unregister_lookup(TrigramSimilar)
TextField._unregister_lookup(TrigramSimilar)
CharField._unregister_lookup(TrigramWordSimilar)
TextField._unregister_lookup(TrigramWordSimilar)
CharField._unregister_lookup(TrigramStrictWordSimilar)
TextField._unregister_lookup(TrigramStrictWordSimilar)
# Disconnect this receiver until the next time this app is installed
# and ready() connects it again to prevent unnecessary processing on
# each setting change.
setting_changed.disconnect(uninstall_if_needed)
MigrationWriter.unregister_serializer(RANGE_TYPES)
class PostgresConfig(AppConfig):
name = "django.contrib.postgres"
verbose_name = _("PostgreSQL extensions")
def ready(self):
setting_changed.connect(uninstall_if_needed)
# Connections may already exist before we are called.
for conn in connections.all(initialized_only=True):
if conn.vendor == "postgresql":
conn.introspection.data_types_reverse.update(
{
3904: "django.contrib.postgres.fields.IntegerRangeField",
3906: "django.contrib.postgres.fields.DecimalRangeField",
3910: "django.contrib.postgres.fields.DateTimeRangeField",
3912: "django.contrib.postgres.fields.DateRangeField",
3926: "django.contrib.postgres.fields.BigIntegerRangeField",
}
)
if conn.connection is not None:
register_type_handlers(conn)
connection_created.connect(register_type_handlers)
CharField.register_lookup(Unaccent)
TextField.register_lookup(Unaccent)
CharField.register_lookup(SearchLookup)
TextField.register_lookup(SearchLookup)
CharField.register_lookup(TrigramSimilar)
TextField.register_lookup(TrigramSimilar)
CharField.register_lookup(TrigramWordSimilar)
TextField.register_lookup(TrigramWordSimilar)
CharField.register_lookup(TrigramStrictWordSimilar)
TextField.register_lookup(TrigramStrictWordSimilar)
MigrationWriter.register_serializer(RANGE_TYPES, RangeSerializer)
IndexExpression.register_wrappers(OrderBy, OpClass, Collate)
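# Usage sketch (illustrative only): once ready() has run, the lookups
# registered above are available on any CharField/TextField. ``City`` is a
# hypothetical model; the ``unaccent`` and ``pg_trgm`` extensions are
# assumed to be installed.
def _registered_lookups_example():
    from myapp.models import City  # hypothetical model

    # Accent-insensitive match, backed by the Unaccent lookup.
    unaccented = City.objects.filter(name__unaccent="Malmo")
    # Fuzzy match, backed by the TrigramSimilar lookup.
    similar = City.objects.filter(name__trigram_similar="Stokholm")
    return unaccented, similar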
|
340c4c0b304c8f130dbc405675189b6d2a49fd487285e4e82535489910064bab | from django.contrib.postgres.signals import (
get_citext_oids,
get_hstore_oids,
register_type_handlers,
)
from django.db import NotSupportedError, router
from django.db.migrations import AddConstraint, AddIndex, RemoveIndex
from django.db.migrations.operations.base import Operation
from django.db.models.constraints import CheckConstraint
class CreateExtension(Operation):
reversible = True
def __init__(self, name):
self.name = name
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.vendor != "postgresql" or not router.allow_migrate(
schema_editor.connection.alias, app_label
):
return
if not self.extension_exists(schema_editor, self.name):
schema_editor.execute(
"CREATE EXTENSION IF NOT EXISTS %s"
% schema_editor.quote_name(self.name)
)
# Clear cached, stale oids.
get_hstore_oids.cache_clear()
get_citext_oids.cache_clear()
        # Register the new type handlers now: this cannot be done before
        # the extension is installed, and a subsequent data migration would
        # reuse this same connection and expect them to be in place.
register_type_handlers(schema_editor.connection)
if hasattr(schema_editor.connection, "register_geometry_adapters"):
schema_editor.connection.register_geometry_adapters(
schema_editor.connection.connection, True
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if not router.allow_migrate(schema_editor.connection.alias, app_label):
return
if self.extension_exists(schema_editor, self.name):
schema_editor.execute(
"DROP EXTENSION IF EXISTS %s" % schema_editor.quote_name(self.name)
)
# Clear cached, stale oids.
get_hstore_oids.cache_clear()
get_citext_oids.cache_clear()
def extension_exists(self, schema_editor, extension):
with schema_editor.connection.cursor() as cursor:
cursor.execute(
"SELECT 1 FROM pg_extension WHERE extname = %s",
[extension],
)
return bool(cursor.fetchone())
def describe(self):
return "Creates extension %s" % self.name
@property
def migration_name_fragment(self):
return "create_extension_%s" % self.name
class BloomExtension(CreateExtension):
def __init__(self):
self.name = "bloom"
class BtreeGinExtension(CreateExtension):
def __init__(self):
self.name = "btree_gin"
class BtreeGistExtension(CreateExtension):
def __init__(self):
self.name = "btree_gist"
class CITextExtension(CreateExtension):
def __init__(self):
self.name = "citext"
class CryptoExtension(CreateExtension):
def __init__(self):
self.name = "pgcrypto"
class HStoreExtension(CreateExtension):
def __init__(self):
self.name = "hstore"
class TrigramExtension(CreateExtension):
def __init__(self):
self.name = "pg_trgm"
class UnaccentExtension(CreateExtension):
def __init__(self):
self.name = "unaccent"
class NotInTransactionMixin:
def _ensure_not_in_transaction(self, schema_editor):
if schema_editor.connection.in_atomic_block:
raise NotSupportedError(
"The %s operation cannot be executed inside a transaction "
"(set atomic = False on the migration)." % self.__class__.__name__
)
class AddIndexConcurrently(NotInTransactionMixin, AddIndex):
"""Create an index using PostgreSQL's CREATE INDEX CONCURRENTLY syntax."""
atomic = False
def describe(self):
return "Concurrently create index %s on field(s) %s of model %s" % (
self.index.name,
", ".join(self.index.fields),
self.model_name,
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
self._ensure_not_in_transaction(schema_editor)
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.add_index(model, self.index, concurrently=True)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self._ensure_not_in_transaction(schema_editor)
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.remove_index(model, self.index, concurrently=True)
class RemoveIndexConcurrently(NotInTransactionMixin, RemoveIndex):
"""Remove an index using PostgreSQL's DROP INDEX CONCURRENTLY syntax."""
atomic = False
def describe(self):
return "Concurrently remove index %s from %s" % (self.name, self.model_name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
self._ensure_not_in_transaction(schema_editor)
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
from_model_state = from_state.models[app_label, self.model_name_lower]
index = from_model_state.get_index_by_name(self.name)
schema_editor.remove_index(model, index, concurrently=True)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self._ensure_not_in_transaction(schema_editor)
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
to_model_state = to_state.models[app_label, self.model_name_lower]
index = to_model_state.get_index_by_name(self.name)
schema_editor.add_index(model, index, concurrently=True)
class CollationOperation(Operation):
def __init__(self, name, locale, *, provider="libc", deterministic=True):
self.name = name
self.locale = locale
self.provider = provider
self.deterministic = deterministic
def state_forwards(self, app_label, state):
pass
def deconstruct(self):
kwargs = {"name": self.name, "locale": self.locale}
if self.provider and self.provider != "libc":
kwargs["provider"] = self.provider
if self.deterministic is False:
kwargs["deterministic"] = self.deterministic
return (
self.__class__.__qualname__,
[],
kwargs,
)
def create_collation(self, schema_editor):
args = {"locale": schema_editor.quote_name(self.locale)}
if self.provider != "libc":
args["provider"] = schema_editor.quote_name(self.provider)
if self.deterministic is False:
args["deterministic"] = "false"
schema_editor.execute(
"CREATE COLLATION %(name)s (%(args)s)"
% {
"name": schema_editor.quote_name(self.name),
"args": ", ".join(
f"{option}={value}" for option, value in args.items()
),
}
)
def remove_collation(self, schema_editor):
schema_editor.execute(
"DROP COLLATION %s" % schema_editor.quote_name(self.name),
)
class CreateCollation(CollationOperation):
"""Create a collation."""
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.vendor != "postgresql" or not router.allow_migrate(
schema_editor.connection.alias, app_label
):
return
self.create_collation(schema_editor)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if not router.allow_migrate(schema_editor.connection.alias, app_label):
return
self.remove_collation(schema_editor)
def describe(self):
return f"Create collation {self.name}"
@property
def migration_name_fragment(self):
return "create_collation_%s" % self.name.lower()
class RemoveCollation(CollationOperation):
"""Remove a collation."""
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.vendor != "postgresql" or not router.allow_migrate(
schema_editor.connection.alias, app_label
):
return
self.remove_collation(schema_editor)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if not router.allow_migrate(schema_editor.connection.alias, app_label):
return
self.create_collation(schema_editor)
def describe(self):
return f"Remove collation {self.name}"
@property
def migration_name_fragment(self):
return "remove_collation_%s" % self.name.lower()
class AddConstraintNotValid(AddConstraint):
"""
Add a table constraint without enforcing validation, using PostgreSQL's
NOT VALID syntax.
"""
def __init__(self, model_name, constraint):
if not isinstance(constraint, CheckConstraint):
raise TypeError(
"AddConstraintNotValid.constraint must be a check constraint."
)
super().__init__(model_name, constraint)
def describe(self):
return "Create not valid constraint %s on model %s" % (
self.constraint.name,
self.model_name,
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
constraint_sql = self.constraint.create_sql(model, schema_editor)
if constraint_sql:
                # Constraint.create_sql returns interpolated SQL, so pass
                # params=None to prevent any escaping attempt on execution.
schema_editor.execute(str(constraint_sql) + " NOT VALID", params=None)
@property
def migration_name_fragment(self):
return super().migration_name_fragment + "_not_valid"
class ValidateConstraint(Operation):
"""Validate a table NOT VALID constraint."""
def __init__(self, model_name, name):
self.model_name = model_name
self.name = name
def describe(self):
return "Validate constraint %s on model %s" % (self.name, self.model_name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.execute(
"ALTER TABLE %s VALIDATE CONSTRAINT %s"
% (
schema_editor.quote_name(model._meta.db_table),
schema_editor.quote_name(self.name),
)
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
# PostgreSQL does not provide a way to make a constraint invalid.
pass
def state_forwards(self, app_label, state):
pass
@property
def migration_name_fragment(self):
return "%s_validate_%s" % (self.model_name.lower(), self.name.lower())
def deconstruct(self):
return (
self.__class__.__name__,
[],
{
"model_name": self.model_name,
"name": self.name,
},
)
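# Usage sketch (illustrative only): the two-step NOT VALID workflow. The
# constraint is first added without checking existing rows, then validated
# once legacy data has been cleaned up (usually in a separate, later
# migration). The "myapp"/"order" names are hypothetical.
def _not_valid_workflow_example():
    from django.db import migrations, models

    class Migration(migrations.Migration):
        dependencies = [("myapp", "0001_initial")]
        operations = [
            AddConstraintNotValid(
                model_name="order",
                constraint=CheckConstraint(
                    check=models.Q(amount__gte=0), name="order_amount_gte_0"
                ),
            ),
            ValidateConstraint(model_name="order", name="order_amount_gte_0"),
        ]

    return Migration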
|
72991a79d6f2bda8e937834d0182c0e937ca23fe1f7bd014e3409e6216264bc4 | import functools
from django.db import connections
from django.db.backends.base.base import NO_DB_ALIAS
from django.db.backends.postgresql.psycopg_any import is_psycopg3
def get_type_oids(connection_alias, type_name):
with connections[connection_alias].cursor() as cursor:
cursor.execute(
"SELECT oid, typarray FROM pg_type WHERE typname = %s", (type_name,)
)
oids = []
array_oids = []
for row in cursor:
oids.append(row[0])
array_oids.append(row[1])
return tuple(oids), tuple(array_oids)
@functools.lru_cache
def get_hstore_oids(connection_alias):
"""Return hstore and hstore array OIDs."""
return get_type_oids(connection_alias, "hstore")
@functools.lru_cache
def get_citext_oids(connection_alias):
"""Return citext and citext array OIDs."""
return get_type_oids(connection_alias, "citext")
if is_psycopg3:
from psycopg.types import TypeInfo, hstore
def register_type_handlers(connection, **kwargs):
if connection.vendor != "postgresql" or connection.alias == NO_DB_ALIAS:
return
oids, array_oids = get_hstore_oids(connection.alias)
for oid, array_oid in zip(oids, array_oids):
ti = TypeInfo("hstore", oid, array_oid)
hstore.register_hstore(ti, connection.connection)
_, citext_oids = get_citext_oids(connection.alias)
for array_oid in citext_oids:
ti = TypeInfo("citext", 0, array_oid)
ti.register(connection.connection)
else:
import psycopg2
from psycopg2.extras import register_hstore
def register_type_handlers(connection, **kwargs):
if connection.vendor != "postgresql" or connection.alias == NO_DB_ALIAS:
return
oids, array_oids = get_hstore_oids(connection.alias)
# Don't register handlers when hstore is not available on the database.
#
        # If someone tries to create an hstore field, it will error at that
        # point. This is necessary because someone may be using PostgreSQL
        # without the hstore extension installed while still using other
        # features of contrib.postgres.
        #
        # This is also needed so the connection can be created in the first
        # place, in order to install the hstore extension.
if oids:
register_hstore(
connection.connection, globally=True, oid=oids, array_oid=array_oids
)
oids, citext_oids = get_citext_oids(connection.alias)
# Don't register handlers when citext is not available on the database.
#
# The same comments in the above call to register_hstore() also apply here.
if oids:
array_type = psycopg2.extensions.new_array_type(
citext_oids, "citext[]", psycopg2.STRING
)
psycopg2.extensions.register_type(array_type, None)
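# Note (illustrative only): both OID getters are memoized per connection
# alias via lru_cache, so they hit the database once per alias. Operations
# that install or drop an extension (see CreateExtension) call
# cache_clear() so stale OIDs aren't reused afterwards.
def _oid_cache_example():
    oids, array_oids = get_hstore_oids("default")  # cached after first call
    get_hstore_oids.cache_clear()  # e.g. after CREATE/DROP EXTENSION hstore
    return oids, array_oids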
|
84f3e65fa4b33ad4487756e59837090b8246acb833043bef79fb29959c8feff6 | import unicodedata
from django import forms
from django.contrib.auth import authenticate, get_user_model, password_validation
from django.contrib.auth.hashers import UNUSABLE_PASSWORD_PREFIX, identify_hasher
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ValidationError
from django.core.mail import EmailMultiAlternatives
from django.template import loader
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from django.utils.text import capfirst
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
UserModel = get_user_model()
def _unicode_ci_compare(s1, s2):
"""
Perform case-insensitive comparison of two identifiers, using the
recommended algorithm from Unicode Technical Report 36, section
2.11.2(B)(2).
"""
return (
unicodedata.normalize("NFKC", s1).casefold()
== unicodedata.normalize("NFKC", s2).casefold()
)
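# Illustrative check (not part of the public API): NFKC normalization plus
# casefold() treats compatibility-equivalent identifiers as equal, e.g.
# German "ß" matches "ss" and the "ﬁ" ligature matches "fi".
def _unicode_ci_compare_example():
    assert _unicode_ci_compare("weiß", "WEISS")
    assert _unicode_ci_compare("ﬁle", "FILE")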
class ReadOnlyPasswordHashWidget(forms.Widget):
template_name = "auth/widgets/read_only_password_hash.html"
read_only = True
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
summary = []
if not value or value.startswith(UNUSABLE_PASSWORD_PREFIX):
summary.append({"label": gettext("No password set.")})
else:
try:
hasher = identify_hasher(value)
except ValueError:
summary.append(
{
"label": gettext(
"Invalid password format or unknown hashing algorithm."
)
}
)
else:
for key, value_ in hasher.safe_summary(value).items():
summary.append({"label": gettext(key), "value": value_})
context["summary"] = summary
return context
def id_for_label(self, id_):
return None
class ReadOnlyPasswordHashField(forms.Field):
widget = ReadOnlyPasswordHashWidget
def __init__(self, *args, **kwargs):
kwargs.setdefault("required", False)
kwargs.setdefault("disabled", True)
super().__init__(*args, **kwargs)
class UsernameField(forms.CharField):
def to_python(self, value):
return unicodedata.normalize("NFKC", super().to_python(value))
def widget_attrs(self, widget):
return {
**super().widget_attrs(widget),
"autocapitalize": "none",
"autocomplete": "username",
}
class BaseUserCreationForm(forms.ModelForm):
"""
A form that creates a user, with no privileges, from the given username and
password.
"""
error_messages = {
"password_mismatch": _("The two password fields didn’t match."),
}
password1 = forms.CharField(
label=_("Password"),
strip=False,
widget=forms.PasswordInput(attrs={"autocomplete": "new-password"}),
help_text=password_validation.password_validators_help_text_html(),
)
password2 = forms.CharField(
label=_("Password confirmation"),
widget=forms.PasswordInput(attrs={"autocomplete": "new-password"}),
strip=False,
help_text=_("Enter the same password as before, for verification."),
)
class Meta:
model = User
fields = ("username",)
field_classes = {"username": UsernameField}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self._meta.model.USERNAME_FIELD in self.fields:
self.fields[self._meta.model.USERNAME_FIELD].widget.attrs[
"autofocus"
] = True
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise ValidationError(
self.error_messages["password_mismatch"],
code="password_mismatch",
)
return password2
def _post_clean(self):
super()._post_clean()
# Validate the password after self.instance is updated with form data
# by super().
password = self.cleaned_data.get("password2")
if password:
try:
password_validation.validate_password(password, self.instance)
except ValidationError as error:
self.add_error("password2", error)
def save(self, commit=True):
user = super().save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
if hasattr(self, "save_m2m"):
self.save_m2m()
return user
class UserCreationForm(BaseUserCreationForm):
error_messages = {
**BaseUserCreationForm.error_messages,
"unique": _("A user with that username already exists."),
}
def clean_username(self):
"""Reject usernames that differ only in case."""
username = self.cleaned_data.get("username")
if username and User.objects.filter(username__iexact=username).exists():
raise forms.ValidationError(self.error_messages["unique"], code="unique")
else:
return username
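# Usage sketch (illustrative only): creating a user through the form. The
# clean_username() check above rejects usernames differing only in case.
def _user_creation_example():
    form = UserCreationForm(
        data={
            "username": "alice",
            "password1": "s3cure-Passw0rd!",
            "password2": "s3cure-Passw0rd!",
        }
    )
    if form.is_valid():
        return form.save()  # hashes the password via set_password()
    return form.errors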
class UserChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField(
label=_("Password"),
help_text=_(
"Raw passwords are not stored, so there is no way to see this "
"user’s password, but you can change the password using "
'<a href="{}">this form</a>.'
),
)
class Meta:
model = User
fields = "__all__"
field_classes = {"username": UsernameField}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
password = self.fields.get("password")
if password:
password.help_text = password.help_text.format(
f"../../{self.instance.pk}/password/"
)
user_permissions = self.fields.get("user_permissions")
if user_permissions:
user_permissions.queryset = user_permissions.queryset.select_related(
"content_type"
)
class AuthenticationForm(forms.Form):
"""
Base class for authenticating users. Extend this to get a form that accepts
username/password logins.
"""
username = UsernameField(widget=forms.TextInput(attrs={"autofocus": True}))
password = forms.CharField(
label=_("Password"),
strip=False,
widget=forms.PasswordInput(attrs={"autocomplete": "current-password"}),
)
error_messages = {
"invalid_login": _(
"Please enter a correct %(username)s and password. Note that both "
"fields may be case-sensitive."
),
"inactive": _("This account is inactive."),
}
def __init__(self, request=None, *args, **kwargs):
"""
The 'request' parameter is set for custom auth use by subclasses.
The form data comes in via the standard 'data' kwarg.
"""
self.request = request
self.user_cache = None
super().__init__(*args, **kwargs)
# Set the max length and label for the "username" field.
self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
username_max_length = self.username_field.max_length or 254
self.fields["username"].max_length = username_max_length
self.fields["username"].widget.attrs["maxlength"] = username_max_length
if self.fields["username"].label is None:
self.fields["username"].label = capfirst(self.username_field.verbose_name)
def clean(self):
username = self.cleaned_data.get("username")
password = self.cleaned_data.get("password")
if username is not None and password:
self.user_cache = authenticate(
self.request, username=username, password=password
)
if self.user_cache is None:
raise self.get_invalid_login_error()
else:
self.confirm_login_allowed(self.user_cache)
return self.cleaned_data
def confirm_login_allowed(self, user):
"""
Controls whether the given User may log in. This is a policy setting,
        independent of end-user authentication. The default behavior is to
        allow login by active users and reject login by inactive users.
If the given user cannot log in, this method should raise a
``ValidationError``.
If the given user may log in, this method should return None.
"""
if not user.is_active:
raise ValidationError(
self.error_messages["inactive"],
code="inactive",
)
def get_user(self):
return self.user_cache
def get_invalid_login_error(self):
return ValidationError(
self.error_messages["invalid_login"],
code="invalid_login",
params={"username": self.username_field.verbose_name},
)
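# Usage sketch (illustrative only): driving AuthenticationForm from a
# minimal view. Passing ``request`` lets authenticate() and subclasses
# inspect it.
def _authentication_form_example(request):
    from django.contrib.auth import login

    form = AuthenticationForm(request, data=request.POST or None)
    if request.method == "POST" and form.is_valid():
        login(request, form.get_user())  # user was validated in clean()
    return form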
class PasswordResetForm(forms.Form):
email = forms.EmailField(
label=_("Email"),
max_length=254,
widget=forms.EmailInput(attrs={"autocomplete": "email"}),
)
def send_mail(
self,
subject_template_name,
email_template_name,
context,
from_email,
to_email,
html_email_template_name=None,
):
"""
Send a django.core.mail.EmailMultiAlternatives to `to_email`.
"""
subject = loader.render_to_string(subject_template_name, context)
# Email subject *must not* contain newlines
subject = "".join(subject.splitlines())
body = loader.render_to_string(email_template_name, context)
email_message = EmailMultiAlternatives(subject, body, from_email, [to_email])
if html_email_template_name is not None:
html_email = loader.render_to_string(html_email_template_name, context)
email_message.attach_alternative(html_email, "text/html")
email_message.send()
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This allows subclasses to more easily customize the default policies
that prevent inactive users and users with unusable passwords from
resetting their password.
"""
email_field_name = UserModel.get_email_field_name()
active_users = UserModel._default_manager.filter(
**{
"%s__iexact" % email_field_name: email,
"is_active": True,
}
)
return (
u
for u in active_users
if u.has_usable_password()
and _unicode_ci_compare(email, getattr(u, email_field_name))
)
def save(
self,
domain_override=None,
subject_template_name="registration/password_reset_subject.txt",
email_template_name="registration/password_reset_email.html",
use_https=False,
token_generator=default_token_generator,
from_email=None,
request=None,
html_email_template_name=None,
extra_email_context=None,
):
"""
        Generate a one-use only link for resetting the password and send it
        to the user.
"""
email = self.cleaned_data["email"]
if not domain_override:
current_site = get_current_site(request)
site_name = current_site.name
domain = current_site.domain
else:
site_name = domain = domain_override
email_field_name = UserModel.get_email_field_name()
for user in self.get_users(email):
user_email = getattr(user, email_field_name)
context = {
"email": user_email,
"domain": domain,
"site_name": site_name,
"uid": urlsafe_base64_encode(force_bytes(user.pk)),
"user": user,
"token": token_generator.make_token(user),
"protocol": "https" if use_https else "http",
**(extra_email_context or {}),
}
self.send_mail(
subject_template_name,
email_template_name,
context,
from_email,
user_email,
html_email_template_name=html_email_template_name,
)
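# Usage sketch (illustrative only): triggering a reset email from a view.
# save() sends nothing for unknown or inactive addresses, so it doesn't
# leak which emails have accounts.
def _password_reset_example(request):
    form = PasswordResetForm(data={"email": "user@example.com"})
    if form.is_valid():
        form.save(request=request, use_https=request.is_secure())
    return form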
class SetPasswordForm(forms.Form):
"""
A form that lets a user set their password without entering the old
password
"""
error_messages = {
"password_mismatch": _("The two password fields didn’t match."),
}
new_password1 = forms.CharField(
label=_("New password"),
widget=forms.PasswordInput(attrs={"autocomplete": "new-password"}),
strip=False,
help_text=password_validation.password_validators_help_text_html(),
)
new_password2 = forms.CharField(
label=_("New password confirmation"),
strip=False,
widget=forms.PasswordInput(attrs={"autocomplete": "new-password"}),
)
def __init__(self, user, *args, **kwargs):
self.user = user
super().__init__(*args, **kwargs)
def clean_new_password2(self):
password1 = self.cleaned_data.get("new_password1")
password2 = self.cleaned_data.get("new_password2")
if password1 and password2 and password1 != password2:
raise ValidationError(
self.error_messages["password_mismatch"],
code="password_mismatch",
)
password_validation.validate_password(password2, self.user)
return password2
def save(self, commit=True):
password = self.cleaned_data["new_password1"]
self.user.set_password(password)
if commit:
self.user.save()
return self.user
class PasswordChangeForm(SetPasswordForm):
"""
A form that lets a user change their password by entering their old
password.
"""
error_messages = {
**SetPasswordForm.error_messages,
"password_incorrect": _(
"Your old password was entered incorrectly. Please enter it again."
),
}
old_password = forms.CharField(
label=_("Old password"),
strip=False,
widget=forms.PasswordInput(
attrs={"autocomplete": "current-password", "autofocus": True}
),
)
field_order = ["old_password", "new_password1", "new_password2"]
def clean_old_password(self):
"""
Validate that the old_password field is correct.
"""
old_password = self.cleaned_data["old_password"]
if not self.user.check_password(old_password):
raise ValidationError(
self.error_messages["password_incorrect"],
code="password_incorrect",
)
return old_password
class AdminPasswordChangeForm(forms.Form):
"""
A form used to change the password of a user in the admin interface.
"""
error_messages = {
"password_mismatch": _("The two password fields didn’t match."),
}
required_css_class = "required"
password1 = forms.CharField(
label=_("Password"),
widget=forms.PasswordInput(
attrs={"autocomplete": "new-password", "autofocus": True}
),
strip=False,
help_text=password_validation.password_validators_help_text_html(),
)
password2 = forms.CharField(
label=_("Password (again)"),
widget=forms.PasswordInput(attrs={"autocomplete": "new-password"}),
strip=False,
help_text=_("Enter the same password as before, for verification."),
)
def __init__(self, user, *args, **kwargs):
self.user = user
super().__init__(*args, **kwargs)
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise ValidationError(
self.error_messages["password_mismatch"],
code="password_mismatch",
)
password_validation.validate_password(password2, self.user)
return password2
def save(self, commit=True):
"""Save the new password."""
password = self.cleaned_data["password1"]
self.user.set_password(password)
if commit:
self.user.save()
return self.user
@property
def changed_data(self):
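        # Report either no change at all, or a single "password"
        # pseudo-field change when every form field was modified.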
data = super().changed_data
for name in self.fields:
if name not in data:
return []
return ["password"]
|
f2012db24d9ee44c9bfa53ecd567528e852c17d712301d40b438c64f1450cb13 | import json
import os
import posixpath
import re
from urllib.parse import unquote, urldefrag, urlsplit, urlunsplit
from django.conf import settings
from django.contrib.staticfiles.utils import check_settings, matches_patterns
from django.core.exceptions import ImproperlyConfigured
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.utils.crypto import md5
from django.utils.functional import LazyObject
class StaticFilesStorage(FileSystemStorage):
"""
Standard file system storage for static files.
The defaults for ``location`` and ``base_url`` are
``STATIC_ROOT`` and ``STATIC_URL``.
"""
def __init__(self, location=None, base_url=None, *args, **kwargs):
if location is None:
location = settings.STATIC_ROOT
if base_url is None:
base_url = settings.STATIC_URL
check_settings(base_url)
super().__init__(location, base_url, *args, **kwargs)
        # FileSystemStorage falls back to MEDIA_ROOT when location is
        # empty, so restore the empty value.
if not location:
self.base_location = None
self.location = None
def path(self, name):
if not self.location:
raise ImproperlyConfigured(
"You're using the staticfiles app "
"without having set the STATIC_ROOT "
"setting to a filesystem path."
)
return super().path(name)
class HashedFilesMixin:
default_template = """url("%(url)s")"""
max_post_process_passes = 5
patterns = (
(
"*.css",
(
r"""(?P<matched>url\(['"]{0,1}\s*(?P<url>.*?)["']{0,1}\))""",
(
r"""(?P<matched>@import\s*["']\s*(?P<url>.*?)["'])""",
"""@import url("%(url)s")""",
),
(
(
r"(?m)(?P<matched>)^(/\*#[ \t]"
r"(?-i:sourceMappingURL)=(?P<url>.*)[ \t]*\*/)$"
),
"/*# sourceMappingURL=%(url)s */",
),
),
),
(
"*.js",
(
(
r"(?m)(?P<matched>)^(//# (?-i:sourceMappingURL)=(?P<url>.*))$",
"//# sourceMappingURL=%(url)s",
),
(
(
r"""(?P<matched>import(?s:(?P<import>[\s\{].*?))"""
r"""\s*from\s*['"](?P<url>[\.\/].*?)["']\s*;)"""
),
"""import%(import)s from "%(url)s";""",
),
(
(
r"""(?P<matched>export(?s:(?P<exports>[\s\{].*?))"""
r"""\s*from\s*["'](?P<url>[\.\/].*?)["']\s*;)"""
),
"""export%(exports)s from "%(url)s";""",
),
(
r"""(?P<matched>import\s*['"](?P<url>[\.\/].*?)["']\s*;)""",
"""import"%(url)s";""",
),
(
r"""(?P<matched>import\(["'](?P<url>.*?)["']\))""",
"""import("%(url)s")""",
),
),
),
)
keep_intermediate_files = True
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._patterns = {}
self.hashed_files = {}
for extension, patterns in self.patterns:
for pattern in patterns:
if isinstance(pattern, (tuple, list)):
pattern, template = pattern
else:
template = self.default_template
compiled = re.compile(pattern, re.IGNORECASE)
self._patterns.setdefault(extension, []).append((compiled, template))
def file_hash(self, name, content=None):
"""
Return a hash of the file with the given name and optional content.
"""
if content is None:
return None
hasher = md5(usedforsecurity=False)
for chunk in content.chunks():
hasher.update(chunk)
return hasher.hexdigest()[:12]
def hashed_name(self, name, content=None, filename=None):
        # `filename` is the name of the file to hash if `content` isn't given.
# `name` is the base name to construct the new hashed filename from.
parsed_name = urlsplit(unquote(name))
clean_name = parsed_name.path.strip()
filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name
opened = content is None
if opened:
if not self.exists(filename):
raise ValueError(
"The file '%s' could not be found with %r." % (filename, self)
)
try:
content = self.open(filename)
except OSError:
# Handle directory paths and fragments
return name
try:
file_hash = self.file_hash(clean_name, content)
finally:
if opened:
content.close()
path, filename = os.path.split(clean_name)
root, ext = os.path.splitext(filename)
file_hash = (".%s" % file_hash) if file_hash else ""
hashed_name = os.path.join(path, "%s%s%s" % (root, file_hash, ext))
unparsed_name = list(parsed_name)
unparsed_name[2] = hashed_name
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
if "?#" in name and not unparsed_name[3]:
unparsed_name[2] += "?"
return urlunsplit(unparsed_name)
def _url(self, hashed_name_func, name, force=False, hashed_files=None):
"""
Return the non-hashed URL in DEBUG mode.
"""
if settings.DEBUG and not force:
hashed_name, fragment = name, ""
else:
clean_name, fragment = urldefrag(name)
if urlsplit(clean_name).path.endswith("/"): # don't hash paths
hashed_name = name
else:
args = (clean_name,)
if hashed_files is not None:
args += (hashed_files,)
hashed_name = hashed_name_func(*args)
final_url = super().url(hashed_name)
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
query_fragment = "?#" in name # [sic!]
if fragment or query_fragment:
urlparts = list(urlsplit(final_url))
if fragment and not urlparts[4]:
urlparts[4] = fragment
if query_fragment and not urlparts[3]:
urlparts[2] += "?"
final_url = urlunsplit(urlparts)
return unquote(final_url)
def url(self, name, force=False):
"""
Return the non-hashed URL in DEBUG mode.
"""
return self._url(self.stored_name, name, force)
def url_converter(self, name, hashed_files, template=None):
"""
Return the custom URL converter for the given file name.
"""
if template is None:
template = self.default_template
def converter(matchobj):
"""
Convert the matched URL to a normalized and hashed URL.
This requires figuring out which files the matched URL resolves
to and calling the url() method of the storage.
"""
matches = matchobj.groupdict()
matched = matches["matched"]
url = matches["url"]
# Ignore absolute/protocol-relative and data-uri URLs.
if re.match(r"^[a-z]+:", url):
return matched
# Ignore absolute URLs that don't point to a static file (dynamic
# CSS / JS?). Note that STATIC_URL cannot be empty.
if url.startswith("/") and not url.startswith(settings.STATIC_URL):
return matched
# Strip off the fragment so a path-like fragment won't interfere.
url_path, fragment = urldefrag(url)
# Ignore URLs without a path
if not url_path:
return matched
if url_path.startswith("/"):
# Otherwise the condition above would have returned prematurely.
assert url_path.startswith(settings.STATIC_URL)
target_name = url_path[len(settings.STATIC_URL) :]
else:
# We're using the posixpath module to mix paths and URLs conveniently.
source_name = name if os.sep == "/" else name.replace(os.sep, "/")
target_name = posixpath.join(posixpath.dirname(source_name), url_path)
# Determine the hashed name of the target file with the storage backend.
hashed_url = self._url(
self._stored_name,
unquote(target_name),
force=True,
hashed_files=hashed_files,
)
transformed_url = "/".join(
url_path.split("/")[:-1] + hashed_url.split("/")[-1:]
)
# Restore the fragment that was stripped off earlier.
if fragment:
transformed_url += ("?#" if "?#" in url else "#") + fragment
# Return the hashed version to the file
matches["url"] = unquote(transformed_url)
return template % matches
return converter
def post_process(self, paths, dry_run=False, **options):
"""
Post process the given dictionary of files (called from collectstatic).
Processing is actually two separate operations:
1. renaming files to include a hash of their content for cache-busting,
and copying those files to the target storage.
2. adjusting files which contain references to other files so they
refer to the cache-busting filenames.
        If either of these is performed on a file, then that file is
        considered post-processed.
"""
# don't even dare to process the files if we're in dry run mode
if dry_run:
return
# where to store the new paths
hashed_files = {}
# build a list of adjustable files
adjustable_paths = [
path for path in paths if matches_patterns(path, self._patterns)
]
# Adjustable files to yield at end, keyed by the original path.
processed_adjustable_paths = {}
        # Do a single pass first. Post-process all files once, yielding
        # non-adjustable files and exceptions, and collecting adjustable
        # files.
for name, hashed_name, processed, _ in self._post_process(
paths, adjustable_paths, hashed_files
):
if name not in adjustable_paths or isinstance(processed, Exception):
yield name, hashed_name, processed
else:
processed_adjustable_paths[name] = (name, hashed_name, processed)
paths = {path: paths[path] for path in adjustable_paths}
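        # References can be nested (e.g. a CSS file importing another CSS
        # file that itself references images), so keep re-processing until
        # a full pass makes no substitutions, bounded by
        # max_post_process_passes.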
substitutions = False
for i in range(self.max_post_process_passes):
substitutions = False
for name, hashed_name, processed, subst in self._post_process(
paths, adjustable_paths, hashed_files
):
# Overwrite since hashed_name may be newer.
processed_adjustable_paths[name] = (name, hashed_name, processed)
substitutions = substitutions or subst
if not substitutions:
break
if substitutions:
yield "All", None, RuntimeError("Max post-process passes exceeded.")
# Store the processed paths
self.hashed_files.update(hashed_files)
# Yield adjustable files with final, hashed name.
yield from processed_adjustable_paths.values()
def _post_process(self, paths, adjustable_paths, hashed_files):
# Sort the files by directory level
def path_level(name):
return len(name.split(os.sep))
for name in sorted(paths, key=path_level, reverse=True):
substitutions = True
# use the original, local file, not the copied-but-unprocessed
# file, which might be somewhere far away, like S3
storage, path = paths[name]
with storage.open(path) as original_file:
cleaned_name = self.clean_name(name)
hash_key = self.hash_key(cleaned_name)
# generate the hash with the original content, even for
# adjustable files.
if hash_key not in hashed_files:
hashed_name = self.hashed_name(name, original_file)
else:
hashed_name = hashed_files[hash_key]
# then get the original's file content..
if hasattr(original_file, "seek"):
original_file.seek(0)
hashed_file_exists = self.exists(hashed_name)
processed = False
# ..to apply each replacement pattern to the content
if name in adjustable_paths:
old_hashed_name = hashed_name
content = original_file.read().decode("utf-8")
for extension, patterns in self._patterns.items():
if matches_patterns(path, (extension,)):
for pattern, template in patterns:
converter = self.url_converter(
name, hashed_files, template
)
try:
content = pattern.sub(converter, content)
except ValueError as exc:
yield name, None, exc, False
if hashed_file_exists:
self.delete(hashed_name)
# then save the processed result
content_file = ContentFile(content.encode())
if self.keep_intermediate_files:
# Save intermediate file for reference
self._save(hashed_name, content_file)
hashed_name = self.hashed_name(name, content_file)
if self.exists(hashed_name):
self.delete(hashed_name)
saved_name = self._save(hashed_name, content_file)
hashed_name = self.clean_name(saved_name)
# If the file hash stayed the same, this file didn't change
if old_hashed_name == hashed_name:
substitutions = False
processed = True
if not processed:
# or handle the case in which neither processing nor
# a change to the original file happened
if not hashed_file_exists:
processed = True
saved_name = self._save(hashed_name, original_file)
hashed_name = self.clean_name(saved_name)
# and then set the cache accordingly
hashed_files[hash_key] = hashed_name
yield name, hashed_name, processed, substitutions
def clean_name(self, name):
return name.replace("\\", "/")
def hash_key(self, name):
return name
def _stored_name(self, name, hashed_files):
# Normalize the path to avoid multiple names for the same file like
# ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same
# path.
name = posixpath.normpath(name)
cleaned_name = self.clean_name(name)
hash_key = self.hash_key(cleaned_name)
cache_name = hashed_files.get(hash_key)
if cache_name is None:
cache_name = self.clean_name(self.hashed_name(name))
return cache_name
def stored_name(self, name):
cleaned_name = self.clean_name(name)
hash_key = self.hash_key(cleaned_name)
cache_name = self.hashed_files.get(hash_key)
if cache_name:
return cache_name
# No cached name found, recalculate it from the files.
intermediate_name = name
for i in range(self.max_post_process_passes + 1):
cache_name = self.clean_name(
self.hashed_name(name, content=None, filename=intermediate_name)
)
if intermediate_name == cache_name:
# Store the hashed name if there was a miss.
self.hashed_files[hash_key] = cache_name
return cache_name
else:
# Move on to the next intermediate file.
intermediate_name = cache_name
# If the cache name can't be determined after the max number of passes,
# the intermediate files on disk may be corrupt; avoid an infinite loop.
raise ValueError("The name '%s' could not be hashed with %r." % (name, self))
class ManifestFilesMixin(HashedFilesMixin):
manifest_version = "1.1" # the manifest format standard
manifest_name = "staticfiles.json"
manifest_strict = True
keep_intermediate_files = False
def __init__(self, *args, manifest_storage=None, **kwargs):
super().__init__(*args, **kwargs)
if manifest_storage is None:
manifest_storage = self
self.manifest_storage = manifest_storage
self.hashed_files, self.manifest_hash = self.load_manifest()
def read_manifest(self):
try:
with self.manifest_storage.open(self.manifest_name) as manifest:
return manifest.read().decode()
except FileNotFoundError:
return None
def load_manifest(self):
content = self.read_manifest()
if content is None:
return {}, ""
try:
stored = json.loads(content)
except json.JSONDecodeError:
pass
else:
version = stored.get("version")
if version in ("1.0", "1.1"):
return stored.get("paths", {}), stored.get("hash", "")
raise ValueError(
"Couldn't load manifest '%s' (version %s)"
% (self.manifest_name, self.manifest_version)
)
def post_process(self, *args, **kwargs):
self.hashed_files = {}
yield from super().post_process(*args, **kwargs)
if not kwargs.get("dry_run"):
self.save_manifest()
def save_manifest(self):
self.manifest_hash = self.file_hash(
None, ContentFile(json.dumps(sorted(self.hashed_files.items())).encode())
)
payload = {
"paths": self.hashed_files,
"version": self.manifest_version,
"hash": self.manifest_hash,
}
if self.manifest_storage.exists(self.manifest_name):
self.manifest_storage.delete(self.manifest_name)
contents = json.dumps(payload).encode()
self.manifest_storage._save(self.manifest_name, ContentFile(contents))
def stored_name(self, name):
parsed_name = urlsplit(unquote(name))
clean_name = parsed_name.path.strip()
hash_key = self.hash_key(clean_name)
cache_name = self.hashed_files.get(hash_key)
if cache_name is None:
if self.manifest_strict:
raise ValueError(
"Missing staticfiles manifest entry for '%s'" % clean_name
)
cache_name = self.clean_name(self.hashed_name(name))
unparsed_name = list(parsed_name)
unparsed_name[2] = cache_name
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
if "?#" in name and not unparsed_name[3]:
unparsed_name[2] += "?"
return urlunsplit(unparsed_name)
class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage):
"""
    A static file system storage backend that also saves hashed copies of
    the files it stores.
"""
pass
class ConfiguredStorage(LazyObject):
def _setup(self):
self._wrapped = get_storage_class(settings.STATICFILES_STORAGE)()
staticfiles_storage = ConfiguredStorage()
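# Usage sketch (illustrative only): enabling hashed filenames in production
# via the STATICFILES_STORAGE setting that ConfiguredStorage reads above.
# After ``collectstatic``, {% static "css/base.css" %} resolves to the
# hashed name recorded in staticfiles.json.
#
# # settings.py
# STATICFILES_STORAGE = (
#     "django.contrib.staticfiles.storage.ManifestStaticFilesStorage"
# )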
|
029ec7e4741321f53bbd45fe5eb2890473cf4778c7baf3a87ed55ab45409c372 | import json
from django.contrib.postgres import lookups
from django.contrib.postgres.forms import SimpleArrayField
from django.contrib.postgres.validators import ArrayMaxLengthValidator
from django.core import checks, exceptions
from django.db.models import Field, Func, IntegerField, Transform, Value
from django.db.models.fields.mixins import CheckFieldDefaultMixin
from django.db.models.lookups import Exact, In
from django.utils.translation import gettext_lazy as _
from ..utils import prefix_validation_error
from .utils import AttributeSetter
__all__ = ["ArrayField"]
class ArrayField(CheckFieldDefaultMixin, Field):
empty_strings_allowed = False
default_error_messages = {
"item_invalid": _("Item %(nth)s in the array did not validate:"),
"nested_array_mismatch": _("Nested arrays must have the same length."),
}
_default_hint = ("list", "[]")
def __init__(self, base_field, size=None, **kwargs):
self.base_field = base_field
self.db_collation = getattr(self.base_field, "db_collation", None)
self.size = size
if self.size:
self.default_validators = [
*self.default_validators,
ArrayMaxLengthValidator(self.size),
]
# For performance, only add a from_db_value() method if the base field
# implements it.
if hasattr(self.base_field, "from_db_value"):
self.from_db_value = self._from_db_value
super().__init__(**kwargs)
@property
def model(self):
try:
return self.__dict__["model"]
except KeyError:
raise AttributeError(
"'%s' object has no attribute 'model'" % self.__class__.__name__
)
@model.setter
def model(self, model):
self.__dict__["model"] = model
self.base_field.model = model
@classmethod
def _choices_is_value(cls, value):
return isinstance(value, (list, tuple)) or super()._choices_is_value(value)
def check(self, **kwargs):
errors = super().check(**kwargs)
if self.base_field.remote_field:
errors.append(
checks.Error(
"Base field for array cannot be a related field.",
obj=self,
id="postgres.E002",
)
)
else:
# Remove the field name checks as they are not needed here.
base_checks = self.base_field.check()
if base_checks:
error_messages = "\n ".join(
"%s (%s)" % (base_check.msg, base_check.id)
for base_check in base_checks
if isinstance(base_check, checks.Error)
)
if error_messages:
errors.append(
checks.Error(
"Base field for array has errors:\n %s" % error_messages,
obj=self,
id="postgres.E001",
)
)
warning_messages = "\n ".join(
"%s (%s)" % (base_check.msg, base_check.id)
for base_check in base_checks
if isinstance(base_check, checks.Warning)
)
if warning_messages:
errors.append(
checks.Warning(
"Base field for array has warnings:\n %s"
% warning_messages,
obj=self,
id="postgres.W004",
)
)
return errors
def set_attributes_from_name(self, name):
super().set_attributes_from_name(name)
self.base_field.set_attributes_from_name(name)
@property
def description(self):
return "Array of %s" % self.base_field.description
def db_type(self, connection):
size = self.size or ""
return "%s[%s]" % (self.base_field.db_type(connection), size)
def cast_db_type(self, connection):
size = self.size or ""
return "%s[%s]" % (self.base_field.cast_db_type(connection), size)
def db_parameters(self, connection):
db_params = super().db_parameters(connection)
db_params["collation"] = self.db_collation
return db_params
def get_placeholder(self, value, compiler, connection):
return "%s::{}".format(self.db_type(connection))
def get_db_prep_value(self, value, connection, prepared=False):
if isinstance(value, (list, tuple)):
return [
self.base_field.get_db_prep_value(i, connection, prepared=False)
for i in value
]
return value
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if path == "django.contrib.postgres.fields.array.ArrayField":
path = "django.contrib.postgres.fields.ArrayField"
kwargs.update(
{
"base_field": self.base_field.clone(),
"size": self.size,
}
)
return name, path, args, kwargs
def to_python(self, value):
if isinstance(value, str):
# Assume we're deserializing
vals = json.loads(value)
value = [self.base_field.to_python(val) for val in vals]
return value
def _from_db_value(self, value, expression, connection):
if value is None:
return value
return [
self.base_field.from_db_value(item, expression, connection)
for item in value
]
def value_to_string(self, obj):
values = []
vals = self.value_from_object(obj)
base_field = self.base_field
for val in vals:
if val is None:
values.append(None)
else:
obj = AttributeSetter(base_field.attname, val)
values.append(base_field.value_to_string(obj))
return json.dumps(values)
def get_transform(self, name):
transform = super().get_transform(name)
if transform:
return transform
if "_" not in name:
try:
index = int(name)
except ValueError:
pass
else:
index += 1 # postgres uses 1-indexing
return IndexTransformFactory(index, self.base_field)
try:
start, end = name.split("_")
start = int(start) + 1
            end = int(end)  # no +1 here; PostgreSQL slice upper bounds are inclusive
except ValueError:
pass
else:
return SliceTransformFactory(start, end)
def validate(self, value, model_instance):
super().validate(value, model_instance)
for index, part in enumerate(value):
try:
self.base_field.validate(part, model_instance)
except exceptions.ValidationError as error:
raise prefix_validation_error(
error,
prefix=self.error_messages["item_invalid"],
code="item_invalid",
params={"nth": index + 1},
)
if isinstance(self.base_field, ArrayField):
if len({len(i) for i in value}) > 1:
raise exceptions.ValidationError(
self.error_messages["nested_array_mismatch"],
code="nested_array_mismatch",
)
def run_validators(self, value):
super().run_validators(value)
for index, part in enumerate(value):
try:
self.base_field.run_validators(part)
except exceptions.ValidationError as error:
raise prefix_validation_error(
error,
prefix=self.error_messages["item_invalid"],
code="item_invalid",
params={"nth": index + 1},
)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": SimpleArrayField,
"base_field": self.base_field.formfield(),
"max_length": self.size,
**kwargs,
}
)
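# Usage sketch (illustrative only): declaring and querying an ArrayField.
# ``Post`` is a hypothetical model.
#
# from django.db import models
#
# class Post(models.Model):
#     tags = ArrayField(models.CharField(max_length=32), size=8, default=list)
#
# Post.objects.filter(tags__contains=["django"])  # ArrayContains, below
# Post.objects.filter(tags__len=0)                # ArrayLenTransform, below
# Post.objects.filter(tags__0="featured")         # IndexTransform, below
# Post.objects.filter(tags__0_2=["a", "b"])       # SliceTransform, below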
class ArrayRHSMixin:
def __init__(self, lhs, rhs):
        # Don't wrap arrays that contain only None values; psycopg doesn't
        # allow this.
if isinstance(rhs, (tuple, list)) and any(self._rhs_not_none_values(rhs)):
expressions = []
for value in rhs:
if not hasattr(value, "resolve_expression"):
field = lhs.output_field
value = Value(field.base_field.get_prep_value(value))
expressions.append(value)
rhs = Func(
*expressions,
function="ARRAY",
template="%(function)s[%(expressions)s]",
)
super().__init__(lhs, rhs)
def process_rhs(self, compiler, connection):
rhs, rhs_params = super().process_rhs(compiler, connection)
cast_type = self.lhs.output_field.cast_db_type(connection)
return "%s::%s" % (rhs, cast_type), rhs_params
def _rhs_not_none_values(self, rhs):
for x in rhs:
if isinstance(x, (list, tuple)):
yield from self._rhs_not_none_values(x)
elif x is not None:
yield True
@ArrayField.register_lookup
class ArrayContains(ArrayRHSMixin, lookups.DataContains):
pass
@ArrayField.register_lookup
class ArrayContainedBy(ArrayRHSMixin, lookups.ContainedBy):
pass
@ArrayField.register_lookup
class ArrayExact(ArrayRHSMixin, Exact):
pass
@ArrayField.register_lookup
class ArrayOverlap(ArrayRHSMixin, lookups.Overlap):
pass
@ArrayField.register_lookup
class ArrayLenTransform(Transform):
lookup_name = "len"
output_field = IntegerField()
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
# Distinguish NULL and empty arrays
return (
"CASE WHEN %(lhs)s IS NULL THEN NULL ELSE "
"coalesce(array_length(%(lhs)s, 1), 0) END"
) % {"lhs": lhs}, params * 2
@ArrayField.register_lookup
class ArrayInLookup(In):
def get_prep_lookup(self):
values = super().get_prep_lookup()
if hasattr(values, "resolve_expression"):
return values
# In.process_rhs() expects values to be hashable, so convert lists
# to tuples.
prepared_values = []
for value in values:
if hasattr(value, "resolve_expression"):
prepared_values.append(value)
else:
prepared_values.append(tuple(value))
return prepared_values
class IndexTransform(Transform):
def __init__(self, index, base_field, *args, **kwargs):
super().__init__(*args, **kwargs)
self.index = index
self.base_field = base_field
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return "%s[%%s]" % lhs, params + [self.index]
@property
def output_field(self):
return self.base_field
class IndexTransformFactory:
def __init__(self, index, base_field):
self.index = index
self.base_field = base_field
def __call__(self, *args, **kwargs):
return IndexTransform(self.index, self.base_field, *args, **kwargs)
class SliceTransform(Transform):
def __init__(self, start, end, *args, **kwargs):
super().__init__(*args, **kwargs)
self.start = start
self.end = end
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return "%s[%%s:%%s]" % lhs, params + [self.start, self.end]
class SliceTransformFactory:
def __init__(self, start, end):
self.start = start
self.end = end
def __call__(self, *args, **kwargs):
return SliceTransform(self.start, self.end, *args, **kwargs)
|
21b8c0402ec8756b884aa1dd057aeb6a23863f350fff8a471dc9cbf15b98651b | import datetime
import json
from django.contrib.postgres import forms, lookups
from django.db import models
from django.db.backends.postgresql.psycopg_any import (
DateRange,
DateTimeTZRange,
NumericRange,
Range,
)
from django.db.models.functions import Cast
from django.db.models.lookups import PostgresOperatorLookup
from .utils import AttributeSetter
__all__ = [
"RangeField",
"IntegerRangeField",
"BigIntegerRangeField",
"DecimalRangeField",
"DateTimeRangeField",
"DateRangeField",
"RangeBoundary",
"RangeOperators",
]
class RangeBoundary(models.Expression):
"""A class that represents range boundaries."""
def __init__(self, inclusive_lower=True, inclusive_upper=False):
self.lower = "[" if inclusive_lower else "("
self.upper = "]" if inclusive_upper else ")"
def as_sql(self, compiler, connection):
return "'%s%s'" % (self.lower, self.upper), []
class RangeOperators:
# https://www.postgresql.org/docs/current/functions-range.html#RANGE-OPERATORS-TABLE
EQUAL = "="
NOT_EQUAL = "<>"
CONTAINS = "@>"
CONTAINED_BY = "<@"
OVERLAPS = "&&"
FULLY_LT = "<<"
FULLY_GT = ">>"
NOT_LT = "&>"
NOT_GT = "&<"
ADJACENT_TO = "-|-"
class RangeField(models.Field):
empty_strings_allowed = False
def __init__(self, *args, **kwargs):
if "default_bounds" in kwargs:
raise TypeError(
f"Cannot use 'default_bounds' with {self.__class__.__name__}."
)
# Initializing base_field here ensures that its model matches the model
# for self.
if hasattr(self, "base_field"):
self.base_field = self.base_field()
super().__init__(*args, **kwargs)
@property
def model(self):
try:
return self.__dict__["model"]
except KeyError:
raise AttributeError(
"'%s' object has no attribute 'model'" % self.__class__.__name__
)
@model.setter
def model(self, model):
self.__dict__["model"] = model
self.base_field.model = model
@classmethod
def _choices_is_value(cls, value):
return isinstance(value, (list, tuple)) or super()._choices_is_value(value)
def get_placeholder(self, value, compiler, connection):
return "%s::{}".format(self.db_type(connection))
def get_prep_value(self, value):
if value is None:
return None
elif isinstance(value, Range):
return value
elif isinstance(value, (list, tuple)):
return self.range_type(value[0], value[1])
return value
def to_python(self, value):
if isinstance(value, str):
# Assume we're deserializing
vals = json.loads(value)
for end in ("lower", "upper"):
if end in vals:
vals[end] = self.base_field.to_python(vals[end])
value = self.range_type(**vals)
elif isinstance(value, (list, tuple)):
value = self.range_type(value[0], value[1])
return value
def set_attributes_from_name(self, name):
super().set_attributes_from_name(name)
self.base_field.set_attributes_from_name(name)
def value_to_string(self, obj):
value = self.value_from_object(obj)
if value is None:
return None
if value.isempty:
return json.dumps({"empty": True})
base_field = self.base_field
result = {"bounds": value._bounds}
for end in ("lower", "upper"):
val = getattr(value, end)
if val is None:
result[end] = None
else:
obj = AttributeSetter(base_field.attname, val)
result[end] = base_field.value_to_string(obj)
return json.dumps(result)
def formfield(self, **kwargs):
kwargs.setdefault("form_class", self.form_field)
return super().formfield(**kwargs)
CANONICAL_RANGE_BOUNDS = "[)"
class ContinuousRangeField(RangeField):
"""
Continuous range field. It allows specifying default bounds for list and
tuple inputs.
"""
def __init__(self, *args, default_bounds=CANONICAL_RANGE_BOUNDS, **kwargs):
if default_bounds not in ("[)", "(]", "()", "[]"):
raise ValueError("default_bounds must be one of '[)', '(]', '()', or '[]'.")
self.default_bounds = default_bounds
super().__init__(*args, **kwargs)
def get_prep_value(self, value):
if isinstance(value, (list, tuple)):
return self.range_type(value[0], value[1], self.default_bounds)
return super().get_prep_value(value)
def formfield(self, **kwargs):
kwargs.setdefault("default_bounds", self.default_bounds)
return super().formfield(**kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.default_bounds and self.default_bounds != CANONICAL_RANGE_BOUNDS:
kwargs["default_bounds"] = self.default_bounds
return name, path, args, kwargs
class IntegerRangeField(RangeField):
base_field = models.IntegerField
range_type = NumericRange
form_field = forms.IntegerRangeField
def db_type(self, connection):
return "int4range"
class BigIntegerRangeField(RangeField):
base_field = models.BigIntegerField
range_type = NumericRange
form_field = forms.IntegerRangeField
def db_type(self, connection):
return "int8range"
class DecimalRangeField(ContinuousRangeField):
base_field = models.DecimalField
range_type = NumericRange
form_field = forms.DecimalRangeField
def db_type(self, connection):
return "numrange"
class DateTimeRangeField(ContinuousRangeField):
base_field = models.DateTimeField
range_type = DateTimeTZRange
form_field = forms.DateTimeRangeField
def db_type(self, connection):
return "tstzrange"
class DateRangeField(RangeField):
base_field = models.DateField
range_type = DateRange
form_field = forms.DateRangeField
def db_type(self, connection):
return "daterange"
class RangeContains(lookups.DataContains):
def get_prep_lookup(self):
if not isinstance(self.rhs, (list, tuple, Range)):
return Cast(self.rhs, self.lhs.field.base_field)
return super().get_prep_lookup()
RangeField.register_lookup(RangeContains)
RangeField.register_lookup(lookups.ContainedBy)
RangeField.register_lookup(lookups.Overlap)
class DateTimeRangeContains(PostgresOperatorLookup):
"""
Lookup for Date/DateTimeRange containment to cast the rhs to the correct
type.
"""
lookup_name = "contains"
postgres_operator = RangeOperators.CONTAINS
def process_rhs(self, compiler, connection):
# Transform rhs value for db lookup.
if isinstance(self.rhs, datetime.date):
value = models.Value(self.rhs)
self.rhs = value.resolve_expression(compiler.query)
return super().process_rhs(compiler, connection)
def as_postgresql(self, compiler, connection):
sql, params = super().as_postgresql(compiler, connection)
# Cast the rhs if needed.
cast_sql = ""
if (
isinstance(self.rhs, models.Expression)
and self.rhs._output_field_or_none
and
# Skip cast if rhs has a matching range type.
not isinstance(
self.rhs._output_field_or_none, self.lhs.output_field.__class__
)
):
cast_internal_type = self.lhs.output_field.base_field.get_internal_type()
cast_sql = "::{}".format(connection.data_types.get(cast_internal_type))
return "%s%s" % (sql, cast_sql), params
DateRangeField.register_lookup(DateTimeRangeContains)
DateTimeRangeField.register_lookup(DateTimeRangeContains)
class RangeContainedBy(PostgresOperatorLookup):
lookup_name = "contained_by"
type_mapping = {
"smallint": "int4range",
"integer": "int4range",
"bigint": "int8range",
"double precision": "numrange",
"numeric": "numrange",
"date": "daterange",
"timestamp with time zone": "tstzrange",
}
postgres_operator = RangeOperators.CONTAINED_BY
def process_rhs(self, compiler, connection):
rhs, rhs_params = super().process_rhs(compiler, connection)
# Ignore precision for DecimalFields.
db_type = self.lhs.output_field.cast_db_type(connection).split("(")[0]
cast_type = self.type_mapping[db_type]
return "%s::%s" % (rhs, cast_type), rhs_params
def process_lhs(self, compiler, connection):
lhs, lhs_params = super().process_lhs(compiler, connection)
if isinstance(self.lhs.output_field, models.FloatField):
lhs = "%s::numeric" % lhs
elif isinstance(self.lhs.output_field, models.SmallIntegerField):
lhs = "%s::integer" % lhs
return lhs, lhs_params
def get_prep_lookup(self):
return RangeField().get_prep_value(self.rhs)
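# A minimal usage sketch (illustrative; the "Event" model is hypothetical).
# The contained_by lookup, registered on the scalar fields below, tests a
# plain column against a range value; process_rhs() casts the right-hand side
# using type_mapping above:
#
#     Event.objects.filter(
#         start_time__contained_by=DateTimeTZRange(lower, upper),
#     )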
models.DateField.register_lookup(RangeContainedBy)
models.DateTimeField.register_lookup(RangeContainedBy)
models.IntegerField.register_lookup(RangeContainedBy)
models.FloatField.register_lookup(RangeContainedBy)
models.DecimalField.register_lookup(RangeContainedBy)
@RangeField.register_lookup
class FullyLessThan(PostgresOperatorLookup):
lookup_name = "fully_lt"
postgres_operator = RangeOperators.FULLY_LT
@RangeField.register_lookup
class FullGreaterThan(PostgresOperatorLookup):
lookup_name = "fully_gt"
postgres_operator = RangeOperators.FULLY_GT
@RangeField.register_lookup
class NotLessThan(PostgresOperatorLookup):
lookup_name = "not_lt"
postgres_operator = RangeOperators.NOT_LT
@RangeField.register_lookup
class NotGreaterThan(PostgresOperatorLookup):
lookup_name = "not_gt"
postgres_operator = RangeOperators.NOT_GT
@RangeField.register_lookup
class AdjacentToLookup(PostgresOperatorLookup):
lookup_name = "adjacent_to"
postgres_operator = RangeOperators.ADJACENT_TO
@RangeField.register_lookup
class RangeStartsWith(models.Transform):
lookup_name = "startswith"
function = "lower"
@property
def output_field(self):
return self.lhs.output_field.base_field
@RangeField.register_lookup
class RangeEndsWith(models.Transform):
lookup_name = "endswith"
function = "upper"
@property
def output_field(self):
return self.lhs.output_field.base_field
@RangeField.register_lookup
class IsEmpty(models.Transform):
lookup_name = "isempty"
function = "isempty"
output_field = models.BooleanField()
@RangeField.register_lookup
class LowerInclusive(models.Transform):
lookup_name = "lower_inc"
function = "LOWER_INC"
output_field = models.BooleanField()
@RangeField.register_lookup
class LowerInfinite(models.Transform):
lookup_name = "lower_inf"
function = "LOWER_INF"
output_field = models.BooleanField()
@RangeField.register_lookup
class UpperInclusive(models.Transform):
lookup_name = "upper_inc"
function = "UPPER_INC"
output_field = models.BooleanField()
@RangeField.register_lookup
class UpperInfinite(models.Transform):
lookup_name = "upper_inf"
function = "UPPER_INF"
output_field = models.BooleanField()
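# A minimal usage sketch of the transforms above (the "Event" model is
# hypothetical):
#
#     Event.objects.filter(timespan__isempty=True)      # isempty(timespan)
#     Event.objects.filter(timespan__lower_inc=True)    # LOWER_INC(timespan)
#     Event.objects.filter(timespan__startswith=value)  # lower(timespan) = ...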
|
70a01e5af44848f2d720f866d82e7b2eef46a08940775c62471ce7b3af577a10 | from django import forms
from django.core import exceptions
from django.db.backends.postgresql.psycopg_any import (
DateRange,
DateTimeTZRange,
NumericRange,
)
from django.forms.widgets import HiddenInput, MultiWidget
from django.utils.translation import gettext_lazy as _
__all__ = [
"BaseRangeField",
"IntegerRangeField",
"DecimalRangeField",
"DateTimeRangeField",
"DateRangeField",
"HiddenRangeWidget",
"RangeWidget",
]
class RangeWidget(MultiWidget):
def __init__(self, base_widget, attrs=None):
widgets = (base_widget, base_widget)
super().__init__(widgets, attrs)
def decompress(self, value):
if value:
return (value.lower, value.upper)
return (None, None)
class HiddenRangeWidget(RangeWidget):
"""A widget that splits input into two <input type="hidden"> inputs."""
def __init__(self, attrs=None):
super().__init__(HiddenInput, attrs)
class BaseRangeField(forms.MultiValueField):
default_error_messages = {
"invalid": _("Enter two valid values."),
"bound_ordering": _(
"The start of the range must not exceed the end of the range."
),
}
hidden_widget = HiddenRangeWidget
def __init__(self, **kwargs):
if "widget" not in kwargs:
kwargs["widget"] = RangeWidget(self.base_field.widget)
if "fields" not in kwargs:
kwargs["fields"] = [
self.base_field(required=False),
self.base_field(required=False),
]
kwargs.setdefault("required", False)
kwargs.setdefault("require_all_fields", False)
self.range_kwargs = {}
if default_bounds := kwargs.pop("default_bounds", None):
self.range_kwargs = {"bounds": default_bounds}
super().__init__(**kwargs)
def prepare_value(self, value):
lower_base, upper_base = self.fields
if isinstance(value, self.range_type):
return [
lower_base.prepare_value(value.lower),
upper_base.prepare_value(value.upper),
]
if value is None:
return [
lower_base.prepare_value(None),
upper_base.prepare_value(None),
]
return value
def compress(self, values):
if not values:
return None
lower, upper = values
if lower is not None and upper is not None and lower > upper:
raise exceptions.ValidationError(
self.error_messages["bound_ordering"],
code="bound_ordering",
)
try:
range_value = self.range_type(lower, upper, **self.range_kwargs)
except TypeError:
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
)
else:
return range_value
class IntegerRangeField(BaseRangeField):
default_error_messages = {"invalid": _("Enter two whole numbers.")}
base_field = forms.IntegerField
range_type = NumericRange
class DecimalRangeField(BaseRangeField):
default_error_messages = {"invalid": _("Enter two numbers.")}
base_field = forms.DecimalField
range_type = NumericRange
class DateTimeRangeField(BaseRangeField):
default_error_messages = {"invalid": _("Enter two valid date/times.")}
base_field = forms.DateTimeField
range_type = DateTimeTZRange
class DateRangeField(BaseRangeField):
default_error_messages = {"invalid": _("Enter two valid dates.")}
base_field = forms.DateField
range_type = DateRange
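# A minimal usage sketch (illustrative): cleaning a two-item list produces a
# range object of the field's range_type.
#
#     field = IntegerRangeField()
#     field.clean(["1", "5"])  # -> NumericRange(1, 5)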
|
41f93c237544cfa237b5d7cdc399b6ee24cb511dbf4a4b1efbc384e66853455a | import json
import warnings
from django.contrib.postgres.fields import ArrayField
from django.db.models import Aggregate, BooleanField, JSONField, TextField, Value
from django.utils.deprecation import RemovedInDjango50Warning, RemovedInDjango51Warning
from .mixins import OrderableAggMixin
__all__ = [
"ArrayAgg",
"BitAnd",
"BitOr",
"BitXor",
"BoolAnd",
"BoolOr",
"JSONBAgg",
"StringAgg",
]
# RemovedInDjango50Warning
NOT_PROVIDED = object()
class DeprecatedConvertValueMixin:
def __init__(self, *expressions, default=NOT_PROVIDED, **extra):
if default is NOT_PROVIDED:
default = None
self._default_provided = False
else:
self._default_provided = True
super().__init__(*expressions, default=default, **extra)
def resolve_expression(self, *args, **kwargs):
resolved = super().resolve_expression(*args, **kwargs)
if not self._default_provided:
resolved.empty_result_set_value = getattr(
self, "deprecation_empty_result_set_value", self.deprecation_value
)
return resolved
def convert_value(self, value, expression, connection):
if value is None and not self._default_provided:
warnings.warn(self.deprecation_msg, category=RemovedInDjango50Warning)
return self.deprecation_value
return value
class ArrayAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate):
function = "ARRAY_AGG"
template = "%(function)s(%(distinct)s%(expressions)s %(ordering)s)"
allow_distinct = True
# RemovedInDjango50Warning
deprecation_value = property(lambda self: [])
deprecation_msg = (
"In Django 5.0, ArrayAgg() will return None instead of an empty list "
"if there are no rows. Pass default=None to opt into the new behavior "
"and silence this warning or default=[] to keep the previous behavior."
)
@property
def output_field(self):
return ArrayField(self.source_expressions[0].output_field)
class BitAnd(Aggregate):
function = "BIT_AND"
class BitOr(Aggregate):
function = "BIT_OR"
class BitXor(Aggregate):
function = "BIT_XOR"
class BoolAnd(Aggregate):
function = "BOOL_AND"
output_field = BooleanField()
class BoolOr(Aggregate):
function = "BOOL_OR"
output_field = BooleanField()
class JSONBAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate):
function = "JSONB_AGG"
template = "%(function)s(%(distinct)s%(expressions)s %(ordering)s)"
allow_distinct = True
output_field = JSONField()
# RemovedInDjango50Warning
deprecation_value = "[]"
deprecation_empty_result_set_value = property(lambda self: [])
deprecation_msg = (
"In Django 5.0, JSONBAgg() will return None instead of an empty list "
"if there are no rows. Pass default=None to opt into the new behavior "
"and silence this warning or default=[] to keep the previous "
"behavior."
)
# RemovedInDjango51Warning: When the deprecation ends, remove __init__().
#
# RemovedInDjango50Warning: When the deprecation ends, replace with:
# def __init__(self, *expressions, default=None, **extra):
def __init__(self, *expressions, default=NOT_PROVIDED, **extra):
super().__init__(*expressions, default=default, **extra)
if (
isinstance(default, Value)
and isinstance(default.value, str)
and not isinstance(default.output_field, JSONField)
):
value = default.value
try:
decoded = json.loads(value)
except json.JSONDecodeError:
warnings.warn(
"Passing a Value() with an output_field that isn't a JSONField as "
"JSONBAgg(default) is deprecated. Pass default="
f"Value({value!r}, output_field=JSONField()) instead.",
stacklevel=2,
category=RemovedInDjango51Warning,
)
self.default.output_field = self.output_field
else:
self.default = Value(decoded, self.output_field)
warnings.warn(
"Passing an encoded JSON string as JSONBAgg(default) is "
f"deprecated. Pass default={decoded!r} instead.",
stacklevel=2,
category=RemovedInDjango51Warning,
)
class StringAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate):
function = "STRING_AGG"
template = "%(function)s(%(distinct)s%(expressions)s %(ordering)s)"
allow_distinct = True
output_field = TextField()
# RemovedInDjango50Warning
deprecation_value = ""
deprecation_msg = (
"In Django 5.0, StringAgg() will return None instead of an empty "
"string if there are no rows. Pass default=None to opt into the new "
'behavior and silence this warning or default="" to keep the previous behavior.'
)
def __init__(self, expression, delimiter, **extra):
delimiter_expr = Value(str(delimiter))
super().__init__(expression, delimiter_expr, **extra)
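# A minimal usage sketch (illustrative; the "Book" model is hypothetical):
#
#     Book.objects.aggregate(names=StringAgg("name", delimiter=", "))
#     Book.objects.aggregate(ids=ArrayAgg("id", ordering="-pages"))
#
# Without an explicit default, the deprecation paths above still convert an
# empty result to "" / [] respectively and emit RemovedInDjango50Warning.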
|
4867230d9189b07606ef447f546ec45f237369afa6576c686df3fa172e8a37b7 | """
Creates permissions for all installed apps that need permissions.
"""
import getpass
import unicodedata
from django.apps import apps as global_apps
from django.contrib.auth import get_permission_codename
from django.contrib.contenttypes.management import create_contenttypes
from django.core import exceptions
from django.db import DEFAULT_DB_ALIAS, router
def _get_all_permissions(opts):
"""
Return (codename, name) for all permissions in the given opts.
"""
return [*_get_builtin_permissions(opts), *opts.permissions]
def _get_builtin_permissions(opts):
"""
Return (codename, name) for all autogenerated permissions.
By default, this is ('add', 'change', 'delete', 'view').
"""
perms = []
for action in opts.default_permissions:
perms.append(
(
get_permission_codename(action, opts),
"Can %s %s" % (action, opts.verbose_name_raw),
)
)
return perms
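# A worked example (illustrative): for a model named Article with the default
# Meta.default_permissions, this returns pairs such as
# ("add_article", "Can add article"), ("change_article", "Can change article"),
# ("delete_article", "Can delete article"), ("view_article", "Can view article").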
def create_permissions(
app_config,
verbosity=2,
interactive=True,
using=DEFAULT_DB_ALIAS,
apps=global_apps,
**kwargs,
):
if not app_config.models_module:
return
# Ensure that contenttypes are created for this app. Needed if
# 'django.contrib.auth' is in INSTALLED_APPS before
# 'django.contrib.contenttypes'.
create_contenttypes(
app_config,
verbosity=verbosity,
interactive=interactive,
using=using,
apps=apps,
**kwargs,
)
app_label = app_config.label
try:
app_config = apps.get_app_config(app_label)
ContentType = apps.get_model("contenttypes", "ContentType")
Permission = apps.get_model("auth", "Permission")
except LookupError:
return
if not router.allow_migrate_model(using, Permission):
return
# This will hold the permissions we're looking for as
# (content_type, (codename, name))
searched_perms = []
# The codenames and ctypes that should exist.
ctypes = set()
for klass in app_config.get_models():
# Force looking up the content types in the current database
# before creating foreign keys to them.
ctype = ContentType.objects.db_manager(using).get_for_model(
klass, for_concrete_model=False
)
ctypes.add(ctype)
for perm in _get_all_permissions(klass._meta):
searched_perms.append((ctype, perm))
# Find all the Permissions that have a content_type for a model we're
# looking for. We don't need to check for codenames since we already have
# a list of the ones we're going to create.
all_perms = set(
Permission.objects.using(using)
.filter(
content_type__in=ctypes,
)
.values_list("content_type", "codename")
)
perms = []
for ct, (codename, name) in searched_perms:
if (ct.pk, codename) not in all_perms:
permission = Permission()
permission._state.db = using
permission.codename = codename
permission.name = name
permission.content_type = ct
perms.append(permission)
Permission.objects.using(using).bulk_create(perms)
if verbosity >= 2:
for perm in perms:
print("Adding permission '%s'" % perm)
def get_system_username():
"""
Return the current system user's username, or an empty string if the
username could not be determined.
"""
try:
result = getpass.getuser()
except (ImportError, KeyError):
# KeyError will be raised by os.getpwuid() (called by getuser())
# if there is no corresponding entry in the /etc/passwd file
# (a very restricted chroot environment, for example).
return ""
return result
def get_default_username(check_db=True, database=DEFAULT_DB_ALIAS):
"""
Try to determine the current system user's username to use as a default.
:param check_db: If ``True``, requires that the username does not match an
existing ``auth.User`` (otherwise returns an empty string).
:param database: The database where the unique check will be performed.
:returns: The username, or an empty string if no username can be
determined or the suggested username is already taken.
"""
# This file is used in apps.py, it should not trigger models import.
from django.contrib.auth import models as auth_app
# If the User model has been swapped out, we can't make any assumptions
# about the default user name.
if auth_app.User._meta.swapped:
return ""
default_username = get_system_username()
try:
default_username = (
unicodedata.normalize("NFKD", default_username)
.encode("ascii", "ignore")
.decode("ascii")
.replace(" ", "")
.lower()
)
except UnicodeDecodeError:
return ""
# Run the username validator
try:
auth_app.User._meta.get_field("username").run_validators(default_username)
except exceptions.ValidationError:
return ""
# Don't return the default username if it is already taken.
if check_db and default_username:
try:
auth_app.User._default_manager.db_manager(database).get(
username=default_username,
)
except auth_app.User.DoesNotExist:
pass
else:
return ""
return default_username
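# A worked example (illustrative): if getpass.getuser() returns "José Müller",
# the normalization above yields "josemuller", so
# get_default_username(check_db=False) == "josemuller" (assuming the result
# passes the username field's validators and is not already taken).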
|
276f48519dc74df7a97e8889bd278ba037f3cb2e867fa1e8a3f63e6d350c3b7a | import logging
import warnings
from django.conf import settings
from django.contrib.gis import gdal
from django.contrib.gis.geometry import json_regex
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.forms.widgets import Widget
from django.utils import translation
from django.utils.deprecation import RemovedInDjango51Warning
logger = logging.getLogger("django.contrib.gis")
class BaseGeometryWidget(Widget):
"""
The base class for rich geometry widgets.
Render a map using the WKT of the geometry.
"""
geom_type = "GEOMETRY"
map_srid = 4326
map_width = 600 # RemovedInDjango51Warning
map_height = 400 # RemovedInDjango51Warning
display_raw = False
supports_3d = False
template_name = "" # set on subclasses
def __init__(self, attrs=None):
self.attrs = {}
for key in ("geom_type", "map_srid", "map_width", "map_height", "display_raw"):
self.attrs[key] = getattr(self, key)
if (
(attrs and ("map_width" in attrs or "map_height" in attrs))
or self.map_width != 600
or self.map_height != 400
):
warnings.warn(
"The map_height and map_width widget attributes are deprecated. Please "
"use CSS to size map widgets.",
category=RemovedInDjango51Warning,
stacklevel=2,
)
if attrs:
self.attrs.update(attrs)
def serialize(self, value):
return value.wkt if value else ""
def deserialize(self, value):
try:
return GEOSGeometry(value)
except (GEOSException, ValueError, TypeError) as err:
logger.error("Error creating geometry from value '%s' (%s)", value, err)
return None
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
# If a string reaches here (via a validation error on another
# field) then just reconstruct the Geometry.
if value and isinstance(value, str):
value = self.deserialize(value)
if value:
# Check that srid of value and map match
if value.srid and value.srid != self.map_srid:
try:
ogr = value.ogr
ogr.transform(self.map_srid)
value = ogr
except gdal.GDALException as err:
logger.error(
"Error transforming geometry from srid '%s' to srid '%s' (%s)",
value.srid,
self.map_srid,
err,
)
geom_type = gdal.OGRGeomType(self.attrs["geom_type"]).name
context.update(
self.build_attrs(
self.attrs,
{
"name": name,
"module": "geodjango_%s" % name.replace("-", "_"), # JS-safe
"serialized": self.serialize(value),
"geom_type": "Geometry" if geom_type == "Unknown" else geom_type,
"STATIC_URL": settings.STATIC_URL,
"LANGUAGE_BIDI": translation.get_language_bidi(),
**(attrs or {}),
},
)
)
return context
class OpenLayersWidget(BaseGeometryWidget):
template_name = "gis/openlayers.html"
map_srid = 3857
class Media:
css = {
"all": (
"https://cdn.jsdelivr.net/npm/[email protected]/ol.css",
"gis/css/ol3.css",
)
}
js = (
"https://cdn.jsdelivr.net/npm/[email protected]/dist/ol.js",
"gis/js/OLMapWidget.js",
)
def serialize(self, value):
return value.json if value else ""
def deserialize(self, value):
geom = super().deserialize(value)
# GeoJSON assumes WGS84 (4326). Use the map's SRID instead.
if geom and json_regex.match(value) and self.map_srid != 4326:
geom.srid = self.map_srid
return geom
class OSMWidget(OpenLayersWidget):
"""
An OpenLayers/OpenStreetMap-based widget.
"""
template_name = "gis/openlayers-osm.html"
default_lon = 5
default_lat = 47
default_zoom = 12
def __init__(self, attrs=None):
super().__init__()
for key in ("default_lon", "default_lat", "default_zoom"):
self.attrs[key] = getattr(self, key)
if attrs:
self.attrs.update(attrs)
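# A minimal usage sketch (illustrative; the form is hypothetical):
#
#     from django.contrib.gis import forms
#
#     class LocationForm(forms.Form):
#         point = forms.PointField(widget=OSMWidget(attrs={"default_zoom": 5}))
#
# Any attrs passed in override the class-level defaults collected above.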
|
115444d1af722271c8ecdb80924784baee8c0dc7b3c3d270e3de67722375d513 | import logging
import os
import re
from ctypes import CDLL, CFUNCTYPE, c_char_p, c_int
from ctypes.util import find_library
from django.contrib.gis.gdal.error import GDALException
from django.core.exceptions import ImproperlyConfigured
logger = logging.getLogger("django.contrib.gis")
# Custom library path set?
try:
from django.conf import settings
lib_path = settings.GDAL_LIBRARY_PATH
except (AttributeError, ImportError, ImproperlyConfigured, OSError):
lib_path = None
if lib_path:
lib_names = None
elif os.name == "nt":
# Windows NT shared libraries
lib_names = [
"gdal306",
"gdal305",
"gdal304",
"gdal303",
"gdal302",
"gdal301",
"gdal300",
"gdal204",
"gdal203",
"gdal202",
]
elif os.name == "posix":
# *NIX library names.
lib_names = [
"gdal",
"GDAL",
"gdal3.6.0",
"gdal3.5.0",
"gdal3.4.0",
"gdal3.3.0",
"gdal3.2.0",
"gdal3.1.0",
"gdal3.0.0",
"gdal2.4.0",
"gdal2.3.0",
"gdal2.2.0",
]
else:
raise ImproperlyConfigured('GDAL is unsupported on OS "%s".' % os.name)
# Using the ctypes `find_library` utility to find the
# path to the GDAL library from the list of library names.
if lib_names:
for lib_name in lib_names:
lib_path = find_library(lib_name)
if lib_path is not None:
break
if lib_path is None:
raise ImproperlyConfigured(
'Could not find the GDAL library (tried "%s"). Is GDAL installed? '
"If it is, try setting GDAL_LIBRARY_PATH in your settings."
% '", "'.join(lib_names)
)
# This loads the GDAL/OGR C library
lgdal = CDLL(lib_path)
# On Windows, the GDAL binaries have some OSR routines exported with
# STDCALL, while others are not. Thus, the library will also need to
# be loaded up as WinDLL for said OSR functions that require the
# different calling convention.
if os.name == "nt":
from ctypes import WinDLL
lwingdal = WinDLL(lib_path)
def std_call(func):
"""
Return the correct STDCALL function for certain OSR routines on Win32
platforms.
"""
if os.name == "nt":
return lwingdal[func]
else:
return lgdal[func]
# #### Version-information functions. ####
# Return GDAL library version information with the given key.
_version_info = std_call("GDALVersionInfo")
_version_info.argtypes = [c_char_p]
_version_info.restype = c_char_p
def gdal_version():
"Return only the GDAL version number information."
return _version_info(b"RELEASE_NAME")
def gdal_full_version():
"Return the full GDAL version information."
return _version_info(b"")
def gdal_version_info():
ver = gdal_version()
m = re.match(rb"^(?P<major>\d+)\.(?P<minor>\d+)(?:\.(?P<subminor>\d+))?", ver)
if not m:
raise GDALException('Could not parse GDAL version string "%s"' % ver)
major, minor, subminor = m.groups()
return (int(major), int(minor), subminor and int(subminor))
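# A worked example (illustrative): gdal_version() == b"3.6.4" parses to
# (3, 6, 4); a two-component version such as b"3.6" parses to (3, 6, None).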
GDAL_VERSION = gdal_version_info()
# Set library error handling so that errors are logged.
CPLErrorHandler = CFUNCTYPE(None, c_int, c_int, c_char_p)
def err_handler(error_class, error_number, message):
logger.error("GDAL_ERROR %d: %s", error_number, message)
err_handler = CPLErrorHandler(err_handler)
def function(name, args, restype):
func = std_call(name)
func.argtypes = args
func.restype = restype
return func
set_error_handler = function("CPLSetErrorHandler", [CPLErrorHandler], CPLErrorHandler)
set_error_handler(err_handler)
|
489c5c15a0ba3bc14a4a506f46a5e4c61c9246f456b52ce52b0c75c1cba01381 | from decimal import Decimal
from django.contrib.gis.db.models.fields import BaseSpatialField, GeometryField
from django.contrib.gis.db.models.sql import AreaField, DistanceField
from django.contrib.gis.geos import GEOSGeometry
from django.core.exceptions import FieldError
from django.db import NotSupportedError
from django.db.models import (
BinaryField,
BooleanField,
FloatField,
Func,
IntegerField,
TextField,
Transform,
Value,
)
from django.db.models.functions import Cast
from django.utils.functional import cached_property
NUMERIC_TYPES = (int, float, Decimal)
class GeoFuncMixin:
function = None
geom_param_pos = (0,)
def __init__(self, *expressions, **extra):
super().__init__(*expressions, **extra)
# Ensure that value expressions are geometric.
for pos in self.geom_param_pos:
expr = self.source_expressions[pos]
if not isinstance(expr, Value):
continue
try:
output_field = expr.output_field
except FieldError:
output_field = None
geom = expr.value
if (
not isinstance(geom, GEOSGeometry)
or output_field
and not isinstance(output_field, GeometryField)
):
raise TypeError(
"%s function requires a geometric argument in position %d."
% (self.name, pos + 1)
)
if not geom.srid and not output_field:
raise ValueError("SRID is required for all geometries.")
if not output_field:
self.source_expressions[pos] = Value(
geom, output_field=GeometryField(srid=geom.srid)
)
@property
def name(self):
return self.__class__.__name__
@cached_property
def geo_field(self):
return self.source_expressions[self.geom_param_pos[0]].field
def as_sql(self, compiler, connection, function=None, **extra_context):
if self.function is None and function is None:
function = connection.ops.spatial_function_name(self.name)
return super().as_sql(compiler, connection, function=function, **extra_context)
def resolve_expression(self, *args, **kwargs):
res = super().resolve_expression(*args, **kwargs)
# Ensure that expressions are geometric.
source_fields = res.get_source_fields()
for pos in self.geom_param_pos:
field = source_fields[pos]
if not isinstance(field, GeometryField):
raise TypeError(
"%s function requires a GeometryField in position %s, got %s."
% (
self.name,
pos + 1,
type(field).__name__,
)
)
base_srid = res.geo_field.srid
for pos in self.geom_param_pos[1:]:
expr = res.source_expressions[pos]
expr_srid = expr.output_field.srid
if expr_srid != base_srid:
# Automatic SRID conversion so objects are comparable.
res.source_expressions[pos] = Transform(
expr, base_srid
).resolve_expression(*args, **kwargs)
return res
def _handle_param(self, value, param_name="", check_types=None):
if not hasattr(value, "resolve_expression"):
if check_types and not isinstance(value, check_types):
raise TypeError(
"The %s parameter has the wrong type: should be %s."
% (param_name, check_types)
)
return value
class GeoFunc(GeoFuncMixin, Func):
pass
class GeomOutputGeoFunc(GeoFunc):
@cached_property
def output_field(self):
return GeometryField(srid=self.geo_field.srid)
class SQLiteDecimalToFloatMixin:
"""
By default, the SQLite backend converts Decimal values to str, which is not
accepted by GIS functions that expect numeric values.
"""
def as_sqlite(self, compiler, connection, **extra_context):
copy = self.copy()
copy.set_source_expressions(
[
Value(float(expr.value))
if hasattr(expr, "value") and isinstance(expr.value, Decimal)
else expr
for expr in copy.get_source_expressions()
]
)
return copy.as_sql(compiler, connection, **extra_context)
class OracleToleranceMixin:
tolerance = 0.05
def as_oracle(self, compiler, connection, **extra_context):
tolerance = Value(
self._handle_param(
self.extra.get("tolerance", self.tolerance),
"tolerance",
NUMERIC_TYPES,
)
)
clone = self.copy()
clone.set_source_expressions([*self.get_source_expressions(), tolerance])
return clone.as_sql(compiler, connection, **extra_context)
class Area(OracleToleranceMixin, GeoFunc):
arity = 1
@cached_property
def output_field(self):
return AreaField(self.geo_field)
def as_sql(self, compiler, connection, **extra_context):
if not connection.features.supports_area_geodetic and self.geo_field.geodetic(
connection
):
raise NotSupportedError(
"Area on geodetic coordinate systems not supported."
)
return super().as_sql(compiler, connection, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
extra_context["template"] = "%(function)s(%(expressions)s, %(spheroid)d)"
extra_context["spheroid"] = True
return self.as_sql(compiler, connection, **extra_context)
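# A minimal usage sketch (illustrative; the "Zipcode" model is hypothetical):
#
#     Zipcode.objects.annotate(area=Area("poly"))
#
# The AreaField output_field means annotated values are returned as
# django.contrib.gis.measure.Area instances.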
class Azimuth(GeoFunc):
output_field = FloatField()
arity = 2
geom_param_pos = (0, 1)
class AsGeoJSON(GeoFunc):
output_field = TextField()
def __init__(self, expression, bbox=False, crs=False, precision=8, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, "precision", int))
options = 0
if crs and bbox:
options = 3
elif bbox:
options = 1
elif crs:
options = 2
if options:
expressions.append(options)
super().__init__(*expressions, **extra)
def as_oracle(self, compiler, connection, **extra_context):
source_expressions = self.get_source_expressions()
clone = self.copy()
clone.set_source_expressions(source_expressions[:1])
return super(AsGeoJSON, clone).as_sql(compiler, connection, **extra_context)
class AsGML(GeoFunc):
geom_param_pos = (1,)
output_field = TextField()
def __init__(self, expression, version=2, precision=8, **extra):
expressions = [version, expression]
if precision is not None:
expressions.append(self._handle_param(precision, "precision", int))
super().__init__(*expressions, **extra)
def as_oracle(self, compiler, connection, **extra_context):
source_expressions = self.get_source_expressions()
version = source_expressions[0]
clone = self.copy()
clone.set_source_expressions([source_expressions[1]])
extra_context["function"] = (
"SDO_UTIL.TO_GML311GEOMETRY"
if version.value == 3
else "SDO_UTIL.TO_GMLGEOMETRY"
)
return super(AsGML, clone).as_sql(compiler, connection, **extra_context)
class AsKML(GeoFunc):
output_field = TextField()
def __init__(self, expression, precision=8, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, "precision", int))
super().__init__(*expressions, **extra)
class AsSVG(GeoFunc):
output_field = TextField()
def __init__(self, expression, relative=False, precision=8, **extra):
relative = (
relative if hasattr(relative, "resolve_expression") else int(relative)
)
expressions = [
expression,
relative,
self._handle_param(precision, "precision", int),
]
super().__init__(*expressions, **extra)
class AsWKB(GeoFunc):
output_field = BinaryField()
arity = 1
class AsWKT(GeoFunc):
output_field = TextField()
arity = 1
class BoundingCircle(OracleToleranceMixin, GeomOutputGeoFunc):
def __init__(self, expression, num_seg=48, **extra):
super().__init__(expression, num_seg, **extra)
def as_oracle(self, compiler, connection, **extra_context):
clone = self.copy()
clone.set_source_expressions([self.get_source_expressions()[0]])
return super(BoundingCircle, clone).as_oracle(
compiler, connection, **extra_context
)
class Centroid(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 1
class Difference(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
class DistanceResultMixin:
@cached_property
def output_field(self):
return DistanceField(self.geo_field)
def source_is_geography(self):
return self.geo_field.geography and self.geo_field.srid == 4326
class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
geom_param_pos = (0, 1)
spheroid = None
def __init__(self, expr1, expr2, spheroid=None, **extra):
expressions = [expr1, expr2]
if spheroid is not None:
self.spheroid = self._handle_param(spheroid, "spheroid", bool)
super().__init__(*expressions, **extra)
def as_postgresql(self, compiler, connection, **extra_context):
clone = self.copy()
function = None
expr2 = clone.source_expressions[1]
geography = self.source_is_geography()
if expr2.output_field.geography != geography:
if isinstance(expr2, Value):
expr2.output_field.geography = geography
else:
clone.source_expressions[1] = Cast(
expr2,
GeometryField(srid=expr2.output_field.srid, geography=geography),
)
if not geography and self.geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need special
# distance functions.
if self.spheroid:
# DistanceSpheroid is more accurate and resource intensive than
# DistanceSphere.
function = connection.ops.spatial_function_name("DistanceSpheroid")
# Replace boolean param by the real spheroid of the base field
clone.source_expressions.append(
Value(self.geo_field.spheroid(connection))
)
else:
function = connection.ops.spatial_function_name("DistanceSphere")
return super(Distance, clone).as_sql(
compiler, connection, function=function, **extra_context
)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
# SpatiaLite returns NULL instead of zero on geodetic coordinates
extra_context[
"template"
] = "COALESCE(%(function)s(%(expressions)s, %(spheroid)s), 0)"
extra_context["spheroid"] = int(bool(self.spheroid))
return super().as_sql(compiler, connection, **extra_context)
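# A minimal usage sketch (illustrative; the "City" model is hypothetical):
#
#     pnt = Point(-87.6, 41.9, srid=4326)
#     City.objects.annotate(d=Distance("point", pnt))
#
# On geodetic geometry fields, spheroid=True selects the more accurate
# spheroid-based distance function, as implemented above for PostgreSQL.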
class Envelope(GeomOutputGeoFunc):
arity = 1
class ForcePolygonCW(GeomOutputGeoFunc):
arity = 1
class GeoHash(GeoFunc):
output_field = TextField()
def __init__(self, expression, precision=None, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, "precision", int))
super().__init__(*expressions, **extra)
def as_mysql(self, compiler, connection, **extra_context):
clone = self.copy()
# If no precision is provided, set it to the maximum.
if len(clone.source_expressions) < 2:
clone.source_expressions.append(Value(100))
return clone.as_sql(compiler, connection, **extra_context)
class GeometryDistance(GeoFunc):
output_field = FloatField()
arity = 2
function = ""
arg_joiner = " <-> "
geom_param_pos = (0, 1)
class Intersection(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
@BaseSpatialField.register_lookup
class IsEmpty(GeoFuncMixin, Transform):
lookup_name = "isempty"
output_field = BooleanField()
@BaseSpatialField.register_lookup
class IsValid(OracleToleranceMixin, GeoFuncMixin, Transform):
lookup_name = "isvalid"
output_field = BooleanField()
def as_oracle(self, compiler, connection, **extra_context):
sql, params = super().as_oracle(compiler, connection, **extra_context)
return "CASE %s WHEN 'TRUE' THEN 1 ELSE 0 END" % sql, params
class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
def __init__(self, expr1, spheroid=True, **extra):
self.spheroid = spheroid
super().__init__(expr1, **extra)
def as_sql(self, compiler, connection, **extra_context):
if (
self.geo_field.geodetic(connection)
and not connection.features.supports_length_geodetic
):
raise NotSupportedError(
"This backend doesn't support Length on geodetic fields"
)
return super().as_sql(compiler, connection, **extra_context)
def as_postgresql(self, compiler, connection, **extra_context):
clone = self.copy()
function = None
if self.source_is_geography():
clone.source_expressions.append(Value(self.spheroid))
elif self.geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need length_spheroid
function = connection.ops.spatial_function_name("LengthSpheroid")
clone.source_expressions.append(Value(self.geo_field.spheroid(connection)))
else:
dim = min(f.dim for f in self.get_source_fields() if f)
if dim > 2:
function = connection.ops.length3d
return super(Length, clone).as_sql(
compiler, connection, function=function, **extra_context
)
def as_sqlite(self, compiler, connection, **extra_context):
function = None
if self.geo_field.geodetic(connection):
function = "GeodesicLength" if self.spheroid else "GreatCircleLength"
return super().as_sql(compiler, connection, function=function, **extra_context)
class LineLocatePoint(GeoFunc):
output_field = FloatField()
arity = 2
geom_param_pos = (0, 1)
class MakeValid(GeomOutputGeoFunc):
pass
class MemSize(GeoFunc):
output_field = IntegerField()
arity = 1
class NumGeometries(GeoFunc):
output_field = IntegerField()
arity = 1
class NumPoints(GeoFunc):
output_field = IntegerField()
arity = 1
class Perimeter(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
arity = 1
def as_postgresql(self, compiler, connection, **extra_context):
function = None
if self.geo_field.geodetic(connection) and not self.source_is_geography():
raise NotSupportedError(
"ST_Perimeter cannot use a non-projected non-geography field."
)
dim = min(f.dim for f in self.get_source_fields())
if dim > 2:
function = connection.ops.perimeter3d
return super().as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
raise NotSupportedError("Perimeter cannot use a non-projected field.")
return super().as_sql(compiler, connection, **extra_context)
class PointOnSurface(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 1
class Reverse(GeoFunc):
arity = 1
class Scale(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc):
def __init__(self, expression, x, y, z=0.0, **extra):
expressions = [
expression,
self._handle_param(x, "x", NUMERIC_TYPES),
self._handle_param(y, "y", NUMERIC_TYPES),
]
if z != 0.0:
expressions.append(self._handle_param(z, "z", NUMERIC_TYPES))
super().__init__(*expressions, **extra)
class SnapToGrid(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc):
def __init__(self, expression, *args, **extra):
nargs = len(args)
expressions = [expression]
if nargs in (1, 2):
expressions.extend(
[self._handle_param(arg, "", NUMERIC_TYPES) for arg in args]
)
elif nargs == 4:
# Reverse origin and size param ordering
expressions += [
*(self._handle_param(arg, "", NUMERIC_TYPES) for arg in args[2:]),
*(self._handle_param(arg, "", NUMERIC_TYPES) for arg in args[0:2]),
]
else:
raise ValueError("Must provide 1, 2, or 4 arguments to `SnapToGrid`.")
super().__init__(*expressions, **extra)
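# Accepted call shapes (illustrative):
#
#     SnapToGrid(expr, size)                            # uniform grid size
#     SnapToGrid(expr, x_size, y_size)                  # per-axis sizes
#     SnapToGrid(expr, x_size, y_size, x_origin, y_origin)
#
# For the 4-argument form, the origin pair is moved before the sizes above to
# match the SQL signature (geom, origin_x, origin_y, size_x, size_y).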
class SymDifference(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
class Transform(GeomOutputGeoFunc):
def __init__(self, expression, srid, **extra):
expressions = [
expression,
self._handle_param(srid, "srid", int),
]
if "output_field" not in extra:
extra["output_field"] = GeometryField(srid=srid)
super().__init__(*expressions, **extra)
class Translate(Scale):
def as_sqlite(self, compiler, connection, **extra_context):
clone = self.copy()
if len(self.source_expressions) < 4:
# Always provide the z parameter for ST_Translate
clone.source_expressions.append(Value(0))
return super(Translate, clone).as_sqlite(compiler, connection, **extra_context)
class Union(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
|
5f247d617d9e4d84de6ce57cc020762db3bccc0f942abfc51f8a67b7b823ec35 | """
This module contains the spatial lookup types and database operations for
Oracle Spatial.
Please note that WKT support is broken on the XE version, and thus
this backend will not work on such platforms. Specifically, XE lacks
support for an internal JVM, and Java libraries are required to use
the WKT constructors.
"""
import re
from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.oracle.adapter import OracleSpatialAdapter
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.db.backends.oracle.operations import DatabaseOperations
DEFAULT_TOLERANCE = "0.05"
class SDOOperator(SpatialOperator):
sql_template = "%(func)s(%(lhs)s, %(rhs)s) = 'TRUE'"
class SDODWithin(SpatialOperator):
sql_template = "SDO_WITHIN_DISTANCE(%(lhs)s, %(rhs)s, %%s) = 'TRUE'"
class SDODisjoint(SpatialOperator):
sql_template = (
"SDO_GEOM.RELATE(%%(lhs)s, 'DISJOINT', %%(rhs)s, %s) = 'DISJOINT'"
% DEFAULT_TOLERANCE
)
class SDORelate(SpatialOperator):
sql_template = "SDO_RELATE(%(lhs)s, %(rhs)s, 'mask=%(mask)s') = 'TRUE'"
def check_relate_argument(self, arg):
masks = (
"TOUCH|OVERLAPBDYDISJOINT|OVERLAPBDYINTERSECT|EQUAL|INSIDE|COVEREDBY|"
"CONTAINS|COVERS|ANYINTERACT|ON"
)
mask_regex = re.compile(r"^(%s)(\+(%s))*$" % (masks, masks), re.I)
if not isinstance(arg, str) or not mask_regex.match(arg):
raise ValueError('Invalid SDO_RELATE mask: "%s"' % arg)
def as_sql(self, connection, lookup, template_params, sql_params):
template_params["mask"] = sql_params[-1]
return super().as_sql(connection, lookup, template_params, sql_params[:-1])
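# A minimal usage sketch (illustrative; the "County" model is hypothetical):
# the relate lookup takes a (geometry, mask) tuple, and the mask is validated
# by check_relate_argument() before being interpolated into the template:
#
#     County.objects.filter(poly__relate=(geom, "INSIDE+TOUCH"))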
class OracleOperations(BaseSpatialOperations, DatabaseOperations):
name = "oracle"
oracle = True
disallowed_aggregates = (models.Collect, models.Extent3D, models.MakeLine)
Adapter = OracleSpatialAdapter
extent = "SDO_AGGR_MBR"
unionagg = "SDO_AGGR_UNION"
from_text = "SDO_GEOMETRY"
function_names = {
"Area": "SDO_GEOM.SDO_AREA",
"AsGeoJSON": "SDO_UTIL.TO_GEOJSON",
"AsWKB": "SDO_UTIL.TO_WKBGEOMETRY",
"AsWKT": "SDO_UTIL.TO_WKTGEOMETRY",
"BoundingCircle": "SDO_GEOM.SDO_MBC",
"Centroid": "SDO_GEOM.SDO_CENTROID",
"Difference": "SDO_GEOM.SDO_DIFFERENCE",
"Distance": "SDO_GEOM.SDO_DISTANCE",
"Envelope": "SDO_GEOM_MBR",
"Intersection": "SDO_GEOM.SDO_INTERSECTION",
"IsValid": "SDO_GEOM.VALIDATE_GEOMETRY_WITH_CONTEXT",
"Length": "SDO_GEOM.SDO_LENGTH",
"NumGeometries": "SDO_UTIL.GETNUMELEM",
"NumPoints": "SDO_UTIL.GETNUMVERTICES",
"Perimeter": "SDO_GEOM.SDO_LENGTH",
"PointOnSurface": "SDO_GEOM.SDO_POINTONSURFACE",
"Reverse": "SDO_UTIL.REVERSE_LINESTRING",
"SymDifference": "SDO_GEOM.SDO_XOR",
"Transform": "SDO_CS.TRANSFORM",
"Union": "SDO_GEOM.SDO_UNION",
}
# We fetch SDO Geometries as WKB because it is much easier to instantiate
# GEOS proxies from WKB than from SDO_GEOMETRY(...) strings. However, this
# adversely affects performance (i.e., Java is called to do the conversion
# on every query). If someone wishes to write a SDO_GEOMETRY(...) parser in
# Python, let me know =)
select = "SDO_UTIL.TO_WKBGEOMETRY(%s)"
gis_operators = {
"contains": SDOOperator(func="SDO_CONTAINS"),
"coveredby": SDOOperator(func="SDO_COVEREDBY"),
"covers": SDOOperator(func="SDO_COVERS"),
"disjoint": SDODisjoint(),
"intersects": SDOOperator(
func="SDO_OVERLAPBDYINTERSECT"
), # TODO: Is this really the same as ST_Intersects()?
"equals": SDOOperator(func="SDO_EQUAL"),
"exact": SDOOperator(func="SDO_EQUAL"),
"overlaps": SDOOperator(func="SDO_OVERLAPS"),
"same_as": SDOOperator(func="SDO_EQUAL"),
# Oracle uses a different syntax, e.g., 'mask=inside+touch'
"relate": SDORelate(),
"touches": SDOOperator(func="SDO_TOUCH"),
"within": SDOOperator(func="SDO_INSIDE"),
"dwithin": SDODWithin(),
}
unsupported_functions = {
"AsKML",
"AsSVG",
"Azimuth",
"ForcePolygonCW",
"GeoHash",
"GeometryDistance",
"IsEmpty",
"LineLocatePoint",
"MakeValid",
"MemSize",
"Scale",
"SnapToGrid",
"Translate",
}
def geo_quote_name(self, name):
return super().geo_quote_name(name).upper()
def convert_extent(self, clob):
if clob:
# Generally, Oracle returns a polygon for the extent -- however,
# it can return a single point if there's only one Point in the
# table.
ext_geom = GEOSGeometry(memoryview(clob.read()))
gtype = str(ext_geom.geom_type)
if gtype == "Polygon":
# Construct the 4-tuple from the coordinates in the polygon.
shell = ext_geom.shell
ll, ur = shell[0][:2], shell[2][:2]
elif gtype == "Point":
ll = ext_geom.coords[:2]
ur = ll
else:
raise Exception(
"Unexpected geometry type returned for extent: %s" % gtype
)
xmin, ymin = ll
xmax, ymax = ur
return (xmin, ymin, xmax, ymax)
else:
return None
def geo_db_type(self, f):
"""
Return the geometry database type for Oracle. Unlike other spatial
backends, no stored procedure is necessary and it's the same for all
geometry types.
"""
return "MDSYS.SDO_GEOMETRY"
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters given the value and the lookup type.
On Oracle, geometry columns with a geodetic coordinate system behave
implicitly like a geography column, and thus meters will be used as
the distance parameter on them.
"""
if not value:
return []
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
dist_param = value.m
else:
dist_param = getattr(
value, Distance.unit_attname(f.units_name(self.connection))
)
else:
dist_param = value
# dwithin lookups on Oracle require a special string parameter
# that starts with "distance=".
if lookup_type == "dwithin":
dist_param = "distance=%s" % dist_param
return [dist_param]
def get_geom_placeholder(self, f, value, compiler):
if value is None:
return "NULL"
return super().get_geom_placeholder(f, value, compiler)
def spatial_aggregate_name(self, agg_name):
"""
Return the spatial aggregate SQL name.
"""
agg_name = "unionagg" if agg_name.lower() == "union" else agg_name.lower()
return getattr(self, agg_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
from django.contrib.gis.db.backends.oracle.models import OracleGeometryColumns
return OracleGeometryColumns
def spatial_ref_sys(self):
from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys
return OracleSpatialRefSys
def modify_insert_params(self, placeholder, params):
"""Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial
backend due to #10888.
"""
if placeholder == "NULL":
return []
return super().modify_insert_params(placeholder, params)
def get_geometry_converter(self, expression):
read = wkb_r().read
srid = expression.output_field.srid
if srid == -1:
srid = None
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if value is not None:
geom = GEOSGeometryBase(read(memoryview(value.read())), geom_class)
if srid:
geom.srid = srid
return geom
return converter
def get_area_att_for_field(self, field):
return "sq_m"
|
042a1940704ebfd9effc4e19f23c9e6e3a848ee5684db9a430f4d5d84ce81e2d | from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.measure import Area as AreaMeasure
from django.contrib.gis.measure import Distance as DistanceMeasure
from django.db import NotSupportedError
from django.utils.functional import cached_property
class BaseSpatialOperations:
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mariadb = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = "%s"
@cached_property
def select_extent(self):
return self.select
# Aggregates
disallowed_aggregates = ()
geom_func_prefix = ""
# Mapping between Django function names and backend names, when names do not
# match; used in spatial_function_name().
function_names = {}
# Set of known unsupported functions of the backend
unsupported_functions = {
"Area",
"AsGeoJSON",
"AsGML",
"AsKML",
"AsSVG",
"Azimuth",
"BoundingCircle",
"Centroid",
"Difference",
"Distance",
"Envelope",
"GeoHash",
"GeometryDistance",
"Intersection",
"IsEmpty",
"IsValid",
"Length",
"LineLocatePoint",
"MakeValid",
"MemSize",
"NumGeometries",
"NumPoints",
"Perimeter",
"PointOnSurface",
"Reverse",
"Scale",
"SnapToGrid",
"SymDifference",
"Transform",
"Translate",
"Union",
}
# Constructors
from_text = False
# Default conversion functions for aggregates; will be overridden if implemented
# for the spatial backend.
def convert_extent(self, box, srid):
raise NotImplementedError(
"Aggregate extent not implemented for this spatial backend."
)
def convert_extent3d(self, box, srid):
raise NotImplementedError(
"Aggregate 3D extent not implemented for this spatial backend."
)
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Return the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError(
"subclasses of BaseSpatialOperations must provide a geo_db_type() method"
)
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError(
"Distance operations not available on this spatial backend."
)
def get_geom_placeholder(self, f, value, compiler):
"""
Return the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
def transform_value(value, field):
return value is not None and value.srid != field.srid
if hasattr(value, "as_sql"):
return (
"%s(%%s, %s)" % (self.spatial_function_name("Transform"), f.srid)
if transform_value(value.output_field, f)
else "%s"
)
if transform_value(value, f):
# Add Transform() to the SQL placeholder.
return "%s(%s(%%s,%s), %s)" % (
self.spatial_function_name("Transform"),
self.from_text,
value.srid,
f.srid,
)
elif self.connection.features.has_spatialrefsys_table:
return "%s(%%s,%s)" % (self.from_text, f.srid)
else:
# For backwards compatibility on MySQL (#27464).
return "%s(%%s)" % self.from_text
def check_expression_support(self, expression):
if isinstance(expression, self.disallowed_aggregates):
raise NotSupportedError(
"%s spatial aggregation is not supported by this database backend."
% expression.name
)
super().check_expression_support(expression)
def spatial_aggregate_name(self, agg_name):
raise NotImplementedError(
"Aggregate support not implemented for this spatial backend."
)
def spatial_function_name(self, func_name):
if func_name in self.unsupported_functions:
raise NotSupportedError(
"This backend doesn't support the %s function." % func_name
)
return self.function_names.get(func_name, self.geom_func_prefix + func_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
raise NotImplementedError(
"Subclasses of BaseSpatialOperations must provide a geometry_columns() "
"method."
)
def spatial_ref_sys(self):
raise NotImplementedError(
"subclasses of BaseSpatialOperations must a provide spatial_ref_sys() "
"method"
)
distance_expr_for_lookup = staticmethod(Distance)
def get_db_converters(self, expression):
converters = super().get_db_converters(expression)
if isinstance(expression.output_field, GeometryField):
converters.append(self.get_geometry_converter(expression))
return converters
def get_geometry_converter(self, expression):
raise NotImplementedError(
"Subclasses of BaseSpatialOperations must provide a "
"get_geometry_converter() method."
)
def get_area_att_for_field(self, field):
if field.geodetic(self.connection):
if self.connection.features.supports_area_geodetic:
return "sq_m"
raise NotImplementedError(
"Area on geodetic coordinate systems not supported."
)
else:
units_name = field.units_name(self.connection)
if units_name:
return AreaMeasure.unit_attname(units_name)
def get_distance_att_for_field(self, field):
dist_att = None
if field.geodetic(self.connection):
if self.connection.features.supports_distance_geodetic:
dist_att = "m"
else:
units = field.units_name(self.connection)
if units:
dist_att = DistanceMeasure.unit_attname(units)
return dist_att
|
c0f2a2924b7ab50fe563037b741540a93c112174a183069e01bb27dfdfb2b5cb | from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.geos.geometry import GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
name = "mysql"
geom_func_prefix = "ST_"
Adapter = WKTAdapter
@cached_property
def mariadb(self):
return self.connection.mysql_is_mariadb
@cached_property
def mysql(self):
return not self.connection.mysql_is_mariadb
@cached_property
def select(self):
return self.geom_func_prefix + "AsBinary(%s)"
@cached_property
def from_text(self):
return self.geom_func_prefix + "GeomFromText"
@cached_property
def gis_operators(self):
operators = {
"bbcontains": SpatialOperator(
func="MBRContains"
), # For consistency w/PostGIS API
"bboverlaps": SpatialOperator(func="MBROverlaps"), # ...
"contained": SpatialOperator(func="MBRWithin"), # ...
"contains": SpatialOperator(func="ST_Contains"),
"crosses": SpatialOperator(func="ST_Crosses"),
"disjoint": SpatialOperator(func="ST_Disjoint"),
"equals": SpatialOperator(func="ST_Equals"),
"exact": SpatialOperator(func="ST_Equals"),
"intersects": SpatialOperator(func="ST_Intersects"),
"overlaps": SpatialOperator(func="ST_Overlaps"),
"same_as": SpatialOperator(func="ST_Equals"),
"touches": SpatialOperator(func="ST_Touches"),
"within": SpatialOperator(func="ST_Within"),
}
if self.connection.mysql_is_mariadb:
operators["relate"] = SpatialOperator(func="ST_Relate")
return operators
disallowed_aggregates = (
models.Collect,
models.Extent,
models.Extent3D,
models.MakeLine,
models.Union,
)
@cached_property
def unsupported_functions(self):
unsupported = {
"AsGML",
"AsKML",
"AsSVG",
"Azimuth",
"BoundingCircle",
"ForcePolygonCW",
"GeometryDistance",
"IsEmpty",
"LineLocatePoint",
"MakeValid",
"MemSize",
"Perimeter",
"PointOnSurface",
"Reverse",
"Scale",
"SnapToGrid",
"Transform",
"Translate",
}
if self.connection.mysql_is_mariadb:
unsupported.remove("PointOnSurface")
unsupported.update({"GeoHash", "IsValid"})
return unsupported
def geo_db_type(self, f):
return f.geom_type
def get_distance(self, f, value, lookup_type):
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
raise ValueError(
"Only numeric values of degree units are allowed on "
"geodetic distance queries."
)
dist_param = getattr(
value, Distance.unit_attname(f.units_name(self.connection))
)
else:
dist_param = value
return [dist_param]
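# A worked example (illustrative): for a projected field measured in metres,
# get_distance(f, [Distance(km=1.5)], "distance_lte") returns [1500.0], while
# geodetic fields only accept plain numeric (degree) values.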
def get_geometry_converter(self, expression):
read = wkb_r().read
srid = expression.output_field.srid
if srid == -1:
srid = None
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if value is not None:
geom = GEOSGeometryBase(read(memoryview(value)), geom_class)
if srid:
geom.srid = srid
return geom
return converter
|
45267ff63a25ba6e5bd001035497e4f38c26ef4ed2cfe74d95dd11f5c0871a2a | """
SQL functions reference lists:
https://www.gaia-gis.it/gaia-sins/spatialite-sql-4.3.0.html
"""
from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.spatialite.adapter import SpatiaLiteAdapter
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.operations import DatabaseOperations
from django.utils.functional import cached_property
from django.utils.version import get_version_tuple
class SpatialiteNullCheckOperator(SpatialOperator):
def as_sql(self, connection, lookup, template_params, sql_params):
sql, params = super().as_sql(connection, lookup, template_params, sql_params)
return "%s > 0" % sql, params
class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations):
name = "spatialite"
spatialite = True
Adapter = SpatiaLiteAdapter
collect = "Collect"
extent = "Extent"
makeline = "MakeLine"
unionagg = "GUnion"
from_text = "GeomFromText"
gis_operators = {
# Binary predicates
"equals": SpatialiteNullCheckOperator(func="Equals"),
"disjoint": SpatialiteNullCheckOperator(func="Disjoint"),
"touches": SpatialiteNullCheckOperator(func="Touches"),
"crosses": SpatialiteNullCheckOperator(func="Crosses"),
"within": SpatialiteNullCheckOperator(func="Within"),
"overlaps": SpatialiteNullCheckOperator(func="Overlaps"),
"contains": SpatialiteNullCheckOperator(func="Contains"),
"intersects": SpatialiteNullCheckOperator(func="Intersects"),
"relate": SpatialiteNullCheckOperator(func="Relate"),
"coveredby": SpatialiteNullCheckOperator(func="CoveredBy"),
"covers": SpatialiteNullCheckOperator(func="Covers"),
# Returns true if B's bounding box completely contains A's bounding box.
"contained": SpatialOperator(func="MbrWithin"),
# Returns true if A's bounding box completely contains B's bounding box.
"bbcontains": SpatialOperator(func="MbrContains"),
# Returns true if A's bounding box overlaps B's bounding box.
"bboverlaps": SpatialOperator(func="MbrOverlaps"),
# These are implemented here as synonyms for Equals
"same_as": SpatialiteNullCheckOperator(func="Equals"),
"exact": SpatialiteNullCheckOperator(func="Equals"),
# Distance predicates
"dwithin": SpatialOperator(func="PtDistWithin"),
}
disallowed_aggregates = (models.Extent3D,)
select = "CAST (AsEWKB(%s) AS BLOB)"
function_names = {
"AsWKB": "St_AsBinary",
"ForcePolygonCW": "ST_ForceLHR",
"Length": "ST_Length",
"LineLocatePoint": "ST_Line_Locate_Point",
"NumPoints": "ST_NPoints",
"Reverse": "ST_Reverse",
"Scale": "ScaleCoords",
"Translate": "ST_Translate",
"Union": "ST_Union",
}
@cached_property
def unsupported_functions(self):
unsupported = {"BoundingCircle", "GeometryDistance", "IsEmpty", "MemSize"}
if not self.geom_lib_version():
unsupported |= {"Azimuth", "GeoHash", "MakeValid"}
return unsupported
@cached_property
def spatial_version(self):
"""Determine the version of the SpatiaLite library."""
try:
version = self.spatialite_version_tuple()[1:]
except Exception as exc:
raise ImproperlyConfigured(
'Cannot determine the SpatiaLite version for the "%s" database. '
"Was the SpatiaLite initialization SQL loaded on this database?"
% (self.connection.settings_dict["NAME"],)
) from exc
if version < (4, 3, 0):
raise ImproperlyConfigured("GeoDjango supports SpatiaLite 4.3.0 and above.")
return version
def convert_extent(self, box):
"""
Convert the polygon data received from SpatiaLite to min/max values.
"""
if box is None:
return None
shell = GEOSGeometry(box).shell
xmin, ymin = shell[0][:2]
xmax, ymax = shell[2][:2]
return (xmin, ymin, xmax, ymax)
def geo_db_type(self, f):
"""
Return None because geometry columns are added via the
`AddGeometryColumn` stored procedure on SpatiaLite.
"""
return None
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
if not value:
return []
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
if lookup_type == "dwithin":
raise ValueError(
"Only numeric values of degree units are allowed on "
"geographic DWithin queries."
)
dist_param = value.m
else:
dist_param = getattr(
value, Distance.unit_attname(f.units_name(self.connection))
)
else:
dist_param = value
return [dist_param]
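    # Worked example (hypothetical projected column whose units are
    # metres): a Distance object is converted to the field's native
    # units before being passed as a query parameter:
    #
    #     Distance.unit_attname("metre")  # "m"
    #     getattr(Distance(km=1), "m")    # 1000.0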
def _get_spatialite_func(self, func):
"""
Helper routine for calling SpatiaLite functions and returning
their result.
Any error occurring in this method should be handled by the caller.
"""
cursor = self.connection._cursor()
try:
cursor.execute("SELECT %s" % func)
row = cursor.fetchone()
finally:
cursor.close()
return row[0]
def geos_version(self):
"Return the version of GEOS used by SpatiaLite as a string."
return self._get_spatialite_func("geos_version()")
def proj_version(self):
"""Return the version of the PROJ library used by SpatiaLite."""
return self._get_spatialite_func("proj4_version()")
def lwgeom_version(self):
"""Return the version of LWGEOM library used by SpatiaLite."""
return self._get_spatialite_func("lwgeom_version()")
def rttopo_version(self):
"""Return the version of RTTOPO library used by SpatiaLite."""
return self._get_spatialite_func("rttopo_version()")
def geom_lib_version(self):
"""
        Return the version of the version-dependent geom library used by
SpatiaLite.
"""
if self.spatial_version >= (5,):
return self.rttopo_version()
else:
return self.lwgeom_version()
def spatialite_version(self):
"Return the SpatiaLite library version as a string."
return self._get_spatialite_func("spatialite_version()")
def spatialite_version_tuple(self):
"""
Return the SpatiaLite version as a tuple (version string, major,
minor, subminor).
"""
version = self.spatialite_version()
return (version,) + get_version_tuple(version)
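    # Example shape of the returned tuple (version number illustrative):
    #
    #     get_version_tuple("5.0.1")      # (5, 0, 1)
    #     spatialite_version_tuple()      # ("5.0.1", 5, 0, 1)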
def spatial_aggregate_name(self, agg_name):
"""
Return the spatial aggregate SQL template and function for the
given Aggregate instance.
"""
agg_name = "unionagg" if agg_name.lower() == "union" else agg_name.lower()
return getattr(self, agg_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
from django.contrib.gis.db.backends.spatialite.models import (
SpatialiteGeometryColumns,
)
return SpatialiteGeometryColumns
def spatial_ref_sys(self):
from django.contrib.gis.db.backends.spatialite.models import (
SpatialiteSpatialRefSys,
)
return SpatialiteSpatialRefSys
def get_geometry_converter(self, expression):
geom_class = expression.output_field.geom_class
read = wkb_r().read
def converter(value, expression, connection):
return None if value is None else GEOSGeometryBase(read(value), geom_class)
return converter
|
a8e1092d04c80b561d943a099292c2895414e06032d1df7f0e77ae8bbc38d5b3 | from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.postgresql.features import (
DatabaseFeatures as PsycopgDatabaseFeatures,
)
class DatabaseFeatures(BaseSpatialFeatures, PsycopgDatabaseFeatures):
supports_geography = True
supports_3d_storage = True
supports_3d_functions = True
supports_raster = True
supports_empty_geometries = True
empty_intersection_returns_none = False
|
7826b6cbebf7a862de35b148e044458f6526aa0600135f67b8bdd28031669b4a | import struct
from django.core.exceptions import ValidationError
from .const import (
BANDTYPE_FLAG_HASNODATA,
BANDTYPE_PIXTYPE_MASK,
GDAL_TO_POSTGIS,
GDAL_TO_STRUCT,
POSTGIS_HEADER_STRUCTURE,
POSTGIS_TO_GDAL,
STRUCT_SIZE,
)
def pack(structure, data):
"""
    Pack data into bytes in little-endian byte order.
"""
return struct.pack("<" + structure, *data)
def unpack(structure, data):
"""
    Unpack a little-endian, hex-encoded binary string into a tuple.
"""
return struct.unpack("<" + structure, bytes.fromhex(data))
def chunk(data, index):
"""
Split a string into two parts at the input index.
"""
return data[:index], data[index:]
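# A minimal round-trip sketch (not part of Django) showing how the three
# helpers above cooperate: pack() emits little-endian bytes, unpack()
# consumes the hex encoding of those bytes, and chunk() splits the hex
# stream as it is parsed.
def _struct_helpers_demo():
    packed = pack("HH", (1, 2))  # b"\x01\x00\x02\x00"
    assert unpack("HH", packed.hex()) == (1, 2)
    assert chunk(packed.hex(), 4) == ("0100", "0200")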
def from_pgraster(data):
"""
Convert a PostGIS HEX String into a dictionary.
"""
if data is None:
return
# Split raster header from data
header, data = chunk(data, 122)
header = unpack(POSTGIS_HEADER_STRUCTURE, header)
# Parse band data
bands = []
pixeltypes = []
while data:
# Get pixel type for this band
pixeltype_with_flags, data = chunk(data, 2)
pixeltype_with_flags = unpack("B", pixeltype_with_flags)[0]
pixeltype = pixeltype_with_flags & BANDTYPE_PIXTYPE_MASK
# Convert datatype from PostGIS to GDAL & get pack type and size
pixeltype = POSTGIS_TO_GDAL[pixeltype]
pack_type = GDAL_TO_STRUCT[pixeltype]
pack_size = 2 * STRUCT_SIZE[pack_type]
        # Parse band nodata value. The nodata value is part of the
        # PGRaster string even when the nodata flag is not set, so it
        # always has to be chunked off the data string.
nodata, data = chunk(data, pack_size)
nodata = unpack(pack_type, nodata)[0]
# Chunk and unpack band data (pack size times nr of pixels)
band, data = chunk(data, pack_size * header[10] * header[11])
band_result = {"data": bytes.fromhex(band)}
# Set the nodata value if the nodata flag is set.
if pixeltype_with_flags & BANDTYPE_FLAG_HASNODATA:
band_result["nodata_value"] = nodata
# Append band data to band list
bands.append(band_result)
# Store pixeltype of this band in pixeltypes array
pixeltypes.append(pixeltype)
# Check that all bands have the same pixeltype.
# This is required by GDAL. PostGIS rasters could have different pixeltypes
# for bands of the same raster.
if len(set(pixeltypes)) != 1:
raise ValidationError("Band pixeltypes are not all equal.")
return {
"srid": int(header[9]),
"width": header[10],
"height": header[11],
"datatype": pixeltypes[0],
"origin": (header[5], header[6]),
"scale": (header[3], header[4]),
"skew": (header[7], header[8]),
"bands": bands,
}
def to_pgraster(rast):
"""
Convert a GDALRaster into PostGIS Raster format.
"""
# Prepare the raster header data as a tuple. The first two numbers are
# the endianness and the PostGIS Raster Version, both are fixed by
# PostGIS at the moment.
rasterheader = (
1,
0,
len(rast.bands),
rast.scale.x,
rast.scale.y,
rast.origin.x,
rast.origin.y,
rast.skew.x,
rast.skew.y,
rast.srs.srid,
rast.width,
rast.height,
)
# Pack raster header.
result = pack(POSTGIS_HEADER_STRUCTURE, rasterheader)
for band in rast.bands:
        # The PostGIS raster band header has exactly two elements, an 8BUI byte
# and the nodata value.
#
# The 8BUI stores both the PostGIS pixel data type and a nodata flag.
# It is composed as the datatype with BANDTYPE_FLAG_HASNODATA (1 << 6)
# for existing nodata values:
# 8BUI_VALUE = PG_PIXEL_TYPE (0-11) | BANDTYPE_FLAG_HASNODATA
#
# For example, if the byte value is 71, then the datatype is
# 71 & ~BANDTYPE_FLAG_HASNODATA = 7 (32BSI)
        # and the nodata flag is set.
structure = "B" + GDAL_TO_STRUCT[band.datatype()]
# Get band pixel type in PostGIS notation
pixeltype = GDAL_TO_POSTGIS[band.datatype()]
# Set the nodata flag
if band.nodata_value is not None:
pixeltype |= BANDTYPE_FLAG_HASNODATA
# Pack band header
bandheader = pack(structure, (pixeltype, band.nodata_value or 0))
# Add packed header and band data to result
result += bandheader + band.data(as_memoryview=True)
return result
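# A small sketch of the band-header byte composition described above:
# PostGIS pixel type 7 (32BSI) with a nodata value present packs to 71,
# and masking with BANDTYPE_PIXTYPE_MASK recovers the bare pixel type.
def _bandtype_byte_demo():
    byte = 7 | BANDTYPE_FLAG_HASNODATA
    assert byte == 71
    assert byte & BANDTYPE_PIXTYPE_MASK == 7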
|
1ab06581fa9f208674db0c6a4675e450a43052530abebd6e7a6afbf7fb48ec43 | from functools import lru_cache
from django.db.backends.base.base import NO_DB_ALIAS
from django.db.backends.postgresql.base import DatabaseWrapper as PsycopgDatabaseWrapper
from django.db.backends.postgresql.psycopg_any import is_psycopg3
from .adapter import PostGISAdapter
from .features import DatabaseFeatures
from .introspection import PostGISIntrospection
from .operations import PostGISOperations
from .schema import PostGISSchemaEditor
if is_psycopg3:
from psycopg.adapt import Dumper
from psycopg.pq import Format
from psycopg.types import TypeInfo
from psycopg.types.string import TextBinaryLoader, TextLoader
class GeometryType:
pass
class GeographyType:
pass
class RasterType:
pass
class BaseTextDumper(Dumper):
def dump(self, obj):
# Return bytes as hex for text formatting
return obj.ewkb.hex().encode()
class BaseBinaryDumper(Dumper):
format = Format.BINARY
def dump(self, obj):
return obj.ewkb
@lru_cache
def postgis_adapters(geo_oid, geog_oid, raster_oid):
class BaseDumper(Dumper):
def __init_subclass__(cls, base_dumper):
super().__init_subclass__()
cls.GeometryDumper = type(
"GeometryDumper", (base_dumper,), {"oid": geo_oid}
)
cls.GeographyDumper = type(
"GeographyDumper", (base_dumper,), {"oid": geog_oid}
)
cls.RasterDumper = type(
"RasterDumper", (BaseTextDumper,), {"oid": raster_oid}
)
def get_key(self, obj, format):
if obj.is_geometry:
return GeographyType if obj.geography else GeometryType
else:
return RasterType
def upgrade(self, obj, format):
if obj.is_geometry:
if obj.geography:
return self.GeographyDumper(GeographyType)
else:
return self.GeometryDumper(GeometryType)
else:
return self.RasterDumper(RasterType)
def dump(self, obj):
raise NotImplementedError
class PostGISTextDumper(BaseDumper, base_dumper=BaseTextDumper):
pass
class PostGISBinaryDumper(BaseDumper, base_dumper=BaseBinaryDumper):
format = Format.BINARY
return PostGISTextDumper, PostGISBinaryDumper
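# Sketch of the dispatch implemented above (psycopg 3 only, using only
# names defined in this module): psycopg consults get_key()/upgrade()
# per adapted value, so a geography value ends up on the GeographyDumper,
# a geometry on the GeometryDumper, and a raster always dumps as text:
#
#     PostGISTextDumper, _ = postgis_adapters(geo_oid, geog_oid, raster_oid)
#     # upgrade() then returns the concrete dumper for each value.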
class DatabaseWrapper(PsycopgDatabaseWrapper):
SchemaEditorClass = PostGISSchemaEditor
_type_infos = {
"geometry": {},
"geography": {},
"raster": {},
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if kwargs.get("alias", "") != NO_DB_ALIAS:
self.features = DatabaseFeatures(self)
self.ops = PostGISOperations(self)
self.introspection = PostGISIntrospection(self)
def prepare_database(self):
super().prepare_database()
# Check that postgis extension is installed.
with self.cursor() as cursor:
cursor.execute("SELECT 1 FROM pg_extension WHERE extname = %s", ["postgis"])
if bool(cursor.fetchone()):
return
cursor.execute("CREATE EXTENSION IF NOT EXISTS postgis")
if is_psycopg3:
                # Ensure adapters are registered if PostGIS is used within
                # this connection.
self.register_geometry_adapters(self.connection, True)
def get_new_connection(self, conn_params):
connection = super().get_new_connection(conn_params)
if is_psycopg3:
self.register_geometry_adapters(connection)
return connection
if is_psycopg3:
def _register_type(self, pg_connection, typename):
registry = self._type_infos[typename]
try:
info = registry[self.alias]
except KeyError:
info = TypeInfo.fetch(pg_connection, typename)
registry[self.alias] = info
if info: # Can be None if the type does not exist (yet).
info.register(pg_connection)
pg_connection.adapters.register_loader(info.oid, TextLoader)
pg_connection.adapters.register_loader(info.oid, TextBinaryLoader)
return info.oid if info else None
def register_geometry_adapters(self, pg_connection, clear_caches=False):
if clear_caches:
for typename in self._type_infos:
self._type_infos[typename].pop(self.alias, None)
geo_oid = self._register_type(pg_connection, "geometry")
geog_oid = self._register_type(pg_connection, "geography")
raster_oid = self._register_type(pg_connection, "raster")
PostGISTextDumper, PostGISBinaryDumper = postgis_adapters(
geo_oid, geog_oid, raster_oid
)
pg_connection.adapters.register_dumper(PostGISAdapter, PostGISTextDumper)
pg_connection.adapters.register_dumper(PostGISAdapter, PostGISBinaryDumper)
|
e4fdac12b7c99a626b84cb287d55fce22277157639d983bd4d804e6e4eb4a91d | import re
from django.conf import settings
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import GeometryField, RasterField
from django.contrib.gis.gdal import GDALRaster
from django.contrib.gis.geos.geometry import GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.core.exceptions import ImproperlyConfigured
from django.db import NotSupportedError, ProgrammingError
from django.db.backends.postgresql.operations import DatabaseOperations
from django.db.backends.postgresql.psycopg_any import is_psycopg3
from django.db.models import Func, Value
from django.utils.functional import cached_property
from django.utils.version import get_version_tuple
from .adapter import PostGISAdapter
from .models import PostGISGeometryColumns, PostGISSpatialRefSys
from .pgraster import from_pgraster
# Identifier to mark raster lookups as bilateral.
BILATERAL = "bilateral"
class PostGISOperator(SpatialOperator):
def __init__(self, geography=False, raster=False, **kwargs):
# Only a subset of the operators and functions are available for the
# geography type.
self.geography = geography
        # Only a subset of the operators and functions are available for the
        # raster type. Raster inputs to lookups that don't support raster are
        # converted to polygons. If the raster argument is set to BILATERAL,
        # the operator cannot handle mixed geometry-raster lookups.
self.raster = raster
super().__init__(**kwargs)
def as_sql(self, connection, lookup, template_params, *args):
if lookup.lhs.output_field.geography and not self.geography:
raise ValueError(
'PostGIS geography does not support the "%s" '
"function/operator." % (self.func or self.op,)
)
template_params = self.check_raster(lookup, template_params)
return super().as_sql(connection, lookup, template_params, *args)
def check_raster(self, lookup, template_params):
spheroid = lookup.rhs_params and lookup.rhs_params[-1] == "spheroid"
# Check which input is a raster.
lhs_is_raster = lookup.lhs.field.geom_type == "RASTER"
rhs_is_raster = isinstance(lookup.rhs, GDALRaster)
# Look for band indices and inject them if provided.
if lookup.band_lhs is not None and lhs_is_raster:
if not self.func:
raise ValueError(
"Band indices are not allowed for this operator, it works on bbox "
"only."
)
template_params["lhs"] = "%s, %s" % (
template_params["lhs"],
lookup.band_lhs,
)
if lookup.band_rhs is not None and rhs_is_raster:
if not self.func:
raise ValueError(
"Band indices are not allowed for this operator, it works on bbox "
"only."
)
template_params["rhs"] = "%s, %s" % (
template_params["rhs"],
lookup.band_rhs,
)
# Convert rasters to polygons if necessary.
if not self.raster or spheroid:
# Operators without raster support.
if lhs_is_raster:
template_params["lhs"] = "ST_Polygon(%s)" % template_params["lhs"]
if rhs_is_raster:
template_params["rhs"] = "ST_Polygon(%s)" % template_params["rhs"]
elif self.raster == BILATERAL:
# Operators with raster support but don't support mixed (rast-geom)
# lookups.
if lhs_is_raster and not rhs_is_raster:
template_params["lhs"] = "ST_Polygon(%s)" % template_params["lhs"]
elif rhs_is_raster and not lhs_is_raster:
template_params["rhs"] = "ST_Polygon(%s)" % template_params["rhs"]
return template_params
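# A minimal sketch (hypothetical SQL fragments) of the conversion that
# check_raster() applies for operators without raster support: each
# raster side of the lookup is wrapped so the comparison runs against a
# polygon derived from the raster instead.
def _raster_to_polygon_demo():
    template_params = {"lhs": "rast_col", "rhs": "%s"}
    template_params["lhs"] = "ST_Polygon(%s)" % template_params["lhs"]
    assert template_params["lhs"] == "ST_Polygon(rast_col)"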
class ST_Polygon(Func):
function = "ST_Polygon"
def __init__(self, expr):
super().__init__(expr)
expr = self.source_expressions[0]
if isinstance(expr, Value) and not expr._output_field_or_none:
self.source_expressions[0] = Value(
expr.value, output_field=RasterField(srid=expr.value.srid)
)
@cached_property
def output_field(self):
return GeometryField(srid=self.source_expressions[0].field.srid)
class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
name = "postgis"
postgis = True
geom_func_prefix = "ST_"
Adapter = PostGISAdapter
collect = geom_func_prefix + "Collect"
extent = geom_func_prefix + "Extent"
extent3d = geom_func_prefix + "3DExtent"
length3d = geom_func_prefix + "3DLength"
makeline = geom_func_prefix + "MakeLine"
perimeter3d = geom_func_prefix + "3DPerimeter"
unionagg = geom_func_prefix + "Union"
gis_operators = {
"bbcontains": PostGISOperator(op="~", raster=True),
"bboverlaps": PostGISOperator(op="&&", geography=True, raster=True),
"contained": PostGISOperator(op="@", raster=True),
"overlaps_left": PostGISOperator(op="&<", raster=BILATERAL),
"overlaps_right": PostGISOperator(op="&>", raster=BILATERAL),
"overlaps_below": PostGISOperator(op="&<|"),
"overlaps_above": PostGISOperator(op="|&>"),
"left": PostGISOperator(op="<<"),
"right": PostGISOperator(op=">>"),
"strictly_below": PostGISOperator(op="<<|"),
"strictly_above": PostGISOperator(op="|>>"),
"same_as": PostGISOperator(op="~=", raster=BILATERAL),
"exact": PostGISOperator(op="~=", raster=BILATERAL), # alias of same_as
"contains": PostGISOperator(func="ST_Contains", raster=BILATERAL),
"contains_properly": PostGISOperator(
func="ST_ContainsProperly", raster=BILATERAL
),
"coveredby": PostGISOperator(
func="ST_CoveredBy", geography=True, raster=BILATERAL
),
"covers": PostGISOperator(func="ST_Covers", geography=True, raster=BILATERAL),
"crosses": PostGISOperator(func="ST_Crosses"),
"disjoint": PostGISOperator(func="ST_Disjoint", raster=BILATERAL),
"equals": PostGISOperator(func="ST_Equals"),
"intersects": PostGISOperator(
func="ST_Intersects", geography=True, raster=BILATERAL
),
"overlaps": PostGISOperator(func="ST_Overlaps", raster=BILATERAL),
"relate": PostGISOperator(func="ST_Relate"),
"touches": PostGISOperator(func="ST_Touches", raster=BILATERAL),
"within": PostGISOperator(func="ST_Within", raster=BILATERAL),
"dwithin": PostGISOperator(func="ST_DWithin", geography=True, raster=BILATERAL),
}
unsupported_functions = set()
select = "%s" if is_psycopg3 else "%s::bytea"
select_extent = None
@cached_property
def function_names(self):
function_names = {
"AsWKB": "ST_AsBinary",
"AsWKT": "ST_AsText",
"BoundingCircle": "ST_MinimumBoundingCircle",
"NumPoints": "ST_NPoints",
}
return function_names
@cached_property
def spatial_version(self):
"""Determine the version of the PostGIS library."""
# Trying to get the PostGIS version because the function
# signatures will depend on the version used. The cost
# here is a database query to determine the version, which
# can be mitigated by setting `POSTGIS_VERSION` with a 3-tuple
# comprising user-supplied values for the major, minor, and
# subminor revision of PostGIS.
if hasattr(settings, "POSTGIS_VERSION"):
version = settings.POSTGIS_VERSION
else:
# Run a basic query to check the status of the connection so we're
# sure we only raise the error below if the problem comes from
# PostGIS and not from PostgreSQL itself (see #24862).
self._get_postgis_func("version")
try:
vtup = self.postgis_version_tuple()
except ProgrammingError:
raise ImproperlyConfigured(
'Cannot determine PostGIS version for database "%s" '
'using command "SELECT postgis_lib_version()". '
"GeoDjango requires at least PostGIS version 2.5. "
"Was the database created from a spatial database "
"template?" % self.connection.settings_dict["NAME"]
)
version = vtup[1:]
return version
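    # A settings-file sketch (version numbers illustrative) that avoids
    # the runtime query entirely:
    #
    #     # settings.py
    #     POSTGIS_VERSION = (3, 1, 4)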
def convert_extent(self, box):
"""
Return a 4-tuple extent for the `Extent` aggregate by converting
the bounding box text returned by PostGIS (`box` argument), for
example: "BOX(-90.0 30.0, -85.0 40.0)".
"""
if box is None:
return None
ll, ur = box[4:-1].split(",")
xmin, ymin = map(float, ll.split())
xmax, ymax = map(float, ur.split())
return (xmin, ymin, xmax, ymax)
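    # Worked example of the parse above:
    #
    #     box = "BOX(-90.0 30.0, -85.0 40.0)"
    #     ll, ur = box[4:-1].split(",")  # "-90.0 30.0", " -85.0 40.0"
    #     # -> (-90.0, 30.0, -85.0, 40.0)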
def convert_extent3d(self, box3d):
"""
Return a 6-tuple extent for the `Extent3D` aggregate by converting
the 3d bounding-box text returned by PostGIS (`box3d` argument), for
example: "BOX3D(-90.0 30.0 1, -85.0 40.0 2)".
"""
if box3d is None:
return None
ll, ur = box3d[6:-1].split(",")
xmin, ymin, zmin = map(float, ll.split())
xmax, ymax, zmax = map(float, ur.split())
return (xmin, ymin, zmin, xmax, ymax, zmax)
def geo_db_type(self, f):
"""
Return the database field type for the given spatial field.
"""
if f.geom_type == "RASTER":
return "raster"
# Type-based geometries.
# TODO: Support 'M' extension.
if f.dim == 3:
geom_type = f.geom_type + "Z"
else:
geom_type = f.geom_type
if f.geography:
if f.srid != 4326:
raise NotSupportedError(
"PostGIS only supports geography columns with an SRID of 4326."
)
return "geography(%s,%d)" % (geom_type, f.srid)
else:
return "geometry(%s,%d)" % (geom_type, f.srid)
def get_distance(self, f, dist_val, lookup_type):
"""
Retrieve the distance parameters for the given geometry field,
distance lookup value, and the distance lookup type.
This is the most complex implementation of the spatial backends due to
what is supported on geodetic geometry columns vs. what's available on
projected geometry columns. In addition, it has to take into account
the geography column type.
"""
# Getting the distance parameter
value = dist_val[0]
# Shorthand boolean flags.
geodetic = f.geodetic(self.connection)
geography = f.geography
if isinstance(value, Distance):
if geography:
dist_param = value.m
elif geodetic:
if lookup_type == "dwithin":
raise ValueError(
"Only numeric values of degree units are "
"allowed on geographic DWithin queries."
)
dist_param = value.m
else:
dist_param = getattr(
value, Distance.unit_attname(f.units_name(self.connection))
)
else:
# Assuming the distance is in the units of the field.
dist_param = value
return [dist_param]
def get_geom_placeholder(self, f, value, compiler):
"""
Provide a proper substitution value for Geometries or rasters that are
not in the SRID of the field. Specifically, this routine will
substitute in the ST_Transform() function call.
"""
transform_func = self.spatial_function_name("Transform")
if hasattr(value, "as_sql"):
if value.field.srid == f.srid:
placeholder = "%s"
else:
placeholder = "%s(%%s, %s)" % (transform_func, f.srid)
return placeholder
# Get the srid for this object
if value is None:
value_srid = None
else:
value_srid = value.srid
# Adding Transform() to the SQL placeholder if the value srid
# is not equal to the field srid.
if value_srid is None or value_srid == f.srid:
placeholder = "%s"
else:
placeholder = "%s(%%s, %s)" % (transform_func, f.srid)
return placeholder
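    # Example placeholder produced above (field SRID 4326, value SRID
    # 3857); the doubled %% keeps a literal %s for the query parameter:
    #
    #     "%s(%%s, %s)" % ("ST_Transform", 4326)  # "ST_Transform(%s, 4326)"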
def _get_postgis_func(self, func):
"""
Helper routine for calling PostGIS functions and returning their result.
"""
# Close out the connection. See #9437.
with self.connection.temporary_connection() as cursor:
cursor.execute("SELECT %s()" % func)
return cursor.fetchone()[0]
def postgis_geos_version(self):
"Return the version of the GEOS library used with PostGIS."
return self._get_postgis_func("postgis_geos_version")
def postgis_lib_version(self):
"Return the version number of the PostGIS library used with PostgreSQL."
return self._get_postgis_func("postgis_lib_version")
def postgis_proj_version(self):
"""Return the version of the PROJ library used with PostGIS."""
return self._get_postgis_func("postgis_proj_version")
def postgis_version(self):
"Return PostGIS version number and compile-time options."
return self._get_postgis_func("postgis_version")
def postgis_full_version(self):
"Return PostGIS version number and compile-time options."
return self._get_postgis_func("postgis_full_version")
def postgis_version_tuple(self):
"""
Return the PostGIS version as a tuple (version string, major,
minor, subminor).
"""
version = self.postgis_lib_version()
return (version,) + get_version_tuple(version)
def proj_version_tuple(self):
"""
Return the version of PROJ used by PostGIS as a tuple of the
major, minor, and subminor release numbers.
"""
proj_regex = re.compile(r"(\d+)\.(\d+)\.(\d+)")
proj_ver_str = self.postgis_proj_version()
m = proj_regex.search(proj_ver_str)
if m:
return tuple(map(int, m.groups()))
else:
raise Exception("Could not determine PROJ version from PostGIS.")
def spatial_aggregate_name(self, agg_name):
if agg_name == "Extent3D":
return self.extent3d
else:
return self.geom_func_prefix + agg_name
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
return PostGISGeometryColumns
def spatial_ref_sys(self):
return PostGISSpatialRefSys
def parse_raster(self, value):
"""Convert a PostGIS HEX String into a dict readable by GDALRaster."""
return from_pgraster(value)
def distance_expr_for_lookup(self, lhs, rhs, **kwargs):
return super().distance_expr_for_lookup(
self._normalize_distance_lookup_arg(lhs),
self._normalize_distance_lookup_arg(rhs),
**kwargs,
)
@staticmethod
def _normalize_distance_lookup_arg(arg):
is_raster = (
arg.field.geom_type == "RASTER"
if hasattr(arg, "field")
else isinstance(arg, GDALRaster)
)
return ST_Polygon(arg) if is_raster else arg
def get_geometry_converter(self, expression):
read = wkb_r().read
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if isinstance(value, str): # Coming from hex strings.
value = value.encode("ascii")
return None if value is None else GEOSGeometryBase(read(value), geom_class)
return converter
def get_area_att_for_field(self, field):
return "sq_m"
|
754fa8d46421da53dd88d62606077b4139caab72f7258a99724e692a7f81034a | from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.models.expressions import Col, Func
class PostGISSchemaEditor(DatabaseSchemaEditor):
geom_index_type = "GIST"
geom_index_ops_nd = "GIST_GEOMETRY_OPS_ND"
rast_index_template = "ST_ConvexHull(%(expressions)s)"
sql_alter_column_to_3d = (
"ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force3D(%(column)s)::%(type)s"
)
sql_alter_column_to_2d = (
"ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force2D(%(column)s)::%(type)s"
)
def geo_quote_name(self, name):
return self.connection.ops.geo_quote_name(name)
def _field_should_be_indexed(self, model, field):
if getattr(field, "spatial_index", False):
return True
return super()._field_should_be_indexed(model, field)
def _create_index_sql(self, model, *, fields=None, **kwargs):
if fields is None or len(fields) != 1 or not hasattr(fields[0], "geodetic"):
return super()._create_index_sql(model, fields=fields, **kwargs)
field = fields[0]
expressions = None
opclasses = None
if field.geom_type == "RASTER":
# For raster fields, wrap index creation SQL statement with ST_ConvexHull.
# Indexes on raster columns are based on the convex hull of the raster.
expressions = Func(Col(None, field), template=self.rast_index_template)
fields = None
elif field.dim > 2 and not field.geography:
# Use "nd" ops which are fast on multidimensional cases
opclasses = [self.geom_index_ops_nd]
name = kwargs.get("name")
if not name:
name = self._create_index_name(model._meta.db_table, [field.column], "_id")
return super()._create_index_sql(
model,
fields=fields,
name=name,
using=" USING %s" % self.geom_index_type,
opclasses=opclasses,
expressions=expressions,
)
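    # Statements produced above are of the form (illustrative): a raster
    # column yields
    #     CREATE INDEX ... USING GIST (ST_ConvexHull("rast"))
    # while a 3D, non-geography geometry column yields
    #     CREATE INDEX ... USING GIST ("geom" GIST_GEOMETRY_OPS_ND)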
def _alter_column_type_sql(
self, table, old_field, new_field, new_type, old_collation, new_collation
):
"""
        Special-case the column type change when the geometry dimension changed.
"""
if not hasattr(old_field, "dim") or not hasattr(new_field, "dim"):
return super()._alter_column_type_sql(
table, old_field, new_field, new_type, old_collation, new_collation
)
if old_field.dim == 2 and new_field.dim == 3:
sql_alter = self.sql_alter_column_to_3d
elif old_field.dim == 3 and new_field.dim == 2:
sql_alter = self.sql_alter_column_to_2d
else:
sql_alter = self.sql_alter_column_type
return (
(
sql_alter
% {
"column": self.quote_name(new_field.column),
"type": new_type,
"collation": "",
},
[],
),
[],
)
|
f358703e2ed9d9bec18ca3136c0ecc284b8281261cf0c9c641f07648bf415aa4 | """
This object provides quoting for GEOS geometries into PostgreSQL/PostGIS.
"""
from django.contrib.gis.db.backends.postgis.pgraster import to_pgraster
from django.contrib.gis.geos import GEOSGeometry
from django.db.backends.postgresql.psycopg_any import sql
class PostGISAdapter:
def __init__(self, obj, geography=False):
"""
Initialize on the spatial object.
"""
self.is_geometry = isinstance(obj, (GEOSGeometry, PostGISAdapter))
        # Getting the EWKB (as bytes, to allow easy pickling of the
        # adapter) and the SRID from the geometry or raster.
if self.is_geometry:
self.ewkb = bytes(obj.ewkb)
else:
self.ewkb = to_pgraster(obj)
self.srid = obj.srid
self.geography = geography
def __conform__(self, proto):
"""Does the given protocol conform to what Psycopg2 expects?"""
from psycopg2.extensions import ISQLQuote
if proto == ISQLQuote:
return self
else:
raise Exception(
"Error implementing psycopg2 protocol. Is psycopg2 installed?"
)
def __eq__(self, other):
return isinstance(other, PostGISAdapter) and self.ewkb == other.ewkb
def __hash__(self):
return hash(self.ewkb)
def __str__(self):
return self.getquoted().decode()
@classmethod
def _fix_polygon(cls, poly):
return poly
def getquoted(self):
"""
Return a properly quoted string for use in PostgreSQL/PostGIS.
"""
if self.is_geometry:
# Psycopg will figure out whether to use E'\\000' or '\000'.
return b"%s(%s)" % (
b"ST_GeogFromWKB" if self.geography else b"ST_GeomFromEWKB",
sql.quote(self.ewkb).encode(),
)
else:
# For rasters, add explicit type cast to WKB string.
return b"'%s'::raster" % self.ewkb.hex().encode()
|
b996d71be8778b7c710344160ba0162a71ff3f4c7ef096fdaedf79a83fdaa566 | from django.core.exceptions import FieldDoesNotExist
from django.db import IntegrityError, connection, migrations, models, transaction
from django.db.migrations.migration import Migration
from django.db.migrations.operations.fields import FieldOperation
from django.db.migrations.state import ModelState, ProjectState
from django.db.models.functions import Abs
from django.db.transaction import atomic
from django.test import (
SimpleTestCase,
ignore_warnings,
override_settings,
skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext
from django.utils.deprecation import RemovedInDjango51Warning
from .models import FoodManager, FoodQuerySet, UnicodeModel
from .test_base import OperationTestBase
class Mixin:
pass
class OperationTests(OperationTestBase):
"""
Tests running the operations and making sure they do what they say they do.
Each test looks at their state changing, and then their database operation -
both forwards and backwards.
"""
def test_create_model(self):
"""
Tests the CreateModel operation.
Most other tests use this operation as part of setup, so check failures
here first.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
self.assertEqual(operation.describe(), "Create model Pony")
self.assertEqual(operation.migration_name_fragment, "pony")
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["fields", "name"])
# And default manager not in set
operation = migrations.CreateModel(
"Foo", fields=[], managers=[("objects", models.Manager())]
)
definition = operation.deconstruct()
self.assertNotIn("managers", definition[2])
def test_create_model_with_duplicate_field_name(self):
with self.assertRaisesMessage(
ValueError, "Found duplicate value pink in CreateModel fields argument."
):
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.TextField()),
("pink", models.IntegerField(default=1)),
],
)
def test_create_model_with_duplicate_base(self):
message = "Found duplicate value test_crmo.pony in CreateModel bases argument."
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
"test_crmo.Pony",
"test_crmo.Pony",
),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
"test_crmo.Pony",
"test_crmo.pony",
),
)
message = (
"Found duplicate value migrations.unicodemodel in CreateModel bases "
"argument."
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
UnicodeModel,
UnicodeModel,
),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
UnicodeModel,
"migrations.unicodemodel",
),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
UnicodeModel,
"migrations.UnicodeModel",
),
)
message = (
"Found duplicate value <class 'django.db.models.base.Model'> in "
"CreateModel bases argument."
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
models.Model,
models.Model,
),
)
message = (
"Found duplicate value <class 'migrations.test_operations.Mixin'> in "
"CreateModel bases argument."
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
Mixin,
Mixin,
),
)
def test_create_model_with_duplicate_manager_name(self):
with self.assertRaisesMessage(
ValueError,
"Found duplicate value objects in CreateModel managers argument.",
):
migrations.CreateModel(
"Pony",
fields=[],
managers=[
("objects", models.Manager()),
("objects", models.Manager()),
],
)
def test_create_model_with_unique_after(self):
"""
Tests the CreateModel operation directly followed by an
AlterUniqueTogether (bug #22844 - sqlite remake issues)
"""
operation1 = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
operation2 = migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("number", models.IntegerField(default=1)),
("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)),
],
)
operation3 = migrations.AlterUniqueTogether(
"Rider",
[
("number", "pony"),
],
)
# Test the database alteration
project_state = ProjectState()
self.assertTableNotExists("test_crmoua_pony")
self.assertTableNotExists("test_crmoua_rider")
with connection.schema_editor() as editor:
new_state = project_state.clone()
operation1.state_forwards("test_crmoua", new_state)
operation1.database_forwards(
"test_crmoua", editor, project_state, new_state
)
project_state, new_state = new_state, new_state.clone()
operation2.state_forwards("test_crmoua", new_state)
operation2.database_forwards(
"test_crmoua", editor, project_state, new_state
)
project_state, new_state = new_state, new_state.clone()
operation3.state_forwards("test_crmoua", new_state)
operation3.database_forwards(
"test_crmoua", editor, project_state, new_state
)
self.assertTableExists("test_crmoua_pony")
self.assertTableExists("test_crmoua_rider")
def test_create_model_m2m(self):
"""
Test the creation of a model with a ManyToMany field and the
auto-created "through" model.
"""
project_state = self.set_up_test_model("test_crmomm")
operation = migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("Pony", related_name="stables")),
],
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_crmomm", new_state)
# Test the database alteration
self.assertTableNotExists("test_crmomm_stable_ponies")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmomm", editor, project_state, new_state)
self.assertTableExists("test_crmomm_stable")
self.assertTableExists("test_crmomm_stable_ponies")
self.assertColumnNotExists("test_crmomm_stable", "ponies")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_crmomm", "Pony")
Stable = new_state.apps.get_model("test_crmomm", "Stable")
stable = Stable.objects.create()
p1 = Pony.objects.create(pink=False, weight=4.55)
p2 = Pony.objects.create(pink=True, weight=5.43)
stable.ponies.add(p1, p2)
self.assertEqual(stable.ponies.count(), 2)
stable.ponies.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crmomm", editor, new_state, project_state
)
self.assertTableNotExists("test_crmomm_stable")
self.assertTableNotExists("test_crmomm_stable_ponies")
@skipUnlessDBFeature("supports_collation_on_charfield", "supports_foreign_keys")
def test_create_fk_models_to_pk_field_db_collation(self):
"""Creation of models with a FK to a PK with db_collation."""
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
app_label = "test_cfkmtopkfdbc"
operations = [
migrations.CreateModel(
"Pony",
[
(
"id",
models.CharField(
primary_key=True,
max_length=10,
db_collation=collation,
),
),
],
)
]
project_state = self.apply_operations(app_label, ProjectState(), operations)
# ForeignKey.
new_state = project_state.clone()
operation = migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
)
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnCollation(f"{app_label}_rider", "pony_id", collation)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
# OneToOneField.
new_state = project_state.clone()
operation = migrations.CreateModel(
"ShetlandPony",
[
(
"pony",
models.OneToOneField("Pony", models.CASCADE, primary_key=True),
),
("cuteness", models.IntegerField(default=1)),
],
)
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnCollation(f"{app_label}_shetlandpony", "pony_id", collation)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
def test_create_model_inheritance(self):
"""
Tests the CreateModel operation on a multi-table inheritance setup.
"""
project_state = self.set_up_test_model("test_crmoih")
# Test the state alteration
operation = migrations.CreateModel(
"ShetlandPony",
[
(
"pony_ptr",
models.OneToOneField(
"test_crmoih.Pony",
models.CASCADE,
auto_created=True,
primary_key=True,
to_field="id",
serialize=False,
),
),
("cuteness", models.IntegerField(default=1)),
],
)
new_state = project_state.clone()
operation.state_forwards("test_crmoih", new_state)
self.assertIn(("test_crmoih", "shetlandpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crmoih_shetlandpony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmoih", editor, project_state, new_state)
self.assertTableExists("test_crmoih_shetlandpony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crmoih", editor, new_state, project_state
)
self.assertTableNotExists("test_crmoih_shetlandpony")
def test_create_proxy_model(self):
"""
CreateModel ignores proxy models.
"""
project_state = self.set_up_test_model("test_crprmo")
# Test the state alteration
operation = migrations.CreateModel(
"ProxyPony",
[],
options={"proxy": True},
bases=("test_crprmo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_crprmo", new_state)
self.assertIn(("test_crprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crprmo", editor, project_state, new_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crprmo", editor, new_state, project_state
)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"])
def test_create_unmanaged_model(self):
"""
CreateModel ignores unmanaged models.
"""
project_state = self.set_up_test_model("test_crummo")
# Test the state alteration
operation = migrations.CreateModel(
"UnmanagedPony",
[],
options={"proxy": True},
bases=("test_crummo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony")
new_state = project_state.clone()
operation.state_forwards("test_crummo", new_state)
self.assertIn(("test_crummo", "unmanagedpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crummo", editor, project_state, new_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crummo", editor, new_state, project_state
)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
@skipUnlessDBFeature("supports_table_check_constraints")
def test_create_model_with_constraint(self):
where = models.Q(pink__gt=2)
check_constraint = models.CheckConstraint(
check=where, name="test_constraint_pony_pink_gt_2"
)
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
],
options={"constraints": [check_constraint]},
)
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(
len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
)
# Test database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
with connection.cursor() as cursor:
with self.assertRaises(IntegrityError):
cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)")
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2]["options"]["constraints"], [check_constraint])
@skipUnlessDBFeature("supports_table_check_constraints")
def test_create_model_with_boolean_expression_in_check_constraint(self):
app_label = "test_crmobechc"
rawsql_constraint = models.CheckConstraint(
check=models.expressions.RawSQL(
"price < %s", (1000,), output_field=models.BooleanField()
),
name=f"{app_label}_price_lt_1000_raw",
)
wrapper_constraint = models.CheckConstraint(
check=models.expressions.ExpressionWrapper(
models.Q(price__gt=500) | models.Q(price__lt=500),
output_field=models.BooleanField(),
),
name=f"{app_label}_price_neq_500_wrap",
)
operation = migrations.CreateModel(
"Product",
[
("id", models.AutoField(primary_key=True)),
("price", models.IntegerField(null=True)),
],
options={"constraints": [rawsql_constraint, wrapper_constraint]},
)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
# Add table.
self.assertTableNotExists(app_label)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertTableExists(f"{app_label}_product")
insert_sql = f"INSERT INTO {app_label}_product (id, price) VALUES (%d, %d)"
with connection.cursor() as cursor:
with self.assertRaises(IntegrityError):
cursor.execute(insert_sql % (1, 1000))
cursor.execute(insert_sql % (1, 999))
with self.assertRaises(IntegrityError):
cursor.execute(insert_sql % (2, 500))
cursor.execute(insert_sql % (2, 499))
def test_create_model_with_partial_unique_constraint(self):
partial_unique_constraint = models.UniqueConstraint(
fields=["pink"],
condition=models.Q(weight__gt=5),
name="test_constraint_pony_pink_for_weight_gt_5_uniq",
)
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options={"constraints": [partial_unique_constraint]},
)
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(
len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
)
# Test database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
# Test constraint works
Pony = new_state.apps.get_model("test_crmo", "Pony")
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2]["options"]["constraints"], [partial_unique_constraint]
)
def test_create_model_with_deferred_unique_constraint(self):
deferred_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="deferrable_pink_constraint",
deferrable=models.Deferrable.DEFERRED,
)
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
],
options={"constraints": [deferred_unique_constraint]},
)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(
len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
)
self.assertTableNotExists("test_crmo_pony")
# Create table.
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
Pony = new_state.apps.get_model("test_crmo", "Pony")
Pony.objects.create(pink=1)
if connection.features.supports_deferrable_unique_constraints:
# Unique constraint is deferred.
with transaction.atomic():
obj = Pony.objects.create(pink=1)
obj.pink = 2
obj.save()
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(
deferred_unique_constraint.name
)
cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
obj = Pony.objects.create(pink=1)
obj.pink = 3
obj.save()
else:
Pony.objects.create(pink=1)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2]["options"]["constraints"],
[deferred_unique_constraint],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_create_model_with_covering_unique_constraint(self):
covering_unique_constraint = models.UniqueConstraint(
fields=["pink"],
include=["weight"],
name="test_constraint_pony_pink_covering_weight",
)
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options={"constraints": [covering_unique_constraint]},
)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(
len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
)
self.assertTableNotExists("test_crmo_pony")
# Create table.
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
Pony = new_state.apps.get_model("test_crmo", "Pony")
Pony.objects.create(pink=1, weight=4.0)
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=7.0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2]["options"]["constraints"],
[covering_unique_constraint],
)
def test_create_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_cmoma")
# Test the state alteration
operation = migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
self.assertEqual(operation.describe(), "Create model Food")
new_state = project_state.clone()
operation.state_forwards("test_cmoma", new_state)
self.assertIn(("test_cmoma", "food"), new_state.models)
managers = new_state.models["test_cmoma", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
def test_delete_model(self):
"""
Tests the DeleteModel operation.
"""
project_state = self.set_up_test_model("test_dlmo")
# Test the state alteration
operation = migrations.DeleteModel("Pony")
self.assertEqual(operation.describe(), "Delete model Pony")
self.assertEqual(operation.migration_name_fragment, "delete_pony")
new_state = project_state.clone()
operation.state_forwards("test_dlmo", new_state)
self.assertNotIn(("test_dlmo", "pony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlmo", editor, project_state, new_state)
self.assertTableNotExists("test_dlmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlmo", editor, new_state, project_state)
self.assertTableExists("test_dlmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "DeleteModel")
self.assertEqual(definition[1], [])
self.assertEqual(list(definition[2]), ["name"])
def test_delete_proxy_model(self):
"""
Tests the DeleteModel operation ignores proxy models.
"""
project_state = self.set_up_test_model("test_dlprmo", proxy_model=True)
# Test the state alteration
operation = migrations.DeleteModel("ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_dlprmo", new_state)
self.assertIn(("test_dlprmo", "proxypony"), project_state.models)
self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlprmo", editor, project_state, new_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_dlprmo", editor, new_state, project_state
)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
def test_delete_mti_model(self):
project_state = self.set_up_test_model("test_dlmtimo", mti_model=True)
# Test the state alteration
operation = migrations.DeleteModel("ShetlandPony")
new_state = project_state.clone()
operation.state_forwards("test_dlmtimo", new_state)
self.assertIn(("test_dlmtimo", "shetlandpony"), project_state.models)
self.assertNotIn(("test_dlmtimo", "shetlandpony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlmtimo_pony")
self.assertTableExists("test_dlmtimo_shetlandpony")
self.assertColumnExists("test_dlmtimo_shetlandpony", "pony_ptr_id")
with connection.schema_editor() as editor:
operation.database_forwards(
"test_dlmtimo", editor, project_state, new_state
)
self.assertTableExists("test_dlmtimo_pony")
self.assertTableNotExists("test_dlmtimo_shetlandpony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_dlmtimo", editor, new_state, project_state
)
self.assertTableExists("test_dlmtimo_pony")
self.assertTableExists("test_dlmtimo_shetlandpony")
self.assertColumnExists("test_dlmtimo_shetlandpony", "pony_ptr_id")
def test_rename_model(self):
"""
Tests the RenameModel operation.
"""
project_state = self.set_up_test_model("test_rnmo", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Pony", "Horse")
self.assertEqual(operation.describe(), "Rename model Pony to Horse")
self.assertEqual(operation.migration_name_fragment, "rename_pony_horse")
# Test initial state and database
self.assertIn(("test_rnmo", "pony"), project_state.models)
self.assertNotIn(("test_rnmo", "horse"), project_state.models)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")
)
self.assertFKNotExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")
)
# Migrate forwards
new_state = project_state.clone()
atomic_rename = connection.features.supports_atomic_references_rename
new_state = self.apply_operations(
"test_rnmo", new_state, [operation], atomic=atomic_rename
)
# Test new state and database
self.assertNotIn(("test_rnmo", "pony"), new_state.models)
self.assertIn(("test_rnmo", "horse"), new_state.models)
# RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual(
new_state.models["test_rnmo", "rider"].fields["pony"].remote_field.model,
"test_rnmo.Horse",
)
self.assertTableNotExists("test_rnmo_pony")
self.assertTableExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKNotExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")
)
self.assertFKExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")
)
# Migrate backwards
original_state = self.unapply_operations(
"test_rnmo", project_state, [operation], atomic=atomic_rename
)
# Test original state and database
self.assertIn(("test_rnmo", "pony"), original_state.models)
self.assertNotIn(("test_rnmo", "horse"), original_state.models)
self.assertEqual(
original_state.models["test_rnmo", "rider"]
.fields["pony"]
.remote_field.model,
"Pony",
)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")
)
self.assertFKNotExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"old_name": "Pony", "new_name": "Horse"})
def test_rename_model_state_forwards(self):
"""
RenameModel operations shouldn't trigger the caching of rendered apps
on state without prior apps.
"""
state = ProjectState()
state.add_model(ModelState("migrations", "Foo", []))
operation = migrations.RenameModel("Foo", "Bar")
operation.state_forwards("migrations", state)
self.assertNotIn("apps", state.__dict__)
self.assertNotIn(("migrations", "foo"), state.models)
self.assertIn(("migrations", "bar"), state.models)
# Now with apps cached.
apps = state.apps
operation = migrations.RenameModel("Bar", "Foo")
operation.state_forwards("migrations", state)
self.assertIs(state.apps, apps)
self.assertNotIn(("migrations", "bar"), state.models)
self.assertIn(("migrations", "foo"), state.models)
def test_rename_model_with_self_referential_fk(self):
"""
Tests the RenameModel operation on model with self referential FK.
"""
project_state = self.set_up_test_model("test_rmwsrf", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Rider", "HorseRider")
self.assertEqual(operation.describe(), "Rename model Rider to HorseRider")
new_state = project_state.clone()
operation.state_forwards("test_rmwsrf", new_state)
self.assertNotIn(("test_rmwsrf", "rider"), new_state.models)
self.assertIn(("test_rmwsrf", "horserider"), new_state.models)
# Remember, RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual(
"self",
new_state.models["test_rmwsrf", "horserider"]
.fields["friend"]
.remote_field.model,
)
HorseRider = new_state.apps.get_model("test_rmwsrf", "horserider")
self.assertIs(
HorseRider._meta.get_field("horserider").remote_field.model, HorseRider
)
# Test the database alteration
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")
)
self.assertFKNotExists(
"test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")
)
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards("test_rmwsrf", editor, project_state, new_state)
self.assertTableNotExists("test_rmwsrf_rider")
self.assertTableExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKNotExists(
"test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")
)
self.assertFKExists(
"test_rmwsrf_horserider",
["friend_id"],
("test_rmwsrf_horserider", "id"),
)
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards(
"test_rmwsrf", editor, new_state, project_state
)
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")
)
self.assertFKNotExists(
"test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")
)
def test_rename_model_with_superclass_fk(self):
"""
Tests the RenameModel operation on a model which has a superclass that
has a foreign key.
"""
project_state = self.set_up_test_model(
"test_rmwsc", related_model=True, mti_model=True
)
# Test the state alteration
operation = migrations.RenameModel("ShetlandPony", "LittleHorse")
self.assertEqual(
operation.describe(), "Rename model ShetlandPony to LittleHorse"
)
new_state = project_state.clone()
operation.state_forwards("test_rmwsc", new_state)
self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models)
self.assertIn(("test_rmwsc", "littlehorse"), new_state.models)
# RenameModel shouldn't repoint the superclass's relations, only local ones
self.assertEqual(
project_state.models["test_rmwsc", "rider"]
.fields["pony"]
.remote_field.model,
new_state.models["test_rmwsc", "rider"].fields["pony"].remote_field.model,
)
# Before running the migration we have a table for Shetland Pony, not
# Little Horse.
self.assertTableExists("test_rmwsc_shetlandpony")
self.assertTableNotExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# and the foreign key on rider points to pony, not shetland pony
self.assertFKExists(
"test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")
)
self.assertFKNotExists(
"test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")
)
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
operation.database_forwards("test_rmwsc", editor, project_state, new_state)
# Now we have a little horse table, not shetland pony
self.assertTableNotExists("test_rmwsc_shetlandpony")
self.assertTableExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
            # but the foreign keys still point at pony, not little horse
self.assertFKExists(
"test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")
)
self.assertFKNotExists(
"test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")
)
def test_rename_model_with_self_referential_m2m(self):
app_label = "test_rename_model_with_self_referential_m2m"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"ReflexivePony",
fields=[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("self")),
],
),
],
)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameModel("ReflexivePony", "ReflexivePony2"),
],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = project_state.apps.get_model(app_label, "ReflexivePony2")
pony = Pony.objects.create()
pony.ponies.add(pony)
def test_rename_model_with_m2m(self):
app_label = "test_rename_model_with_m2m"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
],
),
],
)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameModel("Pony", "Pony2"),
],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = project_state.apps.get_model(app_label, "Pony2")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(
Pony._meta.get_field("riders").remote_field.through.objects.count(), 2
)
def test_rename_model_with_db_table_rename_m2m(self):
app_label = "test_rmwdbrm2m"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
],
options={"db_table": "pony"},
),
],
)
new_state = self.apply_operations(
app_label,
project_state,
operations=[migrations.RenameModel("Pony", "PinkPony")],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = new_state.apps.get_model(app_label, "PinkPony")
Rider = new_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
def test_rename_m2m_target_model(self):
app_label = "test_rename_m2m_target_model"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
],
),
],
)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameModel("Rider", "Rider2"),
],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider2")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(
Pony._meta.get_field("riders").remote_field.through.objects.count(), 2
)
def test_rename_m2m_through_model(self):
app_label = "test_rename_through"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"rider",
models.ForeignKey(
"test_rename_through.Rider", models.CASCADE
),
),
(
"pony",
models.ForeignKey(
"test_rename_through.Pony", models.CASCADE
),
),
],
),
migrations.AddField(
"Pony",
"riders",
models.ManyToManyField(
"test_rename_through.Rider",
through="test_rename_through.PonyRider",
),
),
],
)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider")
pony = Pony.objects.create()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameModel("PonyRider", "PonyRider2"),
],
)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider2")
pony = Pony.objects.first()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
self.assertEqual(Pony.objects.count(), 1)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(PonyRider.objects.count(), 2)
self.assertEqual(pony.riders.count(), 2)
def test_rename_m2m_model_after_rename_field(self):
"""RenameModel renames a many-to-many column after a RenameField."""
app_label = "test_rename_multiple"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=20)),
],
),
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"pony",
models.ForeignKey(
"test_rename_multiple.Pony", models.CASCADE
),
),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
],
),
migrations.RenameField(
model_name="pony", old_name="name", new_name="fancy_name"
),
migrations.RenameModel(old_name="Rider", new_name="Jockey"),
],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = project_state.apps.get_model(app_label, "Pony")
Jockey = project_state.apps.get_model(app_label, "Jockey")
PonyRider = project_state.apps.get_model(app_label, "PonyRider")
# No "no such column" error means the column was renamed correctly.
pony = Pony.objects.create(fancy_name="a good name")
jockey = Jockey.objects.create(pony=pony)
ponyrider = PonyRider.objects.create()
ponyrider.riders.add(jockey)
def test_add_field(self):
"""
Tests the AddField operation.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
self.assertEqual(operation.describe(), "Add field height to Pony")
self.assertEqual(operation.migration_name_fragment, "pony_height")
project_state, new_state = self.make_test_state("test_adfl", operation)
self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
field = new_state.models["test_adfl", "pony"].fields["height"]
self.assertEqual(field.default, 5)
# Test the database alteration
self.assertColumnNotExists("test_adfl_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfl", editor, project_state, new_state)
self.assertColumnExists("test_adfl_pony", "height")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfl", editor, new_state, project_state)
self.assertColumnNotExists("test_adfl_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_add_charfield(self):
"""
Tests the AddField operation on TextField.
"""
project_state = self.set_up_test_model("test_adchfl")
Pony = project_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations(
"test_adchfl",
project_state,
[
migrations.AddField(
"Pony",
"text",
models.CharField(max_length=10, default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.CharField(max_length=10, default=""),
),
                # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.CharField(max_length=10, default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.CharField(max_length=10, default='"\'"'),
),
],
)
Pony = new_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_textfield(self):
"""
Tests the AddField operation on TextField.
"""
project_state = self.set_up_test_model("test_adtxtfl")
Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations(
"test_adtxtfl",
project_state,
[
migrations.AddField(
"Pony",
"text",
models.TextField(default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.TextField(default=""),
),
                # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.TextField(default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.TextField(default='"\'"'),
),
],
)
Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_binaryfield(self):
"""
Tests the AddField operation on TextField/BinaryField.
"""
project_state = self.set_up_test_model("test_adbinfl")
Pony = project_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations(
"test_adbinfl",
project_state,
[
migrations.AddField(
"Pony",
"blob",
models.BinaryField(default=b"some text"),
),
migrations.AddField(
"Pony",
"empty",
models.BinaryField(default=b""),
),
                # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.BinaryField(default=b"42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.BinaryField(default=b'"\'"'),
),
],
)
Pony = new_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
# SQLite returns buffer/memoryview, cast to bytes for checking.
self.assertEqual(bytes(pony.blob), b"some text")
self.assertEqual(bytes(pony.empty), b"")
self.assertEqual(bytes(pony.digits), b"42")
self.assertEqual(bytes(pony.quotes), b'"\'"')
def test_column_name_quoting(self):
"""
Column names that are SQL keywords shouldn't cause problems when used
in migrations (#22168).
"""
project_state = self.set_up_test_model("test_regr22168")
operation = migrations.AddField(
"Pony",
"order",
models.IntegerField(default=0),
)
new_state = project_state.clone()
operation.state_forwards("test_regr22168", new_state)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_regr22168", editor, project_state, new_state
)
self.assertColumnExists("test_regr22168_pony", "order")
def test_add_field_preserve_default(self):
"""
Tests the AddField operation's state alteration
when preserve_default = False.
"""
project_state = self.set_up_test_model("test_adflpd")
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=4),
preserve_default=False,
)
new_state = project_state.clone()
operation.state_forwards("test_adflpd", new_state)
self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4)
field = new_state.models["test_adflpd", "pony"].fields["height"]
self.assertEqual(field.default, models.NOT_PROVIDED)
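        # With preserve_default=False the default is used only to backfill
        # existing rows during the ALTER and is then dropped from state, which
        # is what the NOT_PROVIDED assertion above captures.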
# Test the database alteration
project_state.apps.get_model("test_adflpd", "pony").objects.create(
weight=4,
)
self.assertColumnNotExists("test_adflpd_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflpd", editor, project_state, new_state)
self.assertColumnExists("test_adflpd_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(
sorted(definition[2]), ["field", "model_name", "name", "preserve_default"]
)
def test_add_field_m2m(self):
"""
Tests the AddField operation with a ManyToManyField.
"""
project_state = self.set_up_test_model("test_adflmm", second_model=True)
# Test the state alteration
operation = migrations.AddField(
"Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")
)
new_state = project_state.clone()
operation.state_forwards("test_adflmm", new_state)
self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4)
# Test the database alteration
self.assertTableNotExists("test_adflmm_pony_stables")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflmm", editor, project_state, new_state)
self.assertTableExists("test_adflmm_pony_stables")
self.assertColumnNotExists("test_adflmm_pony", "stables")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_adflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.stables.create()
self.assertEqual(p.stables.count(), 1)
p.stables.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_adflmm", editor, new_state, project_state
)
self.assertTableNotExists("test_adflmm_pony_stables")
def test_alter_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True)
project_state = self.apply_operations(
"test_alflmm",
project_state,
operations=[
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField("Stable", related_name="ponies"),
)
],
)
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertFalse(Pony._meta.get_field("stables").blank)
project_state = self.apply_operations(
"test_alflmm",
project_state,
operations=[
migrations.AlterField(
"Pony",
"stables",
models.ManyToManyField(
to="Stable", related_name="ponies", blank=True
),
)
],
)
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertTrue(Pony._meta.get_field("stables").blank)
def test_repoint_field_m2m(self):
project_state = self.set_up_test_model(
"test_alflmm", second_model=True, third_model=True
)
project_state = self.apply_operations(
"test_alflmm",
project_state,
operations=[
migrations.AddField(
"Pony",
"places",
models.ManyToManyField("Stable", related_name="ponies"),
)
],
)
Pony = project_state.apps.get_model("test_alflmm", "Pony")
project_state = self.apply_operations(
"test_alflmm",
project_state,
operations=[
migrations.AlterField(
"Pony",
"places",
models.ManyToManyField(to="Van", related_name="ponies"),
)
],
)
# Ensure the new field actually works
Pony = project_state.apps.get_model("test_alflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.places.create()
self.assertEqual(p.places.count(), 1)
p.places.all().delete()
def test_remove_field_m2m(self):
project_state = self.set_up_test_model("test_rmflmm", second_model=True)
project_state = self.apply_operations(
"test_rmflmm",
project_state,
operations=[
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField("Stable", related_name="ponies"),
)
],
)
self.assertTableExists("test_rmflmm_pony_stables")
with_field_state = project_state.clone()
operations = [migrations.RemoveField("Pony", "stables")]
project_state = self.apply_operations(
"test_rmflmm", project_state, operations=operations
)
self.assertTableNotExists("test_rmflmm_pony_stables")
# And test reversal
self.unapply_operations("test_rmflmm", with_field_state, operations=operations)
self.assertTableExists("test_rmflmm_pony_stables")
def test_remove_field_m2m_with_through(self):
project_state = self.set_up_test_model("test_rmflmmwt", second_model=True)
self.assertTableNotExists("test_rmflmmwt_ponystables")
project_state = self.apply_operations(
"test_rmflmmwt",
project_state,
operations=[
migrations.CreateModel(
"PonyStables",
fields=[
(
"pony",
models.ForeignKey("test_rmflmmwt.Pony", models.CASCADE),
),
(
"stable",
models.ForeignKey("test_rmflmmwt.Stable", models.CASCADE),
),
],
),
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField(
"Stable",
related_name="ponies",
through="test_rmflmmwt.PonyStables",
),
),
],
)
self.assertTableExists("test_rmflmmwt_ponystables")
operations = [
migrations.RemoveField("Pony", "stables"),
migrations.DeleteModel("PonyStables"),
]
self.apply_operations("test_rmflmmwt", project_state, operations=operations)
def test_remove_field(self):
"""
Tests the RemoveField operation.
"""
project_state = self.set_up_test_model("test_rmfl")
# Test the state alteration
operation = migrations.RemoveField("Pony", "pink")
self.assertEqual(operation.describe(), "Remove field pink from Pony")
self.assertEqual(operation.migration_name_fragment, "remove_pony_pink")
new_state = project_state.clone()
operation.state_forwards("test_rmfl", new_state)
self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2)
# Test the database alteration
self.assertColumnExists("test_rmfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_rmfl", editor, project_state, new_state)
self.assertColumnNotExists("test_rmfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmfl", editor, new_state, project_state)
self.assertColumnExists("test_rmfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "name": "pink"})
def test_remove_fk(self):
"""
Tests the RemoveField operation on a foreign key.
"""
project_state = self.set_up_test_model("test_rfk", related_model=True)
self.assertColumnExists("test_rfk_rider", "pony_id")
operation = migrations.RemoveField("Rider", "pony")
new_state = project_state.clone()
operation.state_forwards("test_rfk", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_rfk", editor, project_state, new_state)
self.assertColumnNotExists("test_rfk_rider", "pony_id")
with connection.schema_editor() as editor:
operation.database_backwards("test_rfk", editor, new_state, project_state)
self.assertColumnExists("test_rfk_rider", "pony_id")
def test_alter_model_table(self):
"""
Tests the AlterModelTable operation.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony_2")
self.assertEqual(
operation.describe(), "Rename table for Pony to test_almota_pony_2"
)
self.assertEqual(operation.migration_name_fragment, "alter_pony_table")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(
new_state.models["test_almota", "pony"].options["db_table"],
"test_almota_pony_2",
)
# Test the database alteration
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableNotExists("test_almota_pony")
self.assertTableExists("test_almota_pony_2")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_almota", editor, new_state, project_state
)
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelTable")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"name": "Pony", "table": "test_almota_pony_2"})
def test_alter_model_table_none(self):
"""
Tests the AlterModelTable operation if the table name is set to None.
"""
operation = migrations.AlterModelTable("Pony", None)
self.assertEqual(operation.describe(), "Rename table for Pony to (default)")
def test_alter_model_table_noop(self):
"""
Tests the AlterModelTable operation if the table name is not changed.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(
new_state.models["test_almota", "pony"].options["db_table"],
"test_almota_pony",
)
# Test the database alteration
self.assertTableExists("test_almota_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableExists("test_almota_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_almota", editor, new_state, project_state
)
self.assertTableExists("test_almota_pony")
def test_alter_model_table_m2m(self):
"""
AlterModelTable should rename auto-generated M2M tables.
"""
app_label = "test_talflmltlm2m"
pony_db_table = "pony_foo"
project_state = self.set_up_test_model(
app_label, second_model=True, db_table=pony_db_table
)
# Add the M2M field
first_state = project_state.clone()
operation = migrations.AddField(
"Pony", "stables", models.ManyToManyField("Stable")
)
operation.state_forwards(app_label, first_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, first_state)
original_m2m_table = "%s_%s" % (pony_db_table, "stables")
new_m2m_table = "%s_%s" % (app_label, "pony_stables")
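        # Auto-created M2M tables are named "<model db_table>_<field name>",
        # so the join table follows the custom db_table ("pony_foo_stables")
        # and reverts to the default "<app>_pony_stables" once it's cleared.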
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
        # Rename the Pony db_table, which should also rename the m2m table.
second_state = first_state.clone()
operation = migrations.AlterModelTable(name="pony", table=None)
operation.state_forwards(app_label, second_state)
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards(app_label, editor, first_state, second_state)
self.assertTableExists(new_m2m_table)
self.assertTableNotExists(original_m2m_table)
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards(app_label, editor, second_state, first_state)
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
def test_alter_model_table_m2m_field(self):
app_label = "test_talm2mfl"
project_state = self.set_up_test_model(app_label, second_model=True)
# Add the M2M field.
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField("Stable"),
)
],
)
m2m_table = f"{app_label}_pony_stables"
self.assertColumnExists(m2m_table, "pony_id")
self.assertColumnExists(m2m_table, "stable_id")
# Point the M2M field to self.
with_field_state = project_state.clone()
operations = [
migrations.AlterField(
model_name="Pony",
name="stables",
field=models.ManyToManyField("self"),
)
]
project_state = self.apply_operations(
app_label, project_state, operations=operations
)
self.assertColumnExists(m2m_table, "from_pony_id")
self.assertColumnExists(m2m_table, "to_pony_id")
# Reversal.
self.unapply_operations(app_label, with_field_state, operations=operations)
self.assertColumnExists(m2m_table, "pony_id")
self.assertColumnExists(m2m_table, "stable_id")
def test_alter_field(self):
"""
Tests the AlterField operation.
"""
project_state = self.set_up_test_model("test_alfl")
# Test the state alteration
operation = migrations.AlterField(
"Pony", "pink", models.IntegerField(null=True)
)
self.assertEqual(operation.describe(), "Alter field pink on Pony")
self.assertEqual(operation.migration_name_fragment, "alter_pony_pink")
new_state = project_state.clone()
operation.state_forwards("test_alfl", new_state)
self.assertIs(
project_state.models["test_alfl", "pony"].fields["pink"].null, False
)
self.assertIs(new_state.models["test_alfl", "pony"].fields["pink"].null, True)
# Test the database alteration
self.assertColumnNotNull("test_alfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alfl", editor, project_state, new_state)
self.assertColumnNull("test_alfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alfl", editor, new_state, project_state)
self.assertColumnNotNull("test_alfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_alter_field_add_db_column_noop(self):
"""
AlterField operation is a noop when adding only a db_column and the
column name is not changed.
"""
app_label = "test_afadbn"
project_state = self.set_up_test_model(app_label, related_model=True)
pony_table = "%s_pony" % app_label
new_state = project_state.clone()
operation = migrations.AlterField(
"Pony", "weight", models.FloatField(db_column="weight")
)
operation.state_forwards(app_label, new_state)
self.assertIsNone(
project_state.models[app_label, "pony"].fields["weight"].db_column,
)
self.assertEqual(
new_state.models[app_label, "pony"].fields["weight"].db_column,
"weight",
)
self.assertColumnExists(pony_table, "weight")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnExists(pony_table, "weight")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_backwards(
app_label, editor, new_state, project_state
)
self.assertColumnExists(pony_table, "weight")
rider_table = "%s_rider" % app_label
new_state = project_state.clone()
operation = migrations.AlterField(
"Rider",
"pony",
models.ForeignKey("Pony", models.CASCADE, db_column="pony_id"),
)
operation.state_forwards(app_label, new_state)
self.assertIsNone(
project_state.models[app_label, "rider"].fields["pony"].db_column,
)
        self.assertEqual(
            new_state.models[app_label, "rider"].fields["pony"].db_column,
            "pony_id",
        )
self.assertColumnExists(rider_table, "pony_id")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnExists(rider_table, "pony_id")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
                operation.database_backwards(
                    app_label, editor, new_state, project_state
                )
self.assertColumnExists(rider_table, "pony_id")
@skipUnlessDBFeature("supports_comments")
def test_alter_model_table_comment(self):
app_label = "test_almotaco"
project_state = self.set_up_test_model(app_label)
pony_table = f"{app_label}_pony"
# Add table comment.
operation = migrations.AlterModelTableComment("Pony", "Custom pony comment")
self.assertEqual(operation.describe(), "Alter Pony table comment")
self.assertEqual(operation.migration_name_fragment, "alter_pony_table_comment")
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
new_state.models[app_label, "pony"].options["db_table_comment"],
"Custom pony comment",
)
self.assertTableCommentNotExists(pony_table)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertTableComment(pony_table, "Custom pony comment")
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertTableCommentNotExists(pony_table)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelTableComment")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"name": "Pony", "table_comment": "Custom pony comment"}
)
def test_alter_field_pk(self):
"""
The AlterField operation on primary keys (things like PostgreSQL's
SERIAL weirdness).
"""
project_state = self.set_up_test_model("test_alflpk")
# Test the state alteration
operation = migrations.AlterField(
"Pony", "id", models.IntegerField(primary_key=True)
)
new_state = project_state.clone()
operation.state_forwards("test_alflpk", new_state)
self.assertIsInstance(
project_state.models["test_alflpk", "pony"].fields["id"],
models.AutoField,
)
self.assertIsInstance(
new_state.models["test_alflpk", "pony"].fields["id"],
models.IntegerField,
)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpk", editor, project_state, new_state)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alflpk", editor, new_state, project_state
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_field_pk_fk(self):
"""
Tests the AlterField operation on primary keys changes any FKs pointing to it.
"""
project_state = self.set_up_test_model("test_alflpkfk", related_model=True)
project_state = self.apply_operations(
"test_alflpkfk",
project_state,
[
migrations.CreateModel(
"Stable",
fields=[
("ponies", models.ManyToManyField("Pony")),
],
),
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField("Stable"),
),
],
)
# Test the state alteration
operation = migrations.AlterField(
"Pony", "id", models.FloatField(primary_key=True)
)
new_state = project_state.clone()
operation.state_forwards("test_alflpkfk", new_state)
self.assertIsInstance(
project_state.models["test_alflpkfk", "pony"].fields["id"],
models.AutoField,
)
self.assertIsInstance(
new_state.models["test_alflpkfk", "pony"].fields["id"],
models.FloatField,
)
def assertIdTypeEqualsFkType():
with connection.cursor() as cursor:
id_type, id_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(
cursor, "test_alflpkfk_pony"
)
if c.name == "id"
][0]
fk_type, fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(
cursor, "test_alflpkfk_rider"
)
if c.name == "pony_id"
][0]
m2m_fk_type, m2m_fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(
cursor,
"test_alflpkfk_pony_stables",
)
if c.name == "pony_id"
][0]
remote_m2m_fk_type, remote_m2m_fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(
cursor,
"test_alflpkfk_stable_ponies",
)
if c.name == "pony_id"
][0]
self.assertEqual(id_type, fk_type)
self.assertEqual(id_type, m2m_fk_type)
self.assertEqual(id_type, remote_m2m_fk_type)
self.assertEqual(id_null, fk_null)
self.assertEqual(id_null, m2m_fk_null)
self.assertEqual(id_null, remote_m2m_fk_null)
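        # Comparing introspected type_code/null_ok pairs is backend-agnostic:
        # if the pk type change failed to cascade to the direct FK or either
        # m2m join column, at least one pair above would diverge.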
assertIdTypeEqualsFkType()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_alflpkfk", editor, project_state, new_state
)
assertIdTypeEqualsFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_alflpkfk_pony_stables",
["pony_id"],
("test_alflpkfk_pony", "id"),
)
self.assertFKExists(
"test_alflpkfk_stable_ponies",
["pony_id"],
("test_alflpkfk_pony", "id"),
)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alflpkfk", editor, new_state, project_state
)
assertIdTypeEqualsFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_alflpkfk_pony_stables",
["pony_id"],
("test_alflpkfk_pony", "id"),
)
self.assertFKExists(
"test_alflpkfk_stable_ponies",
["pony_id"],
("test_alflpkfk_pony", "id"),
)
@skipUnlessDBFeature("supports_collation_on_charfield", "supports_foreign_keys")
def test_alter_field_pk_fk_db_collation(self):
"""
AlterField operation of db_collation on primary keys changes any FKs
pointing to it.
"""
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
app_label = "test_alflpkfkdbc"
project_state = self.apply_operations(
app_label,
ProjectState(),
[
migrations.CreateModel(
"Pony",
[
("id", models.CharField(primary_key=True, max_length=10)),
],
),
migrations.CreateModel(
"Rider",
[
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
),
migrations.CreateModel(
"Stable",
[
("ponies", models.ManyToManyField("Pony")),
],
),
],
)
# State alteration.
operation = migrations.AlterField(
"Pony",
"id",
models.CharField(
primary_key=True,
max_length=10,
db_collation=collation,
),
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
# Database alteration.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnCollation(f"{app_label}_pony", "id", collation)
self.assertColumnCollation(f"{app_label}_rider", "pony_id", collation)
self.assertColumnCollation(f"{app_label}_stable_ponies", "pony_id", collation)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
def test_alter_field_pk_mti_fk(self):
app_label = "test_alflpkmtifk"
project_state = self.set_up_test_model(app_label, mti_model=True)
project_state = self.apply_operations(
app_label,
project_state,
[
migrations.CreateModel(
"ShetlandRider",
fields=[
(
"pony",
models.ForeignKey(
f"{app_label}.ShetlandPony", models.CASCADE
),
),
],
),
],
)
operation = migrations.AlterField(
"Pony",
"id",
models.BigAutoField(primary_key=True),
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertIsInstance(
new_state.models[app_label, "pony"].fields["id"],
models.BigAutoField,
)
def _get_column_id_type(cursor, table, column):
return [
c.type_code
for c in connection.introspection.get_table_description(
cursor,
f"{app_label}_{table}",
)
if c.name == column
][0]
def assertIdTypeEqualsMTIFkType():
with connection.cursor() as cursor:
parent_id_type = _get_column_id_type(cursor, "pony", "id")
child_id_type = _get_column_id_type(
cursor, "shetlandpony", "pony_ptr_id"
)
mti_id_type = _get_column_id_type(cursor, "shetlandrider", "pony_id")
self.assertEqual(parent_id_type, child_id_type)
self.assertEqual(parent_id_type, mti_id_type)
assertIdTypeEqualsMTIFkType()
# Alter primary key.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertIdTypeEqualsMTIFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
f"{app_label}_shetlandpony",
["pony_ptr_id"],
(f"{app_label}_pony", "id"),
)
self.assertFKExists(
f"{app_label}_shetlandrider",
["pony_id"],
(f"{app_label}_shetlandpony", "pony_ptr_id"),
)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertIdTypeEqualsMTIFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
f"{app_label}_shetlandpony",
["pony_ptr_id"],
(f"{app_label}_pony", "id"),
)
self.assertFKExists(
f"{app_label}_shetlandrider",
["pony_id"],
(f"{app_label}_shetlandpony", "pony_ptr_id"),
)
def test_alter_field_pk_mti_and_fk_to_base(self):
app_label = "test_alflpkmtiftb"
project_state = self.set_up_test_model(
app_label,
mti_model=True,
related_model=True,
)
operation = migrations.AlterField(
"Pony",
"id",
models.BigAutoField(primary_key=True),
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertIsInstance(
new_state.models[app_label, "pony"].fields["id"],
models.BigAutoField,
)
def _get_column_id_type(cursor, table, column):
return [
c.type_code
for c in connection.introspection.get_table_description(
cursor,
f"{app_label}_{table}",
)
if c.name == column
][0]
def assertIdTypeEqualsMTIFkType():
with connection.cursor() as cursor:
parent_id_type = _get_column_id_type(cursor, "pony", "id")
fk_id_type = _get_column_id_type(cursor, "rider", "pony_id")
child_id_type = _get_column_id_type(
cursor, "shetlandpony", "pony_ptr_id"
)
self.assertEqual(parent_id_type, child_id_type)
self.assertEqual(parent_id_type, fk_id_type)
assertIdTypeEqualsMTIFkType()
# Alter primary key.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertIdTypeEqualsMTIFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
f"{app_label}_shetlandpony",
["pony_ptr_id"],
(f"{app_label}_pony", "id"),
)
self.assertFKExists(
f"{app_label}_rider",
["pony_id"],
(f"{app_label}_pony", "id"),
)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertIdTypeEqualsMTIFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
f"{app_label}_shetlandpony",
["pony_ptr_id"],
(f"{app_label}_pony", "id"),
)
self.assertFKExists(
f"{app_label}_rider",
["pony_id"],
(f"{app_label}_pony", "id"),
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self):
app_label = "test_alflrsfkwtflttc"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("code", models.IntegerField(unique=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
(
"rider",
models.ForeignKey(
"%s.Rider" % app_label, models.CASCADE, to_field="code"
),
),
],
),
],
)
operation = migrations.AlterField(
"Rider",
"code",
models.CharField(max_length=100, unique=True),
)
self.apply_operations(app_label, project_state, operations=[operation])
id_type, id_null = [
(c.type_code, c.null_ok)
for c in self.get_table_description("%s_rider" % app_label)
if c.name == "code"
][0]
fk_type, fk_null = [
(c.type_code, c.null_ok)
for c in self.get_table_description("%s_pony" % app_label)
if c.name == "rider_id"
][0]
self.assertEqual(id_type, fk_type)
self.assertEqual(id_null, fk_null)
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_field_reloads_state_fk_with_to_field_related_name_target_type_change(
self,
):
app_label = "test_alflrsfkwtflrnttc"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("code", models.PositiveIntegerField(unique=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
(
"rider",
models.ForeignKey(
"%s.Rider" % app_label,
models.CASCADE,
to_field="code",
related_name="+",
),
),
],
),
],
)
operation = migrations.AlterField(
"Rider",
"code",
models.CharField(max_length=100, unique=True),
)
self.apply_operations(app_label, project_state, operations=[operation])
def test_alter_field_reloads_state_on_fk_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = "alter_alter_field_reloads_state_on_fk_target_changes"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
(
"rider",
models.ForeignKey("%s.Rider" % app_label, models.CASCADE),
),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"pony",
models.ForeignKey("%s.Pony" % app_label, models.CASCADE),
),
],
),
],
)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.AlterField(
"Rider", "id", models.CharField(primary_key=True, max_length=99)
),
migrations.AlterField(
"Pony", "id", models.CharField(primary_key=True, max_length=99)
),
],
)
def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = "alter_alter_field_reloads_state_on_fk_with_to_field_target_changes"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
("slug", models.CharField(unique=True, max_length=100)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
(
"rider",
models.ForeignKey(
"%s.Rider" % app_label, models.CASCADE, to_field="slug"
),
),
("slug", models.CharField(unique=True, max_length=100)),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"pony",
models.ForeignKey(
"%s.Pony" % app_label, models.CASCADE, to_field="slug"
),
),
],
),
],
)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.AlterField(
"Rider", "slug", models.CharField(unique=True, max_length=99)
),
migrations.AlterField(
"Pony", "slug", models.CharField(unique=True, max_length=99)
),
],
)
def test_alter_field_pk_fk_char_to_int(self):
app_label = "alter_field_pk_fk_char_to_int"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
name="Parent",
fields=[
("id", models.CharField(max_length=255, primary_key=True)),
],
),
migrations.CreateModel(
name="Child",
fields=[
("id", models.BigAutoField(primary_key=True)),
(
"parent",
models.ForeignKey(
f"{app_label}.Parent",
on_delete=models.CASCADE,
),
),
],
),
],
)
self.apply_operations(
app_label,
project_state,
operations=[
migrations.AlterField(
model_name="parent",
name="id",
field=models.BigIntegerField(primary_key=True),
),
],
)
def test_rename_field_reloads_state_on_fk_target_changes(self):
"""
If RenameField doesn't reload state appropriately, the AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = "alter_rename_field_reloads_state_on_fk_target_changes"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
(
"rider",
models.ForeignKey("%s.Rider" % app_label, models.CASCADE),
),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"pony",
models.ForeignKey("%s.Pony" % app_label, models.CASCADE),
),
],
),
],
)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameField("Rider", "id", "id2"),
migrations.AlterField(
"Pony", "id", models.CharField(primary_key=True, max_length=99)
),
],
atomic=connection.features.supports_atomic_references_rename,
)
def test_rename_field(self):
"""
Tests the RenameField operation.
"""
project_state = self.set_up_test_model("test_rnfl")
operation = migrations.RenameField("Pony", "pink", "blue")
self.assertEqual(operation.describe(), "Rename field pink on Pony to blue")
self.assertEqual(operation.migration_name_fragment, "rename_pink_pony_blue")
new_state = project_state.clone()
operation.state_forwards("test_rnfl", new_state)
self.assertIn("blue", new_state.models["test_rnfl", "pony"].fields)
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].fields)
# Rename field.
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnfl", editor, project_state, new_state)
self.assertColumnExists("test_rnfl_pony", "blue")
self.assertColumnNotExists("test_rnfl_pony", "pink")
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards("test_rnfl", editor, new_state, project_state)
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameField")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "old_name": "pink", "new_name": "blue"},
)
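    # A minimal sketch of the forwards DDL (assumed; SQLite < 3.25 rebuilds
    # the table instead, since it lacks RENAME COLUMN):
    #   ALTER TABLE "test_rnfl_pony" RENAME COLUMN "pink" TO "blue";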
def test_rename_field_unique_together(self):
project_state = self.set_up_test_model("test_rnflut", unique_together=True)
operation = migrations.RenameField("Pony", "pink", "blue")
new_state = project_state.clone()
operation.state_forwards("test_rnflut", new_state)
# unique_together has the renamed column.
self.assertIn(
"blue",
new_state.models["test_rnflut", "pony"].options["unique_together"][0],
)
self.assertNotIn(
"pink",
new_state.models["test_rnflut", "pony"].options["unique_together"][0],
)
# Rename field.
self.assertColumnExists("test_rnflut_pony", "pink")
self.assertColumnNotExists("test_rnflut_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnflut", editor, project_state, new_state)
self.assertColumnExists("test_rnflut_pony", "blue")
self.assertColumnNotExists("test_rnflut_pony", "pink")
# The unique constraint has been ported over.
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_rnflut_pony (blue, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute(
"INSERT INTO test_rnflut_pony (blue, weight) VALUES (1, 1)"
)
cursor.execute("DELETE FROM test_rnflut_pony")
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(
"test_rnflut", editor, new_state, project_state
)
self.assertColumnExists("test_rnflut_pony", "pink")
self.assertColumnNotExists("test_rnflut_pony", "blue")
@ignore_warnings(category=RemovedInDjango51Warning)
def test_rename_field_index_together(self):
project_state = self.set_up_test_model("test_rnflit", index_together=True)
operation = migrations.RenameField("Pony", "pink", "blue")
new_state = project_state.clone()
operation.state_forwards("test_rnflit", new_state)
self.assertIn("blue", new_state.models["test_rnflit", "pony"].fields)
self.assertNotIn("pink", new_state.models["test_rnflit", "pony"].fields)
# index_together has the renamed column.
self.assertIn(
"blue", new_state.models["test_rnflit", "pony"].options["index_together"][0]
)
self.assertNotIn(
"pink", new_state.models["test_rnflit", "pony"].options["index_together"][0]
)
# Rename field.
self.assertColumnExists("test_rnflit_pony", "pink")
self.assertColumnNotExists("test_rnflit_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnflit", editor, project_state, new_state)
self.assertColumnExists("test_rnflit_pony", "blue")
self.assertColumnNotExists("test_rnflit_pony", "pink")
# The index constraint has been ported over.
self.assertIndexExists("test_rnflit_pony", ["weight", "blue"])
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(
"test_rnflit", editor, new_state, project_state
)
self.assertIndexExists("test_rnflit_pony", ["weight", "pink"])
def test_rename_field_with_db_column(self):
project_state = self.apply_operations(
"test_rfwdbc",
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField(db_column="db_field")),
(
"fk_field",
models.ForeignKey(
"Pony",
models.CASCADE,
db_column="db_fk_field",
),
),
],
),
],
)
new_state = project_state.clone()
operation = migrations.RenameField("Pony", "field", "renamed_field")
operation.state_forwards("test_rfwdbc", new_state)
self.assertIn("renamed_field", new_state.models["test_rfwdbc", "pony"].fields)
self.assertNotIn("field", new_state.models["test_rfwdbc", "pony"].fields)
self.assertColumnExists("test_rfwdbc_pony", "db_field")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(
"test_rfwdbc", editor, project_state, new_state
)
self.assertColumnExists("test_rfwdbc_pony", "db_field")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_backwards(
"test_rfwdbc", editor, new_state, project_state
)
self.assertColumnExists("test_rfwdbc_pony", "db_field")
new_state = project_state.clone()
operation = migrations.RenameField("Pony", "fk_field", "renamed_fk_field")
operation.state_forwards("test_rfwdbc", new_state)
self.assertIn(
"renamed_fk_field", new_state.models["test_rfwdbc", "pony"].fields
)
self.assertNotIn("fk_field", new_state.models["test_rfwdbc", "pony"].fields)
self.assertColumnExists("test_rfwdbc_pony", "db_fk_field")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(
"test_rfwdbc", editor, project_state, new_state
)
self.assertColumnExists("test_rfwdbc_pony", "db_fk_field")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_backwards(
"test_rfwdbc", editor, new_state, project_state
)
self.assertColumnExists("test_rfwdbc_pony", "db_fk_field")
def test_rename_field_case(self):
project_state = self.apply_operations(
"test_rfmx",
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField()),
],
),
],
)
new_state = project_state.clone()
operation = migrations.RenameField("Pony", "field", "FiElD")
operation.state_forwards("test_rfmx", new_state)
self.assertIn("FiElD", new_state.models["test_rfmx", "pony"].fields)
self.assertColumnExists("test_rfmx_pony", "field")
with connection.schema_editor() as editor:
operation.database_forwards("test_rfmx", editor, project_state, new_state)
self.assertColumnExists(
"test_rfmx_pony",
connection.introspection.identifier_converter("FiElD"),
)
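        # identifier_converter() maps the name to the backend's preferred case
        # (Oracle, for example, folds identifiers to upper case), so the lookup
        # holds even where column names aren't stored verbatim.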
with connection.schema_editor() as editor:
operation.database_backwards("test_rfmx", editor, new_state, project_state)
self.assertColumnExists("test_rfmx_pony", "field")
def test_rename_missing_field(self):
state = ProjectState()
state.add_model(ModelState("app", "model", []))
with self.assertRaisesMessage(
FieldDoesNotExist, "app.model has no field named 'field'"
):
migrations.RenameField("model", "field", "new_field").state_forwards(
"app", state
)
def test_rename_referenced_field_state_forward(self):
state = ProjectState()
state.add_model(
ModelState(
"app",
"Model",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField(unique=True)),
],
)
)
state.add_model(
ModelState(
"app",
"OtherModel",
[
("id", models.AutoField(primary_key=True)),
(
"fk",
models.ForeignKey("Model", models.CASCADE, to_field="field"),
),
(
"fo",
models.ForeignObject(
"Model",
models.CASCADE,
from_fields=("fk",),
to_fields=("field",),
),
),
],
)
)
operation = migrations.RenameField("Model", "field", "renamed")
new_state = state.clone()
operation.state_forwards("app", new_state)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fk"].remote_field.field_name,
"renamed",
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fk"].from_fields, ["self"]
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fk"].to_fields, ("renamed",)
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fo"].from_fields, ("fk",)
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fo"].to_fields, ("renamed",)
)
operation = migrations.RenameField("OtherModel", "fk", "renamed_fk")
new_state = state.clone()
operation.state_forwards("app", new_state)
self.assertEqual(
new_state.models["app", "othermodel"]
.fields["renamed_fk"]
.remote_field.field_name,
"renamed",
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["renamed_fk"].from_fields,
("self",),
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["renamed_fk"].to_fields,
("renamed",),
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fo"].from_fields,
("renamed_fk",),
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fo"].to_fields, ("renamed",)
)
def test_alter_unique_together(self):
"""
Tests the AlterUniqueTogether operation.
"""
project_state = self.set_up_test_model("test_alunto")
# Test the state alteration
operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")])
self.assertEqual(
operation.describe(), "Alter unique_together for Pony (1 constraint(s))"
)
self.assertEqual(
operation.migration_name_fragment,
"alter_pony_unique_together",
)
new_state = project_state.clone()
operation.state_forwards("test_alunto", new_state)
self.assertEqual(
len(
project_state.models["test_alunto", "pony"].options.get(
"unique_together", set()
)
),
0,
)
self.assertEqual(
len(
new_state.models["test_alunto", "pony"].options.get(
"unique_together", set()
)
),
1,
)
# Make sure we can insert duplicate rows
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_alunto", editor, project_state, new_state
)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute(
"INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)"
)
cursor.execute("DELETE FROM test_alunto_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alunto", editor, new_state, project_state
)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test flat unique_together
operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight"))
operation.state_forwards("test_alunto", new_state)
self.assertEqual(
len(
new_state.models["test_alunto", "pony"].options.get(
"unique_together", set()
)
),
1,
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterUniqueTogether")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"name": "Pony", "unique_together": {("pink", "weight")}}
)
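
    # Sketch of the equivalent project-migration declaration for the operation
    # exercised above (hypothetical app/model names); passing an empty set or
    # None removes all unique_together constraints:
    #
    #   migrations.AlterUniqueTogether(
    #       name="pony",
    #       unique_together={("pink", "weight")},
    #   )
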
def test_alter_unique_together_remove(self):
operation = migrations.AlterUniqueTogether("Pony", None)
self.assertEqual(
operation.describe(), "Alter unique_together for Pony (0 constraint(s))"
)
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_unique_together_on_pk_field(self):
app_label = "test_rutopkf"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[("id", models.AutoField(primary_key=True))],
options={"unique_together": {("id",)}},
),
],
)
table_name = f"{app_label}_pony"
pk_constraint_name = f"{table_name}_pkey"
unique_together_constraint_name = f"{table_name}_id_fb61f881_uniq"
self.assertConstraintExists(table_name, pk_constraint_name, value=False)
self.assertConstraintExists(
table_name, unique_together_constraint_name, value=False
)
new_state = project_state.clone()
operation = migrations.AlterUniqueTogether("Pony", set())
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertConstraintExists(table_name, pk_constraint_name, value=False)
self.assertConstraintNotExists(table_name, unique_together_constraint_name)
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_unique_together_on_unique_field(self):
app_label = "test_rutouf"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=30, unique=True)),
],
options={"unique_together": {("name",)}},
),
],
)
table_name = f"{app_label}_pony"
unique_constraint_name = f"{table_name}_name_key"
unique_together_constraint_name = f"{table_name}_name_694f3b9f_uniq"
self.assertConstraintExists(table_name, unique_constraint_name, value=False)
self.assertConstraintExists(
table_name, unique_together_constraint_name, value=False
)
new_state = project_state.clone()
operation = migrations.AlterUniqueTogether("Pony", set())
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertConstraintExists(table_name, unique_constraint_name, value=False)
self.assertConstraintNotExists(table_name, unique_together_constraint_name)
def test_add_index(self):
"""
Test the AddIndex operation.
"""
project_state = self.set_up_test_model("test_adin")
msg = (
"Indexes passed to AddIndex operations require a name argument. "
"<Index: fields=['pink']> doesn't have one."
)
with self.assertRaisesMessage(ValueError, msg):
migrations.AddIndex("Pony", models.Index(fields=["pink"]))
index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx")
operation = migrations.AddIndex("Pony", index)
self.assertEqual(
operation.describe(),
"Create index test_adin_pony_pink_idx on field(s) pink of model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"pony_test_adin_pony_pink_idx",
)
new_state = project_state.clone()
operation.state_forwards("test_adin", new_state)
# Test the database alteration
self.assertEqual(
len(new_state.models["test_adin", "pony"].options["indexes"]), 1
)
self.assertIndexNotExists("test_adin_pony", ["pink"])
with connection.schema_editor() as editor:
operation.database_forwards("test_adin", editor, project_state, new_state)
self.assertIndexExists("test_adin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adin", editor, new_state, project_state)
self.assertIndexNotExists("test_adin_pony", ["pink"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "index": index})
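
    # Sketch (hypothetical index name) of AddIndex as declared in a project
    # migration; the Index must be named, as the ValueError above enforces:
    #
    #   migrations.AddIndex(
    #       model_name="pony",
    #       index=models.Index(fields=["pink"], name="myapp_pony_pink_idx"),
    #   )
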
def test_remove_index(self):
"""
Test the RemoveIndex operation.
"""
project_state = self.set_up_test_model("test_rmin", multicol_index=True)
self.assertTableExists("test_rmin_pony")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
operation = migrations.RemoveIndex("Pony", "pony_test_idx")
self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony")
self.assertEqual(
operation.migration_name_fragment,
"remove_pony_pony_test_idx",
)
new_state = project_state.clone()
operation.state_forwards("test_rmin", new_state)
# Test the state alteration
self.assertEqual(
len(new_state.models["test_rmin", "pony"].options["indexes"]), 0
)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_rmin", editor, project_state, new_state)
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmin", editor, new_state, project_state)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "name": "pony_test_idx"})
        # Also test dropping a field that is part of an index (SQLite table
        # remake issue).
operations = [
migrations.RemoveIndex("Pony", "pony_test_idx"),
migrations.RemoveField("Pony", "pink"),
]
self.assertColumnExists("test_rmin_pony", "pink")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test database alteration
new_state = project_state.clone()
self.apply_operations("test_rmin", new_state, operations=operations)
self.assertColumnNotExists("test_rmin_pony", "pink")
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
self.unapply_operations("test_rmin", project_state, operations=operations)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
def test_rename_index(self):
app_label = "test_rnin"
project_state = self.set_up_test_model(app_label, index=True)
table_name = app_label + "_pony"
self.assertIndexNameExists(table_name, "pony_pink_idx")
self.assertIndexNameNotExists(table_name, "new_pony_test_idx")
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_test_idx", old_name="pony_pink_idx"
)
self.assertEqual(
operation.describe(),
"Rename index pony_pink_idx on Pony to new_pony_test_idx",
)
self.assertEqual(
operation.migration_name_fragment,
"rename_pony_pink_idx_new_pony_test_idx",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
# Rename index.
expected_queries = 1 if connection.features.can_rename_index else 2
with connection.schema_editor() as editor, self.assertNumQueries(
expected_queries
):
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameNotExists(table_name, "pony_pink_idx")
self.assertIndexNameExists(table_name, "new_pony_test_idx")
# Reversal.
with connection.schema_editor() as editor, self.assertNumQueries(
expected_queries
):
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, "pony_pink_idx")
self.assertIndexNameNotExists(table_name, "new_pony_test_idx")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameIndex")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"old_name": "pony_pink_idx",
"new_name": "new_pony_test_idx",
},
)
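
    # Sketch of the two RenameIndex forms (hypothetical names): old_name for a
    # named index, old_fields for an unnamed index_together-style index; the
    # two keyword arguments are mutually exclusive, as tested below:
    #
    #   migrations.RenameIndex(
    #       "pony", new_name="new_idx", old_name="old_idx"
    #   )
    #   migrations.RenameIndex(
    #       "pony", new_name="new_idx", old_fields=("weight", "pink")
    #   )
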
def test_rename_index_arguments(self):
msg = "RenameIndex.old_name and old_fields are mutually exclusive."
with self.assertRaisesMessage(ValueError, msg):
migrations.RenameIndex(
"Pony",
new_name="new_idx_name",
old_name="old_idx_name",
old_fields=("weight", "pink"),
)
msg = "RenameIndex requires one of old_name and old_fields arguments to be set."
with self.assertRaisesMessage(ValueError, msg):
migrations.RenameIndex("Pony", new_name="new_idx_name")
@ignore_warnings(category=RemovedInDjango51Warning)
def test_rename_index_unnamed_index(self):
app_label = "test_rninui"
project_state = self.set_up_test_model(app_label, index_together=True)
table_name = app_label + "_pony"
self.assertIndexNameNotExists(table_name, "new_pony_test_idx")
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_test_idx", old_fields=("weight", "pink")
)
self.assertEqual(
operation.describe(),
"Rename unnamed index for ('weight', 'pink') on Pony to new_pony_test_idx",
)
self.assertEqual(
operation.migration_name_fragment,
"rename_pony_weight_pink_new_pony_test_idx",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
# Rename index.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameExists(table_name, "new_pony_test_idx")
# Reverse is a no-op.
with connection.schema_editor() as editor, self.assertNumQueries(0):
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, "new_pony_test_idx")
        # Reapply. The RenameIndex operation is a no-op when the old and new
        # names match.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, "new_pony_test_idx")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameIndex")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"new_name": "new_pony_test_idx",
"old_fields": ("weight", "pink"),
},
)
def test_rename_index_unknown_unnamed_index(self):
app_label = "test_rninuui"
project_state = self.set_up_test_model(app_label)
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_test_idx", old_fields=("weight", "pink")
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
msg = "Found wrong number (0) of indexes for test_rninuui_pony(weight, pink)."
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
operation.database_forwards(app_label, editor, project_state, new_state)
def test_add_index_state_forwards(self):
project_state = self.set_up_test_model("test_adinsf")
index = models.Index(fields=["pink"], name="test_adinsf_pony_pink_idx")
old_model = project_state.apps.get_model("test_adinsf", "Pony")
new_state = project_state.clone()
operation = migrations.AddIndex("Pony", index)
operation.state_forwards("test_adinsf", new_state)
new_model = new_state.apps.get_model("test_adinsf", "Pony")
self.assertIsNot(old_model, new_model)
def test_remove_index_state_forwards(self):
project_state = self.set_up_test_model("test_rminsf")
index = models.Index(fields=["pink"], name="test_rminsf_pony_pink_idx")
migrations.AddIndex("Pony", index).state_forwards("test_rminsf", project_state)
old_model = project_state.apps.get_model("test_rminsf", "Pony")
new_state = project_state.clone()
operation = migrations.RemoveIndex("Pony", "test_rminsf_pony_pink_idx")
operation.state_forwards("test_rminsf", new_state)
new_model = new_state.apps.get_model("test_rminsf", "Pony")
self.assertIsNot(old_model, new_model)
def test_rename_index_state_forwards(self):
app_label = "test_rnidsf"
project_state = self.set_up_test_model(app_label, index=True)
old_model = project_state.apps.get_model(app_label, "Pony")
new_state = project_state.clone()
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_pink_idx", old_name="pony_pink_idx"
)
operation.state_forwards(app_label, new_state)
new_model = new_state.apps.get_model(app_label, "Pony")
self.assertIsNot(old_model, new_model)
self.assertEqual(new_model._meta.indexes[0].name, "new_pony_pink_idx")
@ignore_warnings(category=RemovedInDjango51Warning)
def test_rename_index_state_forwards_unnamed_index(self):
app_label = "test_rnidsfui"
project_state = self.set_up_test_model(app_label, index_together=True)
old_model = project_state.apps.get_model(app_label, "Pony")
new_state = project_state.clone()
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_pink_idx", old_fields=("weight", "pink")
)
operation.state_forwards(app_label, new_state)
new_model = new_state.apps.get_model(app_label, "Pony")
self.assertIsNot(old_model, new_model)
self.assertEqual(new_model._meta.index_together, tuple())
self.assertEqual(new_model._meta.indexes[0].name, "new_pony_pink_idx")
@skipUnlessDBFeature("supports_expression_indexes")
def test_add_func_index(self):
app_label = "test_addfuncin"
index_name = f"{app_label}_pony_abs_idx"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(app_label)
index = models.Index(Abs("weight"), name=index_name)
operation = migrations.AddIndex("Pony", index)
self.assertEqual(
operation.describe(),
"Create index test_addfuncin_pony_abs_idx on Abs(F(weight)) on model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"pony_test_addfuncin_pony_abs_idx",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(len(new_state.models[app_label, "pony"].options["indexes"]), 1)
self.assertIndexNameNotExists(table_name, index_name)
# Add index.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameExists(table_name, index_name)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameNotExists(table_name, index_name)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "index": index})
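
    # Sketch of a functional (expression) index declaration as used above;
    # requires a backend with supports_expression_indexes:
    #
    #   from django.db.models.functions import Abs
    #
    #   migrations.AddIndex(
    #       model_name="pony",
    #       index=models.Index(Abs("weight"), name="myapp_pony_abs_idx"),
    #   )
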
@skipUnlessDBFeature("supports_expression_indexes")
def test_remove_func_index(self):
app_label = "test_rmfuncin"
index_name = f"{app_label}_pony_abs_idx"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(
app_label,
indexes=[
models.Index(Abs("weight"), name=index_name),
],
)
self.assertTableExists(table_name)
self.assertIndexNameExists(table_name, index_name)
operation = migrations.RemoveIndex("Pony", index_name)
self.assertEqual(
operation.describe(),
"Remove index test_rmfuncin_pony_abs_idx from Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"remove_pony_test_rmfuncin_pony_abs_idx",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(len(new_state.models[app_label, "pony"].options["indexes"]), 0)
# Remove index.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameNotExists(table_name, index_name)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, index_name)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "name": index_name})
@skipUnlessDBFeature("supports_expression_indexes")
def test_alter_field_with_func_index(self):
app_label = "test_alfuncin"
index_name = f"{app_label}_pony_idx"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(
app_label,
indexes=[models.Index(Abs("pink"), name=index_name)],
)
operation = migrations.AlterField(
"Pony", "pink", models.IntegerField(null=True)
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameExists(table_name, index_name)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, index_name)
def test_alter_field_with_index(self):
"""
Test AlterField operation with an index to ensure indexes created via
Meta.indexes don't get dropped with sqlite3 remake.
"""
project_state = self.set_up_test_model("test_alflin", index=True)
operation = migrations.AlterField(
"Pony", "pink", models.IntegerField(null=True)
)
new_state = project_state.clone()
operation.state_forwards("test_alflin", new_state)
# Test the database alteration
self.assertColumnNotNull("test_alflin_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alflin", editor, project_state, new_state)
# Index hasn't been dropped
self.assertIndexExists("test_alflin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alflin", editor, new_state, project_state
)
# Ensure the index is still there
self.assertIndexExists("test_alflin_pony", ["pink"])
@ignore_warnings(category=RemovedInDjango51Warning)
def test_alter_index_together(self):
"""
Tests the AlterIndexTogether operation.
"""
project_state = self.set_up_test_model("test_alinto")
# Test the state alteration
operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
self.assertEqual(
operation.describe(), "Alter index_together for Pony (1 constraint(s))"
)
self.assertEqual(
operation.migration_name_fragment,
"alter_pony_index_together",
)
new_state = project_state.clone()
operation.state_forwards("test_alinto", new_state)
self.assertEqual(
len(
project_state.models["test_alinto", "pony"].options.get(
"index_together", set()
)
),
0,
)
self.assertEqual(
len(
new_state.models["test_alinto", "pony"].options.get(
"index_together", set()
)
),
1,
)
# Make sure there's no matching index
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alinto", editor, project_state, new_state)
self.assertIndexExists("test_alinto_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alinto", editor, new_state, project_state
)
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterIndexTogether")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"name": "Pony", "index_together": {("pink", "weight")}}
)
def test_alter_index_together_remove(self):
operation = migrations.AlterIndexTogether("Pony", None)
self.assertEqual(
operation.describe(), "Alter index_together for Pony (0 constraint(s))"
)
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
@ignore_warnings(category=RemovedInDjango51Warning)
def test_alter_index_together_remove_with_unique_together(self):
app_label = "test_alintoremove_wunto"
table_name = "%s_pony" % app_label
project_state = self.set_up_test_model(app_label, unique_together=True)
self.assertUniqueConstraintExists(table_name, ["pink", "weight"])
# Add index together.
new_state = project_state.clone()
operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexExists(table_name, ["pink", "weight"])
# Remove index together.
project_state = new_state
new_state = project_state.clone()
operation = migrations.AlterIndexTogether("Pony", set())
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNotExists(table_name, ["pink", "weight"])
self.assertUniqueConstraintExists(table_name, ["pink", "weight"])
@skipUnlessDBFeature("supports_table_check_constraints")
def test_add_constraint(self):
project_state = self.set_up_test_model("test_addconstraint")
gt_check = models.Q(pink__gt=2)
gt_constraint = models.CheckConstraint(
check=gt_check, name="test_add_constraint_pony_pink_gt_2"
)
gt_operation = migrations.AddConstraint("Pony", gt_constraint)
self.assertEqual(
gt_operation.describe(),
"Create constraint test_add_constraint_pony_pink_gt_2 on model Pony",
)
self.assertEqual(
gt_operation.migration_name_fragment,
"pony_test_add_constraint_pony_pink_gt_2",
)
# Test the state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_addconstraint", new_state)
self.assertEqual(
len(new_state.models["test_addconstraint", "pony"].options["constraints"]),
1,
)
Pony = new_state.apps.get_model("test_addconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test the database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards(
"test_addconstraint", editor, project_state, new_state
)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=1.0)
# Add another one.
lt_check = models.Q(pink__lt=100)
lt_constraint = models.CheckConstraint(
check=lt_check, name="test_add_constraint_pony_pink_lt_100"
)
lt_operation = migrations.AddConstraint("Pony", lt_constraint)
lt_operation.state_forwards("test_addconstraint", new_state)
self.assertEqual(
len(new_state.models["test_addconstraint", "pony"].options["constraints"]),
2,
)
Pony = new_state.apps.get_model("test_addconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 2)
with connection.schema_editor() as editor:
lt_operation.database_forwards(
"test_addconstraint", editor, project_state, new_state
)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=100, weight=1.0)
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards(
"test_addconstraint", editor, new_state, project_state
)
Pony.objects.create(pink=1, weight=1.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"model_name": "Pony", "constraint": gt_constraint}
)
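
    # Sketch of the AddConstraint declaration exercised above (hypothetical
    # constraint name); on backends without check-constraint support the
    # operation is a no-op at the database level:
    #
    #   migrations.AddConstraint(
    #       model_name="pony",
    #       constraint=models.CheckConstraint(
    #           check=models.Q(pink__gt=2), name="myapp_pony_pink_gt_2"
    #       ),
    #   )
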
@skipUnlessDBFeature("supports_table_check_constraints")
def test_add_constraint_percent_escaping(self):
app_label = "add_constraint_string_quoting"
operations = [
migrations.CreateModel(
"Author",
fields=[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
("surname", models.CharField(max_length=100, default="")),
("rebate", models.CharField(max_length=100)),
],
),
]
from_state = self.apply_operations(app_label, ProjectState(), operations)
# "%" generated in startswith lookup should be escaped in a way that is
# considered a leading wildcard.
check = models.Q(name__startswith="Albert")
constraint = models.CheckConstraint(check=check, name="name_constraint")
operation = migrations.AddConstraint("Author", constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Author = to_state.apps.get_model(app_label, "Author")
with self.assertRaises(IntegrityError), transaction.atomic():
Author.objects.create(name="Artur")
# Literal "%" should be escaped in a way that is not a considered a
# wildcard.
check = models.Q(rebate__endswith="%")
constraint = models.CheckConstraint(check=check, name="rebate_constraint")
operation = migrations.AddConstraint("Author", constraint)
from_state = to_state
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Author = to_state.apps.get_model(app_label, "Author")
with self.assertRaises(IntegrityError), transaction.atomic():
Author.objects.create(name="Albert", rebate="10$")
author = Author.objects.create(name="Albert", rebate="10%")
self.assertEqual(Author.objects.get(), author)
        # "%" literals baked into the right-hand side should not be used for
        # parameter interpolation.
check = ~models.Q(surname__startswith=models.F("name"))
constraint = models.CheckConstraint(check=check, name="name_constraint_rhs")
operation = migrations.AddConstraint("Author", constraint)
from_state = to_state
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Author = to_state.apps.get_model(app_label, "Author")
with self.assertRaises(IntegrityError), transaction.atomic():
Author.objects.create(name="Albert", surname="Alberto")
@skipUnlessDBFeature("supports_table_check_constraints")
def test_add_or_constraint(self):
app_label = "test_addorconstraint"
constraint_name = "add_constraint_or"
from_state = self.set_up_test_model(app_label)
check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0)
constraint = models.CheckConstraint(check=check, name=constraint_name)
operation = migrations.AddConstraint("Pony", constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Pony = to_state.apps.get_model(app_label, "Pony")
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=2, weight=3.0)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=3, weight=1.0)
Pony.objects.bulk_create(
[
Pony(pink=3, weight=-1.0),
Pony(pink=1, weight=-1.0),
Pony(pink=3, weight=3.0),
]
)
@skipUnlessDBFeature("supports_table_check_constraints")
def test_add_constraint_combinable(self):
app_label = "test_addconstraint_combinable"
operations = [
migrations.CreateModel(
"Book",
fields=[
("id", models.AutoField(primary_key=True)),
("read", models.PositiveIntegerField()),
("unread", models.PositiveIntegerField()),
],
),
]
from_state = self.apply_operations(app_label, ProjectState(), operations)
constraint = models.CheckConstraint(
check=models.Q(read=(100 - models.F("unread"))),
name="test_addconstraint_combinable_sum_100",
)
operation = migrations.AddConstraint("Book", constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Book = to_state.apps.get_model(app_label, "Book")
with self.assertRaises(IntegrityError), transaction.atomic():
Book.objects.create(read=70, unread=10)
Book.objects.create(read=70, unread=30)
def test_remove_constraint(self):
project_state = self.set_up_test_model(
"test_removeconstraint",
constraints=[
models.CheckConstraint(
check=models.Q(pink__gt=2),
name="test_remove_constraint_pony_pink_gt_2",
),
models.CheckConstraint(
check=models.Q(pink__lt=100),
name="test_remove_constraint_pony_pink_lt_100",
),
],
)
gt_operation = migrations.RemoveConstraint(
"Pony", "test_remove_constraint_pony_pink_gt_2"
)
self.assertEqual(
gt_operation.describe(),
"Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony",
)
self.assertEqual(
gt_operation.migration_name_fragment,
"remove_pony_test_remove_constraint_pony_pink_gt_2",
)
# Test state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_removeconstraint", new_state)
self.assertEqual(
len(
new_state.models["test_removeconstraint", "pony"].options["constraints"]
),
1,
)
Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards(
"test_removeconstraint", editor, project_state, new_state
)
Pony.objects.create(pink=1, weight=1.0).delete()
if connection.features.supports_table_check_constraints:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=100, weight=1.0)
else:
Pony.objects.create(pink=100, weight=1.0)
# Remove the other one.
lt_operation = migrations.RemoveConstraint(
"Pony", "test_remove_constraint_pony_pink_lt_100"
)
lt_operation.state_forwards("test_removeconstraint", new_state)
self.assertEqual(
len(
new_state.models["test_removeconstraint", "pony"].options["constraints"]
),
0,
)
Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
with connection.schema_editor() as editor:
lt_operation.database_forwards(
"test_removeconstraint", editor, project_state, new_state
)
Pony.objects.create(pink=100, weight=1.0).delete()
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards(
"test_removeconstraint", editor, new_state, project_state
)
if connection.features.supports_table_check_constraints:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=1.0)
else:
Pony.objects.create(pink=1, weight=1.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "name": "test_remove_constraint_pony_pink_gt_2"},
)
def test_add_partial_unique_constraint(self):
project_state = self.set_up_test_model("test_addpartialuniqueconstraint")
partial_unique_constraint = models.UniqueConstraint(
fields=["pink"],
condition=models.Q(weight__gt=5),
name="test_constraint_pony_pink_for_weight_gt_5_uniq",
)
operation = migrations.AddConstraint("Pony", partial_unique_constraint)
self.assertEqual(
operation.describe(),
"Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq "
"on model Pony",
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_addpartialuniqueconstraint", new_state)
self.assertEqual(
len(
new_state.models["test_addpartialuniqueconstraint", "pony"].options[
"constraints"
]
),
1,
)
Pony = new_state.apps.get_model("test_addpartialuniqueconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_addpartialuniqueconstraint", editor, project_state, new_state
)
# Test constraint works
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_addpartialuniqueconstraint", editor, new_state, project_state
)
# Test constraint doesn't work
Pony.objects.create(pink=1, weight=7.0)
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "constraint": partial_unique_constraint},
)
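
    # Sketch of a partial (conditional) UniqueConstraint as used above
    # (hypothetical name); on backends without supports_partial_indexes the
    # constraint is skipped entirely, as the else branches above show:
    #
    #   models.UniqueConstraint(
    #       fields=["pink"],
    #       condition=models.Q(weight__gt=5),
    #       name="myapp_pony_pink_weight_gt_5_uniq",
    #   )
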
def test_remove_partial_unique_constraint(self):
project_state = self.set_up_test_model(
"test_removepartialuniqueconstraint",
constraints=[
models.UniqueConstraint(
fields=["pink"],
condition=models.Q(weight__gt=5),
name="test_constraint_pony_pink_for_weight_gt_5_uniq",
),
],
)
gt_operation = migrations.RemoveConstraint(
"Pony", "test_constraint_pony_pink_for_weight_gt_5_uniq"
)
self.assertEqual(
gt_operation.describe(),
"Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from "
"model Pony",
)
# Test state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_removepartialuniqueconstraint", new_state)
self.assertEqual(
len(
new_state.models["test_removepartialuniqueconstraint", "pony"].options[
"constraints"
]
),
0,
)
Pony = new_state.apps.get_model("test_removepartialuniqueconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
# Test database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards(
"test_removepartialuniqueconstraint", editor, project_state, new_state
)
# Test constraint doesn't work
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
Pony.objects.create(pink=1, weight=7.0).delete()
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards(
"test_removepartialuniqueconstraint", editor, new_state, project_state
)
# Test constraint works
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"name": "test_constraint_pony_pink_for_weight_gt_5_uniq",
},
)
def test_add_deferred_unique_constraint(self):
app_label = "test_adddeferred_uc"
project_state = self.set_up_test_model(app_label)
deferred_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="deferred_pink_constraint_add",
deferrable=models.Deferrable.DEFERRED,
)
operation = migrations.AddConstraint("Pony", deferred_unique_constraint)
self.assertEqual(
operation.describe(),
"Create constraint deferred_pink_constraint_add on model Pony",
)
# Add constraint.
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 1
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
with connection.schema_editor() as editor, CaptureQueriesContext(
connection
) as ctx:
operation.database_forwards(app_label, editor, project_state, new_state)
Pony.objects.create(pink=1, weight=4.0)
if connection.features.supports_deferrable_unique_constraints:
# Unique constraint is deferred.
with transaction.atomic():
obj = Pony.objects.create(pink=1, weight=4.0)
obj.pink = 2
obj.save()
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(
deferred_unique_constraint.name
)
cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
obj = Pony.objects.create(pink=1, weight=4.0)
obj.pink = 3
obj.save()
else:
self.assertEqual(len(ctx), 0)
Pony.objects.create(pink=1, weight=4.0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "constraint": deferred_unique_constraint},
)
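
    # Sketch of a deferrable UniqueConstraint as used above; DEFERRED delays
    # enforcement until transaction commit, and (on PostgreSQL) enforcement
    # can be made immediate with SET CONSTRAINTS <name> IMMEDIATE:
    #
    #   models.UniqueConstraint(
    #       fields=["pink"],
    #       name="myapp_pony_pink_deferred_uniq",
    #       deferrable=models.Deferrable.DEFERRED,
    #   )
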
def test_remove_deferred_unique_constraint(self):
app_label = "test_removedeferred_uc"
deferred_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="deferred_pink_constraint_rm",
deferrable=models.Deferrable.DEFERRED,
)
project_state = self.set_up_test_model(
app_label, constraints=[deferred_unique_constraint]
)
operation = migrations.RemoveConstraint("Pony", deferred_unique_constraint.name)
self.assertEqual(
operation.describe(),
"Remove constraint deferred_pink_constraint_rm from model Pony",
)
# Remove constraint.
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 0
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
with connection.schema_editor() as editor, CaptureQueriesContext(
connection
) as ctx:
operation.database_forwards(app_label, editor, project_state, new_state)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0).delete()
if not connection.features.supports_deferrable_unique_constraints:
self.assertEqual(len(ctx), 0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
if connection.features.supports_deferrable_unique_constraints:
# Unique constraint is deferred.
with transaction.atomic():
obj = Pony.objects.create(pink=1, weight=4.0)
obj.pink = 2
obj.save()
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(
deferred_unique_constraint.name
)
cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
obj = Pony.objects.create(pink=1, weight=4.0)
obj.pink = 3
obj.save()
else:
Pony.objects.create(pink=1, weight=4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"name": "deferred_pink_constraint_rm",
},
)
def test_add_covering_unique_constraint(self):
app_label = "test_addcovering_uc"
project_state = self.set_up_test_model(app_label)
covering_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="covering_pink_constraint_add",
include=["weight"],
)
operation = migrations.AddConstraint("Pony", covering_unique_constraint)
self.assertEqual(
operation.describe(),
"Create constraint covering_pink_constraint_add on model Pony",
)
# Add constraint.
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 1
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
with connection.schema_editor() as editor, CaptureQueriesContext(
connection
) as ctx:
operation.database_forwards(app_label, editor, project_state, new_state)
Pony.objects.create(pink=1, weight=4.0)
if connection.features.supports_covering_indexes:
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=4.0)
else:
self.assertEqual(len(ctx), 0)
Pony.objects.create(pink=1, weight=4.0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "constraint": covering_unique_constraint},
)
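
    # Sketch of a covering UniqueConstraint as used above (hypothetical name);
    # include= adds non-key columns to the underlying index, on backends with
    # supports_covering_indexes (e.g. PostgreSQL):
    #
    #   models.UniqueConstraint(
    #       fields=["pink"],
    #       include=["weight"],
    #       name="myapp_pony_pink_covering_uniq",
    #   )
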
def test_remove_covering_unique_constraint(self):
app_label = "test_removecovering_uc"
covering_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="covering_pink_constraint_rm",
include=["weight"],
)
project_state = self.set_up_test_model(
app_label, constraints=[covering_unique_constraint]
)
operation = migrations.RemoveConstraint("Pony", covering_unique_constraint.name)
self.assertEqual(
operation.describe(),
"Remove constraint covering_pink_constraint_rm from model Pony",
)
# Remove constraint.
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 0
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
with connection.schema_editor() as editor, CaptureQueriesContext(
connection
) as ctx:
operation.database_forwards(app_label, editor, project_state, new_state)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0).delete()
if not connection.features.supports_covering_indexes:
self.assertEqual(len(ctx), 0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
if connection.features.supports_covering_indexes:
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=4.0)
else:
Pony.objects.create(pink=1, weight=4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"name": "covering_pink_constraint_rm",
},
)
def test_alter_field_with_func_unique_constraint(self):
app_label = "test_alfuncuc"
constraint_name = f"{app_label}_pony_uq"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(
app_label,
constraints=[
models.UniqueConstraint("pink", "weight", name=constraint_name)
],
)
operation = migrations.AlterField(
"Pony", "pink", models.IntegerField(null=True)
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)
def test_add_func_unique_constraint(self):
app_label = "test_adfuncuc"
constraint_name = f"{app_label}_pony_abs_uq"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(app_label)
constraint = models.UniqueConstraint(Abs("weight"), name=constraint_name)
operation = migrations.AddConstraint("Pony", constraint)
self.assertEqual(
operation.describe(),
"Create constraint test_adfuncuc_pony_abs_uq on model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"pony_test_adfuncuc_pony_abs_uq",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 1
)
self.assertIndexNameNotExists(table_name, constraint_name)
# Add constraint.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
Pony = new_state.apps.get_model(app_label, "Pony")
Pony.objects.create(weight=4.0)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)
with self.assertRaises(IntegrityError):
Pony.objects.create(weight=-4.0)
else:
self.assertIndexNameNotExists(table_name, constraint_name)
Pony.objects.create(weight=-4.0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameNotExists(table_name, constraint_name)
# Constraint doesn't work.
Pony.objects.create(weight=-4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "constraint": constraint},
)
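
    # Sketch of a functional UniqueConstraint as used above: positional
    # expressions instead of fields=, enforcing uniqueness of Abs("weight"):
    #
    #   from django.db.models.functions import Abs
    #
    #   models.UniqueConstraint(Abs("weight"), name="myapp_pony_abs_uniq")
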
def test_remove_func_unique_constraint(self):
app_label = "test_rmfuncuc"
constraint_name = f"{app_label}_pony_abs_uq"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(
app_label,
constraints=[
models.UniqueConstraint(Abs("weight"), name=constraint_name),
],
)
self.assertTableExists(table_name)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)
operation = migrations.RemoveConstraint("Pony", constraint_name)
self.assertEqual(
operation.describe(),
"Remove constraint test_rmfuncuc_pony_abs_uq from model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"remove_pony_test_rmfuncuc_pony_abs_uq",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 0
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
# Remove constraint.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameNotExists(table_name, constraint_name)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=-4.0).delete()
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)
with self.assertRaises(IntegrityError):
Pony.objects.create(weight=-4.0)
else:
self.assertIndexNameNotExists(table_name, constraint_name)
Pony.objects.create(weight=-4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "name": constraint_name})
def test_alter_model_options(self):
"""
Tests the AlterModelOptions operation.
"""
project_state = self.set_up_test_model("test_almoop")
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions(
"Pony", {"permissions": [("can_groom", "Can groom")]}
)
self.assertEqual(operation.describe(), "Change Meta options on Pony")
self.assertEqual(operation.migration_name_fragment, "alter_pony_options")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(
len(
project_state.models["test_almoop", "pony"].options.get(
"permissions", []
)
),
0,
)
self.assertEqual(
len(new_state.models["test_almoop", "pony"].options.get("permissions", [])),
1,
)
self.assertEqual(
new_state.models["test_almoop", "pony"].options["permissions"][0][0],
"can_groom",
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"name": "Pony", "options": {"permissions": [("can_groom", "Can groom")]}},
)
def test_alter_model_options_emptying(self):
"""
The AlterModelOptions operation removes keys from the dict (#23121)
"""
project_state = self.set_up_test_model("test_almoop", options=True)
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(
len(
project_state.models["test_almoop", "pony"].options.get(
"permissions", []
)
),
1,
)
self.assertEqual(
len(new_state.models["test_almoop", "pony"].options.get("permissions", [])),
0,
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"name": "Pony", "options": {}})
def test_alter_order_with_respect_to(self):
"""
Tests the AlterOrderWithRespectTo operation.
"""
project_state = self.set_up_test_model("test_alorwrtto", related_model=True)
# Test the state alteration
operation = migrations.AlterOrderWithRespectTo("Rider", "pony")
self.assertEqual(
operation.describe(), "Set order_with_respect_to on Rider to pony"
)
self.assertEqual(
operation.migration_name_fragment,
"alter_rider_order_with_respect_to",
)
new_state = project_state.clone()
operation.state_forwards("test_alorwrtto", new_state)
self.assertIsNone(
project_state.models["test_alorwrtto", "rider"].options.get(
"order_with_respect_to", None
)
)
self.assertEqual(
new_state.models["test_alorwrtto", "rider"].options.get(
"order_with_respect_to", None
),
"pony",
)
        # Make sure there's no _order column yet.
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# Create some rows before alteration
rendered_state = project_state.apps
pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(
weight=50
)
rider1 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(
pony=pony
)
rider1.friend = rider1
rider1.save()
rider2 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(
pony=pony
)
rider2.friend = rider2
rider2.save()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_alorwrtto", editor, project_state, new_state
)
self.assertColumnExists("test_alorwrtto_rider", "_order")
# Check for correct value in rows
updated_riders = new_state.apps.get_model(
"test_alorwrtto", "Rider"
).objects.all()
self.assertEqual(updated_riders[0]._order, 0)
self.assertEqual(updated_riders[1]._order, 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alorwrtto", editor, new_state, project_state
)
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterOrderWithRespectTo")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"name": "Rider", "order_with_respect_to": "pony"}
)
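
    # Sketch of AlterOrderWithRespectTo as declared in a project migration
    # (hypothetical names); the operation adds or removes the hidden _order
    # column exercised above:
    #
    #   migrations.AlterOrderWithRespectTo(
    #       name="rider",
    #       order_with_respect_to="pony",
    #   )
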
def test_alter_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_almoma")
# Test the state alteration
operation = migrations.AlterModelManagers(
"Pony",
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
self.assertEqual(operation.describe(), "Change managers on Pony")
self.assertEqual(operation.migration_name_fragment, "alter_pony_managers")
managers = project_state.models["test_almoma", "pony"].managers
self.assertEqual(managers, [])
new_state = project_state.clone()
operation.state_forwards("test_almoma", new_state)
self.assertIn(("test_almoma", "pony"), new_state.models)
managers = new_state.models["test_almoma", "pony"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
rendered_state = new_state.apps
model = rendered_state.get_model("test_almoma", "pony")
self.assertIsInstance(model.food_qs, models.Manager)
self.assertIsInstance(model.food_mgr, FoodManager)
self.assertIsInstance(model.food_mgr_kwargs, FoodManager)
def test_alter_model_managers_emptying(self):
"""
        The managers on a model can be emptied.
"""
project_state = self.set_up_test_model("test_almomae", manager_model=True)
# Test the state alteration
operation = migrations.AlterModelManagers("Food", managers=[])
self.assertEqual(operation.describe(), "Change managers on Food")
self.assertIn(("test_almomae", "food"), project_state.models)
managers = project_state.models["test_almomae", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
new_state = project_state.clone()
operation.state_forwards("test_almomae", new_state)
managers = new_state.models["test_almomae", "food"].managers
self.assertEqual(managers, [])
def test_alter_fk(self):
"""
Creating and then altering an FK works correctly
and deals with the pending SQL (#23091)
"""
project_state = self.set_up_test_model("test_alfk")
# Test adding and then altering the FK in one go
create_operation = migrations.CreateModel(
name="Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
)
create_state = project_state.clone()
create_operation.state_forwards("test_alfk", create_state)
alter_operation = migrations.AlterField(
model_name="Rider",
name="pony",
field=models.ForeignKey("Pony", models.CASCADE, editable=False),
)
alter_state = create_state.clone()
alter_operation.state_forwards("test_alfk", alter_state)
with connection.schema_editor() as editor:
create_operation.database_forwards(
"test_alfk", editor, project_state, create_state
)
alter_operation.database_forwards(
"test_alfk", editor, create_state, alter_state
)
def test_alter_fk_non_fk(self):
"""
Altering an FK to a non-FK works (#23244)
"""
# Test the state alteration
operation = migrations.AlterField(
model_name="Rider",
name="pony",
field=models.FloatField(),
)
project_state, new_state = self.make_test_state(
"test_afknfk", operation, related_model=True
)
# Test the database alteration
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_afknfk", editor, project_state, new_state)
self.assertColumnExists("test_afknfk_rider", "pony")
self.assertColumnNotExists("test_afknfk_rider", "pony_id")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_afknfk", editor, new_state, project_state
)
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
def test_run_sql(self):
"""
Tests the RunSQL operation.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
            # Use a multi-line string with a comment to test statement
            # splitting on SQLite and comment handling on MySQL, respectively.
"CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n"
"INSERT INTO i_love_ponies (id, special_thing) "
"VALUES (1, 'i love ponies'); -- this is magic!\n"
"INSERT INTO i_love_ponies (id, special_thing) "
"VALUES (2, 'i love django');\n"
"UPDATE i_love_ponies SET special_thing = 'Ponies' "
"WHERE special_thing LIKE '%%ponies';"
"UPDATE i_love_ponies SET special_thing = 'Django' "
"WHERE special_thing LIKE '%django';",
# Run delete queries to test for parameter substitution failure
# reported in #23426
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';"
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';"
"DROP TABLE i_love_ponies",
state_operations=[
migrations.CreateModel(
"SomethingElse", [("id", models.AutoField(primary_key=True))]
)
],
)
self.assertEqual(operation.describe(), "Raw SQL operation")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_runsql", new_state)
self.assertEqual(
len(new_state.models["test_runsql", "somethingelse"].fields), 1
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test SQL collection
with connection.schema_editor(collect_sql=True) as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql))
operation.database_backwards(
"test_runsql", editor, project_state, new_state
)
self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql))
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 2)
cursor.execute(
"SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'"
)
self.assertEqual(cursor.fetchall()[0][0], 1)
cursor.execute(
"SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'"
)
self.assertEqual(cursor.fetchall()[0][0], 1)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(
"test_runsql", editor, new_state, project_state
)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunSQL")
self.assertEqual(definition[1], [])
self.assertEqual(
sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]
)
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunSQL("SELECT 1 FROM void;", elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_sql_params(self):
"""
#23426 - RunSQL should accept parameters.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"],
["DROP TABLE i_love_ponies"],
)
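        # Each statement may be a plain string, a [sql, params] list, or a
        # (sql, params) tuple; params=None runs the statement without any
        # placeholder substitution.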
param_operation = migrations.RunSQL(
# forwards
(
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');",
[
"INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);",
["Ponies"],
],
(
"INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);",
(
3,
"Python",
),
),
),
# backwards
[
"DELETE FROM i_love_ponies WHERE special_thing = 'Django';",
["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None],
(
"DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;",
[3, "Python"],
),
],
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
new_state = project_state.clone()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
# Test parameter passing
with connection.schema_editor() as editor:
param_operation.database_forwards(
"test_runsql", editor, project_state, new_state
)
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 3)
with connection.schema_editor() as editor:
param_operation.database_backwards(
"test_runsql", editor, new_state, project_state
)
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_runsql", editor, new_state, project_state
)
self.assertTableNotExists("i_love_ponies")
def test_run_sql_params_invalid(self):
"""
#23426 - RunSQL should fail when a list of statements with an incorrect
number of tuples is given.
"""
project_state = self.set_up_test_model("test_runsql")
new_state = project_state.clone()
operation = migrations.RunSQL(
# forwards
[["INSERT INTO foo (bar) VALUES ('buz');"]],
# backwards
(("DELETE FROM foo WHERE bar = 'buz';", "invalid", "parameter count"),),
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"):
operation.database_forwards(
"test_runsql", editor, project_state, new_state
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"):
operation.database_backwards(
"test_runsql", editor, new_state, project_state
)
def test_run_sql_noop(self):
"""
#24098 - Tests no-op RunSQL operations.
"""
operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, None, None)
operation.database_backwards("test_runsql", editor, None, None)
def test_run_sql_add_missing_semicolon_on_collect_sql(self):
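        # collect_sql should terminate every collected statement with
        # exactly one semicolon, whether or not the input SQL already
        # ends with one.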
project_state = self.set_up_test_model("test_runsql")
new_state = project_state.clone()
tests = [
"INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1);\n",
"INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1)\n",
]
for sql in tests:
with self.subTest(sql=sql):
operation = migrations.RunSQL(sql, migrations.RunPython.noop)
with connection.schema_editor(collect_sql=True) as editor:
operation.database_forwards(
"test_runsql", editor, project_state, new_state
)
collected_sql = "\n".join(editor.collected_sql)
self.assertEqual(collected_sql.count(";"), 1)
def test_run_python(self):
"""
Tests the RunPython operation
"""
project_state = self.set_up_test_model("test_runpython", mti_model=True)
# Create the operation
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.create(pink=1, weight=3.55)
Pony.objects.create(weight=5)
def inner_method_reverse(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.filter(pink=1, weight=3.55).delete()
Pony.objects.filter(weight=5).delete()
operation = migrations.RunPython(
inner_method, reverse_code=inner_method_reverse
)
self.assertEqual(operation.describe(), "Raw Python operation")
# Test the state alteration does nothing
new_state = project_state.clone()
operation.state_forwards("test_runpython", new_state)
self.assertEqual(new_state, project_state)
# Test the database alteration
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0
)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2
)
# Now test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(
"test_runpython", editor, project_state, new_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0
)
# Now test we can't use a string
with self.assertRaisesMessage(
ValueError, "RunPython must be supplied with a callable"
):
migrations.RunPython("print 'ahahaha'")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code", "reverse_code"])
# Also test reversal fails, with an operation identical to above but
# without reverse_code set.
no_reverse_operation = migrations.RunPython(inner_method)
self.assertFalse(no_reverse_operation.reversible)
with connection.schema_editor() as editor:
no_reverse_operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
with self.assertRaises(NotImplementedError):
no_reverse_operation.database_backwards(
"test_runpython", editor, new_state, project_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2
)
def create_ponies(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
pony1 = Pony.objects.create(pink=1, weight=3.55)
self.assertIsNot(pony1.pk, None)
pony2 = Pony.objects.create(weight=5)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_ponies)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code"])
def create_shetlandponies(models, schema_editor):
ShetlandPony = models.get_model("test_runpython", "ShetlandPony")
pony1 = ShetlandPony.objects.create(weight=4.0)
self.assertIsNot(pony1.pk, None)
pony2 = ShetlandPony.objects.create(weight=5.0)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_shetlandponies)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6
)
self.assertEqual(
project_state.apps.get_model(
"test_runpython", "ShetlandPony"
).objects.count(),
2,
)
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunPython(inner_method, elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_python_atomic(self):
"""
Tests the RunPython operation correctly handles the "atomic" keyword
"""
project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True)
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpythonatomic", "Pony")
Pony.objects.create(pink=1, weight=3.55)
raise ValueError("Adrian hates ponies.")
# Verify atomicity when applying.
atomic_migration = Migration("test", "test_runpythonatomic")
atomic_migration.operations = [
migrations.RunPython(inner_method, reverse_code=inner_method)
]
non_atomic_migration = Migration("test", "test_runpythonatomic")
non_atomic_migration.operations = [
migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)
]
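        # With atomic=False the RunPython code isn't wrapped in its own
        # transaction, so on backends that can't roll back DDL the row
        # created before the ValueError is expected to persist; see the
        # else branches below.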
# If we're a fully-transactional database, both versions should rollback
if connection.features.can_rollback_ddl:
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
# Otherwise, the non-atomic operation should leave a row there
else:
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
1,
)
# Reset object count to zero and verify atomicity when unapplying.
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.all().delete()
# On a fully-transactional database, both versions rollback.
if connection.features.can_rollback_ddl:
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
# Otherwise, the non-atomic operation leaves a row there.
else:
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
1,
)
# Verify deconstruction.
definition = non_atomic_migration.operations[0].deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"])
def test_run_python_related_assignment(self):
"""
#24282 - Model changes to a FK reverse side update the model
on the FK side as well.
"""
def inner_method(models, schema_editor):
Author = models.get_model("test_authors", "Author")
Book = models.get_model("test_books", "Book")
author = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author)
create_author = migrations.CreateModel(
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey("test_authors.Author", models.CASCADE)),
],
options={},
)
add_hometown = migrations.AddField(
"Author",
"hometown",
models.CharField(max_length=100),
)
create_old_man = migrations.RunPython(inner_method, inner_method)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_authors", new_state)
create_author.database_forwards(
"test_authors", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_books", new_state)
create_book.database_forwards(
"test_books", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
add_hometown.state_forwards("test_authors", new_state)
add_hometown.database_forwards(
"test_authors", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_old_man.state_forwards("test_books", new_state)
create_old_man.database_forwards(
"test_books", editor, project_state, new_state
)
def test_model_with_bigautofield(self):
"""
A model with BigAutoField can be created.
"""
def create_data(models, schema_editor):
Author = models.get_model("test_author", "Author")
Book = models.get_model("test_book", "Book")
author1 = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author1)
Book.objects.create(id=2**33, title="A farewell to arms", author=author1)
author2 = Author.objects.create(id=2**33, name="Remarque")
Book.objects.create(title="All quiet on the western front", author=author2)
Book.objects.create(title="Arc de Triomphe", author=author2)
create_author = migrations.CreateModel(
"Author",
[
("id", models.BigAutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.BigAutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
(
"author",
models.ForeignKey(
to="test_author.Author", on_delete=models.CASCADE
),
),
],
options={},
)
fill_data = migrations.RunPython(create_data)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_author", new_state)
create_author.database_forwards(
"test_author", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_book", new_state)
create_book.database_forwards("test_book", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_data.state_forwards("fill_data", new_state)
fill_data.database_forwards("fill_data", editor, project_state, new_state)
def _test_autofield_foreignfield_growth(
self, source_field, target_field, target_value
):
"""
A field may be migrated in the following ways:
- AutoField to BigAutoField
- SmallAutoField to AutoField
- SmallAutoField to BigAutoField
"""
def create_initial_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog = Blog.objects.create(name="web development done right")
Article.objects.create(name="Frameworks", blog=blog)
Article.objects.create(name="Programming Languages", blog=blog)
def create_big_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog2 = Blog.objects.create(name="Frameworks", id=target_value)
Article.objects.create(name="Django", blog=blog2)
Article.objects.create(id=target_value, name="Django2", blog=blog2)
create_blog = migrations.CreateModel(
"Blog",
[
("id", source_field(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_article = migrations.CreateModel(
"Article",
[
("id", source_field(primary_key=True)),
(
"blog",
models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE),
),
("name", models.CharField(max_length=100)),
("data", models.TextField(default="")),
],
options={},
)
fill_initial_data = migrations.RunPython(
create_initial_data, create_initial_data
)
fill_big_data = migrations.RunPython(create_big_data, create_big_data)
grow_article_id = migrations.AlterField(
"Article", "id", target_field(primary_key=True)
)
grow_blog_id = migrations.AlterField(
"Blog", "id", target_field(primary_key=True)
)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_blog.state_forwards("test_blog", new_state)
create_blog.database_forwards("test_blog", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_article.state_forwards("test_article", new_state)
create_article.database_forwards(
"test_article", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_initial_data.state_forwards("fill_initial_data", new_state)
fill_initial_data.database_forwards(
"fill_initial_data", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_article_id.state_forwards("test_article", new_state)
grow_article_id.database_forwards(
"test_article", editor, project_state, new_state
)
state = new_state.clone()
article = state.apps.get_model("test_article.Article")
self.assertIsInstance(article._meta.pk, target_field)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_blog_id.state_forwards("test_blog", new_state)
grow_blog_id.database_forwards(
"test_blog", editor, project_state, new_state
)
state = new_state.clone()
blog = state.apps.get_model("test_blog.Blog")
self.assertIsInstance(blog._meta.pk, target_field)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_big_data.state_forwards("fill_big_data", new_state)
fill_big_data.database_forwards(
"fill_big_data", editor, project_state, new_state
)
def test_autofield__bigautofield_foreignfield_growth(self):
"""A field may be migrated from AutoField to BigAutoField."""
self._test_autofield_foreignfield_growth(
models.AutoField,
models.BigAutoField,
2**33,
)
def test_smallfield_autofield_foreignfield_growth(self):
"""A field may be migrated from SmallAutoField to AutoField."""
self._test_autofield_foreignfield_growth(
models.SmallAutoField,
models.AutoField,
2**22,
)
def test_smallfield_bigautofield_foreignfield_growth(self):
"""A field may be migrated from SmallAutoField to BigAutoField."""
self._test_autofield_foreignfield_growth(
models.SmallAutoField,
models.BigAutoField,
2**33,
)
def test_run_python_noop(self):
"""
#24098 - Tests no-op RunPython operations.
"""
project_state = ProjectState()
new_state = project_state.clone()
operation = migrations.RunPython(
migrations.RunPython.noop, migrations.RunPython.noop
)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
operation.database_backwards(
"test_runpython", editor, new_state, project_state
)
def test_separate_database_and_state(self):
"""
Tests the SeparateDatabaseAndState operation.
"""
project_state = self.set_up_test_model("test_separatedatabaseandstate")
# Create the operation
database_operation = migrations.RunSQL(
"CREATE TABLE i_love_ponies (id int, special_thing int);",
"DROP TABLE i_love_ponies;",
)
state_operation = migrations.CreateModel(
"SomethingElse", [("id", models.AutoField(primary_key=True))]
)
operation = migrations.SeparateDatabaseAndState(
state_operations=[state_operation], database_operations=[database_operation]
)
self.assertEqual(
operation.describe(), "Custom state/database change combination"
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_separatedatabaseandstate", new_state)
self.assertEqual(
len(
new_state.models[
"test_separatedatabaseandstate", "somethingelse"
].fields
),
1,
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_separatedatabaseandstate", editor, project_state, new_state
)
self.assertTableExists("i_love_ponies")
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(
"test_separatedatabaseandstate", editor, new_state, project_state
)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "SeparateDatabaseAndState")
self.assertEqual(definition[1], [])
self.assertEqual(
sorted(definition[2]), ["database_operations", "state_operations"]
)
def test_separate_database_and_state2(self):
"""
A complex SeparateDatabaseAndState operation: Multiple operations both
for state and database. Verify the state dependencies within each list
and that state ops don't affect the database.
"""
app_label = "test_separatedatabaseandstate2"
project_state = self.set_up_test_model(app_label)
# Create the operation
database_operations = [
migrations.CreateModel(
"ILovePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveponies"},
),
migrations.CreateModel(
"ILoveMorePonies",
# We use IntegerField and not AutoField because
# the model is going to be deleted immediately
# and with an AutoField this fails on Oracle
[("id", models.IntegerField(primary_key=True))],
options={"db_table": "ilovemoreponies"},
),
migrations.DeleteModel("ILoveMorePonies"),
migrations.CreateModel(
"ILoveEvenMorePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveevenmoreponies"},
),
]
state_operations = [
migrations.CreateModel(
"SomethingElse",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingelse"},
),
migrations.DeleteModel("SomethingElse"),
migrations.CreateModel(
"SomethingCompletelyDifferent",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingcompletelydifferent"},
),
]
operation = migrations.SeparateDatabaseAndState(
state_operations=state_operations,
database_operations=database_operations,
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
def assertModelsAndTables(after_db):
# Tables and models exist, or don't, as they should:
self.assertNotIn((app_label, "somethingelse"), new_state.models)
self.assertEqual(
len(new_state.models[app_label, "somethingcompletelydifferent"].fields),
1,
)
self.assertNotIn((app_label, "iloveponiesonies"), new_state.models)
self.assertNotIn((app_label, "ilovemoreponies"), new_state.models)
self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models)
self.assertTableNotExists("somethingelse")
self.assertTableNotExists("somethingcompletelydifferent")
self.assertTableNotExists("ilovemoreponies")
if after_db:
self.assertTableExists("iloveponies")
self.assertTableExists("iloveevenmoreponies")
else:
self.assertTableNotExists("iloveponies")
self.assertTableNotExists("iloveevenmoreponies")
assertModelsAndTables(after_db=False)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertModelsAndTables(after_db=True)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertModelsAndTables(after_db=False)
class SwappableOperationTests(OperationTestBase):
"""
Key operations ignore swappable models
(we don't want to replicate all of them here, as the functionality
is in a common base class anyway)
"""
available_apps = ["migrations"]
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_create_ignore_swapped(self):
"""
The CreateTable operation ignores swapped models.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
options={
"swappable": "TEST_SWAP_MODEL",
},
)
# Test the state alteration (it should still be there!)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crigsw", new_state)
self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crigsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crigsw", editor, project_state, new_state)
self.assertTableNotExists("test_crigsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crigsw", editor, new_state, project_state
)
self.assertTableNotExists("test_crigsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_delete_ignore_swapped(self):
"""
Tests the DeleteModel operation ignores swapped models.
"""
operation = migrations.DeleteModel("Pony")
project_state, new_state = self.make_test_state("test_dligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_dligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dligsw", editor, project_state, new_state)
self.assertTableNotExists("test_dligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_dligsw", editor, new_state, project_state
)
self.assertTableNotExists("test_dligsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_add_field_ignore_swapped(self):
"""
Tests the AddField operation.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
project_state, new_state = self.make_test_state("test_adfligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_adfligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards(
"test_adfligsw", editor, project_state, new_state
)
self.assertTableNotExists("test_adfligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_adfligsw", editor, new_state, project_state
)
self.assertTableNotExists("test_adfligsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_indexes_ignore_swapped(self):
"""
Add/RemoveIndex operations ignore swapped models.
"""
operation = migrations.AddIndex(
"Pony", models.Index(fields=["pink"], name="my_name_idx")
)
project_state, new_state = self.make_test_state("test_adinigsw", operation)
with connection.schema_editor() as editor:
# No database queries should be run for swapped models
operation.database_forwards(
"test_adinigsw", editor, project_state, new_state
)
operation.database_backwards(
"test_adinigsw", editor, new_state, project_state
)
operation = migrations.RemoveIndex(
"Pony", models.Index(fields=["pink"], name="my_name_idx")
)
project_state, new_state = self.make_test_state("test_rminigsw", operation)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_rminigsw", editor, project_state, new_state
)
operation.database_backwards(
"test_rminigsw", editor, new_state, project_state
)
class TestCreateModel(SimpleTestCase):
def test_references_model_mixin(self):
migrations.CreateModel(
"name",
fields=[],
bases=(Mixin, models.Model),
).references_model("other_model", "migrations")
class FieldOperationTests(SimpleTestCase):
def test_references_model(self):
operation = FieldOperation(
"MoDel", "field", models.ForeignKey("Other", models.CASCADE)
)
# Model name match.
self.assertIs(operation.references_model("mOdEl", "migrations"), True)
# Referenced field.
self.assertIs(operation.references_model("oTher", "migrations"), True)
# Doesn't reference.
self.assertIs(operation.references_model("Whatever", "migrations"), False)
def test_references_field_by_name(self):
operation = FieldOperation("MoDel", "field", models.BooleanField(default=False))
self.assertIs(operation.references_field("model", "field", "migrations"), True)
def test_references_field_by_remote_field_model(self):
operation = FieldOperation(
"Model", "field", models.ForeignKey("Other", models.CASCADE)
)
self.assertIs(
operation.references_field("Other", "whatever", "migrations"), True
)
self.assertIs(
operation.references_field("Missing", "whatever", "migrations"), False
)
def test_references_field_by_from_fields(self):
operation = FieldOperation(
"Model",
"field",
models.fields.related.ForeignObject(
"Other", models.CASCADE, ["from"], ["to"]
),
)
self.assertIs(operation.references_field("Model", "from", "migrations"), True)
self.assertIs(operation.references_field("Model", "to", "migrations"), False)
self.assertIs(operation.references_field("Other", "from", "migrations"), False)
self.assertIs(operation.references_field("Model", "to", "migrations"), False)
def test_references_field_by_to_fields(self):
operation = FieldOperation(
"Model",
"field",
models.ForeignKey("Other", models.CASCADE, to_field="field"),
)
self.assertIs(operation.references_field("Other", "field", "migrations"), True)
self.assertIs(
operation.references_field("Other", "whatever", "migrations"), False
)
self.assertIs(
operation.references_field("Missing", "whatever", "migrations"), False
)
def test_references_field_by_through(self):
operation = FieldOperation(
"Model", "field", models.ManyToManyField("Other", through="Through")
)
self.assertIs(
operation.references_field("Other", "whatever", "migrations"), True
)
self.assertIs(
operation.references_field("Through", "whatever", "migrations"), True
)
self.assertIs(
operation.references_field("Missing", "whatever", "migrations"), False
)
def test_reference_field_by_through_fields(self):
operation = FieldOperation(
"Model",
"field",
models.ManyToManyField(
"Other", through="Through", through_fields=("first", "second")
),
)
self.assertIs(
operation.references_field("Other", "whatever", "migrations"), True
)
self.assertIs(
operation.references_field("Through", "whatever", "migrations"), False
)
self.assertIs(
operation.references_field("Through", "first", "migrations"), True
)
self.assertIs(
operation.references_field("Through", "second", "migrations"), True
)
import os
import shutil
import tempfile
from contextlib import contextmanager
from importlib import import_module
from django.apps import apps
from django.db import connection, connections, migrations, models
from django.db.migrations.migration import Migration
from django.db.migrations.recorder import MigrationRecorder
from django.db.migrations.state import ProjectState
from django.test import TransactionTestCase
from django.test.utils import extend_sys_path
from django.utils.module_loading import module_dir
class MigrationTestBase(TransactionTestCase):
"""
Contains an extended set of asserts for testing migrations and schema operations.
"""
available_apps = ["migrations"]
databases = {"default", "other"}
def tearDown(self):
# Reset applied-migrations state.
for db in self.databases:
recorder = MigrationRecorder(connections[db])
recorder.migration_qs.filter(app="migrations").delete()
def get_table_description(self, table, using="default"):
with connections[using].cursor() as cursor:
return connections[using].introspection.get_table_description(cursor, table)
def assertTableExists(self, table, using="default"):
with connections[using].cursor() as cursor:
self.assertIn(table, connections[using].introspection.table_names(cursor))
def assertTableNotExists(self, table, using="default"):
with connections[using].cursor() as cursor:
self.assertNotIn(
table, connections[using].introspection.table_names(cursor)
)
def assertColumnExists(self, table, column, using="default"):
self.assertIn(
column, [c.name for c in self.get_table_description(table, using=using)]
)
def assertColumnNotExists(self, table, column, using="default"):
self.assertNotIn(
column, [c.name for c in self.get_table_description(table, using=using)]
)
def _get_column_allows_null(self, table, column, using):
return [
c.null_ok
for c in self.get_table_description(table, using=using)
if c.name == column
][0]
def assertColumnNull(self, table, column, using="default"):
self.assertTrue(self._get_column_allows_null(table, column, using))
def assertColumnNotNull(self, table, column, using="default"):
self.assertFalse(self._get_column_allows_null(table, column, using))
def _get_column_collation(self, table, column, using):
return next(
f.collation
for f in self.get_table_description(table, using=using)
if f.name == column
)
def assertColumnCollation(self, table, column, collation, using="default"):
self.assertEqual(self._get_column_collation(table, column, using), collation)
def _get_table_comment(self, table, using):
with connections[using].cursor() as cursor:
return next(
t.comment
for t in connections[using].introspection.get_table_list(cursor)
if t.name == table
)
def assertTableComment(self, table, comment, using="default"):
self.assertEqual(self._get_table_comment(table, using), comment)
def assertTableCommentNotExists(self, table, using="default"):
self.assertIn(self._get_table_comment(table, using), [None, ""])
def assertIndexExists(
self, table, columns, value=True, using="default", index_type=None
):
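        # Only non-unique constraints count as indexes here; unique
        # constraints are covered by assertUniqueConstraintExists().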
with connections[using].cursor() as cursor:
self.assertEqual(
value,
any(
c["index"]
for c in connections[using]
.introspection.get_constraints(cursor, table)
.values()
if (
c["columns"] == list(columns)
and (index_type is None or c["type"] == index_type)
and not c["unique"]
)
),
)
def assertIndexNotExists(self, table, columns):
return self.assertIndexExists(table, columns, False)
def assertIndexNameExists(self, table, index, using="default"):
with connections[using].cursor() as cursor:
self.assertIn(
index,
                connections[using].introspection.get_constraints(cursor, table),
)
def assertIndexNameNotExists(self, table, index, using="default"):
with connections[using].cursor() as cursor:
self.assertNotIn(
index,
                connections[using].introspection.get_constraints(cursor, table),
)
def assertConstraintExists(self, table, name, value=True, using="default"):
with connections[using].cursor() as cursor:
constraints = (
connections[using].introspection.get_constraints(cursor, table).items()
)
self.assertEqual(
value,
any(c["check"] for n, c in constraints if n == name),
)
def assertConstraintNotExists(self, table, name):
return self.assertConstraintExists(table, name, False)
    def assertUniqueConstraintExists(
        self, table, columns, value=True, using="default"
    ):
with connections[using].cursor() as cursor:
constraints = (
connections[using].introspection.get_constraints(cursor, table).values()
)
self.assertEqual(
value,
any(c["unique"] for c in constraints if c["columns"] == list(columns)),
)
def assertFKExists(self, table, columns, to, value=True, using="default"):
if not connections[using].features.can_introspect_foreign_keys:
return
with connections[using].cursor() as cursor:
self.assertEqual(
value,
any(
c["foreign_key"] == to
for c in connections[using]
.introspection.get_constraints(cursor, table)
.values()
if c["columns"] == list(columns)
),
)
def assertFKNotExists(self, table, columns, to):
return self.assertFKExists(table, columns, to, False)
@contextmanager
def temporary_migration_module(self, app_label="migrations", module=None):
"""
Allows testing management commands in a temporary migrations module.
Wrap all invocations to makemigrations and squashmigrations with this
context manager in order to avoid creating migration files in your
source tree inadvertently.
Takes the application label that will be passed to makemigrations or
squashmigrations and the Python path to a migrations module.
The migrations module is used as a template for creating the temporary
migrations module. If it isn't provided, the application's migrations
module is used, if it exists.
Returns the filesystem path to the temporary migrations module.
"""
with tempfile.TemporaryDirectory() as temp_dir:
target_dir = tempfile.mkdtemp(dir=temp_dir)
with open(os.path.join(target_dir, "__init__.py"), "w"):
pass
target_migrations_dir = os.path.join(target_dir, "migrations")
if module is None:
module = apps.get_app_config(app_label).name + ".migrations"
try:
source_migrations_dir = module_dir(import_module(module))
except (ImportError, ValueError):
pass
else:
shutil.copytree(source_migrations_dir, target_migrations_dir)
with extend_sys_path(temp_dir):
new_module = os.path.basename(target_dir) + ".migrations"
with self.settings(MIGRATION_MODULES={app_label: new_module}):
yield target_migrations_dir
class OperationTestBase(MigrationTestBase):
"""Common functions to help test operations."""
@classmethod
def setUpClass(cls):
super().setUpClass()
cls._initial_table_names = frozenset(connection.introspection.table_names())
def tearDown(self):
self.cleanup_test_tables()
super().tearDown()
def cleanup_test_tables(self):
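        # Drop every table created during the test; constraint checks are
        # disabled so the drop order doesn't matter.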
table_names = (
frozenset(connection.introspection.table_names())
- self._initial_table_names
)
with connection.schema_editor() as editor:
with connection.constraint_checks_disabled():
for table_name in table_names:
editor.execute(
editor.sql_delete_table
% {
"table": editor.quote_name(table_name),
}
)
def apply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration("name", app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.apply(project_state, editor)
def unapply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration("name", app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.unapply(project_state, editor)
def make_test_state(self, app_label, operation, **kwargs):
"""
Makes a test state using set_up_test_model and returns the
original state and the state after the migration is applied.
"""
project_state = self.set_up_test_model(app_label, **kwargs)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
return project_state, new_state
def set_up_test_model(
self,
app_label,
second_model=False,
third_model=False,
index=False,
multicol_index=False,
related_model=False,
mti_model=False,
proxy_model=False,
manager_model=False,
unique_together=False,
options=False,
db_table=None,
index_together=False, # RemovedInDjango51Warning.
constraints=None,
indexes=None,
):
"""Creates a test model state and database table."""
# Make the "current" state.
model_options = {
"swappable": "TEST_SWAP_MODEL",
# RemovedInDjango51Warning.
"index_together": [["weight", "pink"]] if index_together else [],
"unique_together": [["pink", "weight"]] if unique_together else [],
}
if options:
model_options["permissions"] = [("can_groom", "Can groom")]
if db_table:
model_options["db_table"] = db_table
operations = [
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options=model_options,
)
]
if index:
operations.append(
migrations.AddIndex(
"Pony",
models.Index(fields=["pink"], name="pony_pink_idx"),
)
)
if multicol_index:
operations.append(
migrations.AddIndex(
"Pony",
models.Index(fields=["pink", "weight"], name="pony_test_idx"),
)
)
if indexes:
for index in indexes:
operations.append(migrations.AddIndex("Pony", index))
if constraints:
for constraint in constraints:
operations.append(migrations.AddConstraint("Pony", constraint))
if second_model:
operations.append(
migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
],
)
)
if third_model:
operations.append(
migrations.CreateModel(
"Van",
[
("id", models.AutoField(primary_key=True)),
],
)
)
if related_model:
operations.append(
migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
(
"friend",
models.ForeignKey("self", models.CASCADE, null=True),
),
],
)
)
if mti_model:
operations.append(
migrations.CreateModel(
"ShetlandPony",
fields=[
(
"pony_ptr",
models.OneToOneField(
"Pony",
models.CASCADE,
auto_created=True,
parent_link=True,
primary_key=True,
to_field="id",
serialize=False,
),
),
("cuteness", models.IntegerField(default=1)),
],
bases=["%s.Pony" % app_label],
)
)
if proxy_model:
operations.append(
migrations.CreateModel(
"ProxyPony",
fields=[],
options={"proxy": True},
bases=["%s.Pony" % app_label],
)
)
if manager_model:
from .models import FoodManager, FoodQuerySet
operations.append(
migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
)
return self.apply_operations(app_label, ProjectState(), operations)
import functools
import re
from unittest import mock
from django.apps import apps
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.core.validators import RegexValidator, validate_slug
from django.db import connection, migrations, models
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.state import ModelState, ProjectState
from django.test import SimpleTestCase, TestCase, ignore_warnings, override_settings
from django.test.utils import isolate_lru_cache
from django.utils.deprecation import RemovedInDjango51Warning
from .models import FoodManager, FoodQuerySet
class DeconstructibleObject:
"""
A custom deconstructible object.
"""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def deconstruct(self):
return (self.__module__ + "." + self.__class__.__name__, self.args, self.kwargs)
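    # For example, DeconstructibleObject(1, a=2).deconstruct() returns
    # ("<module path>.DeconstructibleObject", (1,), {"a": 2}); the
    # autodetector compares these triples to decide whether a default
    # has really changed.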
class BaseAutodetectorTests(TestCase):
def repr_changes(self, changes, include_dependencies=False):
output = ""
for app_label, migrations_ in sorted(changes.items()):
output += " %s:\n" % app_label
for migration in migrations_:
output += " %s\n" % migration.name
for operation in migration.operations:
output += " %s\n" % operation
if include_dependencies:
output += " Dependencies:\n"
if migration.dependencies:
for dep in migration.dependencies:
output += " %s\n" % (dep,)
else:
output += " None\n"
return output
def assertNumberMigrations(self, changes, app_label, number):
if len(changes.get(app_label, [])) != number:
self.fail(
"Incorrect number of migrations (%s) for %s (expected %s)\n%s"
% (
len(changes.get(app_label, [])),
app_label,
number,
self.repr_changes(changes),
)
)
def assertMigrationDependencies(self, changes, app_label, position, dependencies):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
if set(migration.dependencies) != set(dependencies):
self.fail(
"Migration dependencies mismatch for %s.%s (expected %s):\n%s"
% (
app_label,
migration.name,
dependencies,
self.repr_changes(changes, include_dependencies=True),
)
)
def assertOperationTypes(self, changes, app_label, position, types):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
real_types = [
operation.__class__.__name__ for operation in migration.operations
]
if types != real_types:
self.fail(
"Operation type mismatch for %s.%s (expected %s):\n%s"
% (
app_label,
migration.name,
types,
self.repr_changes(changes),
)
)
def assertOperationAttributes(
self, changes, app_label, position, operation_position, **attrs
):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
        if len(migration.operations) < operation_position + 1:
self.fail(
"No operation at index %s for %s.%s\n%s"
% (
operation_position,
app_label,
migration.name,
self.repr_changes(changes),
)
)
operation = migration.operations[operation_position]
for attr, value in attrs.items():
if getattr(operation, attr, None) != value:
self.fail(
"Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s"
% (
app_label,
migration.name,
operation_position,
attr,
value,
getattr(operation, attr, None),
self.repr_changes(changes),
)
)
def assertOperationFieldAttributes(
self, changes, app_label, position, operation_position, **attrs
):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
        if len(migration.operations) < operation_position + 1:
self.fail(
"No operation at index %s for %s.%s\n%s"
% (
operation_position,
app_label,
migration.name,
self.repr_changes(changes),
)
)
operation = migration.operations[operation_position]
if not hasattr(operation, "field"):
self.fail(
"No field attribute for %s.%s op #%s."
% (
app_label,
migration.name,
operation_position,
)
)
field = operation.field
for attr, value in attrs.items():
if getattr(field, attr, None) != value:
self.fail(
"Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, "
"got %r):\n%s"
% (
app_label,
migration.name,
operation_position,
attr,
value,
getattr(field, attr, None),
self.repr_changes(changes),
)
)
def make_project_state(self, model_states):
"Shortcut to make ProjectStates from lists of predefined models"
project_state = ProjectState()
for model_state in model_states:
project_state.add_model(model_state.clone())
return project_state
def get_changes(self, before_states, after_states, questioner=None):
if not isinstance(before_states, ProjectState):
before_states = self.make_project_state(before_states)
if not isinstance(after_states, ProjectState):
after_states = self.make_project_state(after_states)
return MigrationAutodetector(
before_states,
after_states,
questioner,
)._detect_changes()
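    # Usage sketch: in a subclass defining the model states,
    # get_changes([self.author_empty], [self.author_name]) should yield a
    # single "testapp" migration whose only operation is an AddField of
    # "name" on "author".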
class AutodetectorTests(BaseAutodetectorTests):
"""
Tests the migration autodetector.
"""
author_empty = ModelState(
"testapp", "Author", [("id", models.AutoField(primary_key=True))]
)
author_name = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
)
author_name_null = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, null=True)),
],
)
author_name_longer = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=400)),
],
)
author_name_renamed = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("names", models.CharField(max_length=200)),
],
)
author_name_default = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default="Ada Lovelace")),
],
)
author_name_check_constraint = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"constraints": [
models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
]
},
)
author_dates_of_birth_auto_now = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("date_of_birth", models.DateField(auto_now=True)),
("date_time_of_birth", models.DateTimeField(auto_now=True)),
("time_of_birth", models.TimeField(auto_now=True)),
],
)
author_dates_of_birth_auto_now_add = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("date_of_birth", models.DateField(auto_now_add=True)),
("date_time_of_birth", models.DateTimeField(auto_now_add=True)),
("time_of_birth", models.TimeField(auto_now_add=True)),
],
)
author_name_deconstructible_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
],
)
author_name_deconstructible_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
],
)
author_name_deconstructible_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=models.IntegerField())),
],
)
author_name_deconstructible_4 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=models.IntegerField())),
],
)
author_name_deconstructible_list_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 123]
),
),
],
)
author_name_deconstructible_list_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 123]
),
),
],
)
author_name_deconstructible_list_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 999]
),
),
],
)
author_name_deconstructible_tuple_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 123)
),
),
],
)
author_name_deconstructible_tuple_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 123)
),
),
],
)
author_name_deconstructible_tuple_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 999)
),
),
],
)
author_name_deconstructible_dict_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 123},
),
),
],
)
author_name_deconstructible_dict_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 123},
),
),
],
)
author_name_deconstructible_dict_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 999},
),
),
],
)
author_name_nested_deconstructible_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_changed_arg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2-changed"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_extra_arg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
None,
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_changed_kwarg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c-changed")),
),
),
),
],
)
author_name_nested_deconstructible_extra_kwarg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
c=None,
),
),
),
],
)
author_custom_pk = ModelState(
"testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))]
)
author_with_biography_non_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField()),
("biography", models.TextField()),
],
)
author_with_biography_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(blank=True)),
("biography", models.TextField(blank=True)),
],
)
author_with_book = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
author_with_book_order_wrt = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={"order_with_respect_to": "book"},
)
author_renamed_with_book = ModelState(
"testapp",
"Writer",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
author_with_publisher_string = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("publisher_name", models.CharField(max_length=200)),
],
)
author_with_publisher = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
author_with_user = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("user", models.ForeignKey("auth.User", models.CASCADE)),
],
)
author_with_custom_user = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)),
],
)
author_proxy = ModelState(
"testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)
)
author_proxy_options = ModelState(
"testapp",
"AuthorProxy",
[],
{
"proxy": True,
"verbose_name": "Super Author",
},
("testapp.author",),
)
author_proxy_notproxy = ModelState(
"testapp", "AuthorProxy", [], {}, ("testapp.author",)
)
author_proxy_third = ModelState(
"thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)
)
author_proxy_third_notproxy = ModelState(
"thirdapp", "AuthorProxy", [], {}, ("testapp.author",)
)
author_proxy_proxy = ModelState(
"testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",)
)
author_unmanaged = ModelState(
"testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",)
)
author_unmanaged_managed = ModelState(
"testapp", "AuthorUnmanaged", [], {}, ("testapp.author",)
)
author_unmanaged_default_pk = ModelState(
"testapp", "Author", [("id", models.AutoField(primary_key=True))]
)
author_unmanaged_custom_pk = ModelState(
"testapp",
"Author",
[
("pk_field", models.IntegerField(primary_key=True)),
],
)
author_with_m2m = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.ManyToManyField("testapp.Publisher")),
],
)
author_with_m2m_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.ManyToManyField("testapp.Publisher", blank=True)),
],
)
author_with_m2m_through = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField("testapp.Publisher", through="testapp.Contract"),
),
],
)
author_with_renamed_m2m_through = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField("testapp.Publisher", through="testapp.Deal"),
),
],
)
author_with_former_m2m = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.CharField(max_length=100)),
],
)
author_with_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{
"permissions": [("can_hire", "Can hire")],
"verbose_name": "Authi",
},
)
author_with_db_table_comment = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table_comment": "Table comment"},
)
author_with_db_table_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_one"},
)
author_with_new_db_table_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_two"},
)
author_renamed_with_db_table_options = ModelState(
"testapp",
"NewAuthor",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_one"},
)
author_renamed_with_new_db_table_options = ModelState(
"testapp",
"NewAuthor",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_three"},
)
contract = ModelState(
"testapp",
"Contract",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
contract_renamed = ModelState(
"testapp",
"Deal",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
publisher = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_author = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_aardvark_author = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_book = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("otherapp.Book", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
other_pony = ModelState(
"otherapp",
"Pony",
[
("id", models.AutoField(primary_key=True)),
],
)
other_pony_food = ModelState(
"otherapp",
"Pony",
[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
other_stable = ModelState(
"otherapp", "Stable", [("id", models.AutoField(primary_key=True))]
)
third_thing = ModelState(
"thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]
)
book = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_proxy_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_proxy_proxy_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)),
],
)
book_migrations_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_no_author_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.IntegerField()),
("title", models.CharField(max_length=200)),
],
)
book_with_no_author = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
],
)
book_with_author_renamed = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Writer", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_field_and_author_renamed = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("writer", models.ForeignKey("testapp.Writer", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_multiple_authors = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("authors", models.ManyToManyField("testapp.Author")),
("title", models.CharField(max_length=200)),
],
)
book_with_multiple_authors_through_attribution = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
(
"authors",
models.ManyToManyField(
"testapp.Author", through="otherapp.Attribution"
),
),
("title", models.CharField(max_length=200)),
],
)
book_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(fields=["author", "title"], name="book_title_author_idx")
],
},
)
book_unordered_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(fields=["title", "author"], name="book_author_title_idx")
],
},
)
book_unique_together = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("author", "title")},
},
)
book_unique_together_2 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "author")},
},
)
book_unique_together_3 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "newfield")},
},
)
book_unique_together_4 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield2", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "newfield2")},
},
)
attribution = ModelState(
"otherapp",
"Attribution",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
edition = ModelState(
"thirdapp",
"Edition",
[
("id", models.AutoField(primary_key=True)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
custom_user = ModelState(
"thirdapp",
"CustomUser",
[
("id", models.AutoField(primary_key=True)),
("username", models.CharField(max_length=255)),
],
bases=(AbstractBaseUser,),
)
custom_user_no_inherit = ModelState(
"thirdapp",
"CustomUser",
[
("id", models.AutoField(primary_key=True)),
("username", models.CharField(max_length=255)),
],
)
aardvark = ModelState(
"thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))]
)
aardvark_testapp = ModelState(
"testapp", "Aardvark", [("id", models.AutoField(primary_key=True))]
)
aardvark_based_on_author = ModelState(
"testapp", "Aardvark", [], bases=("testapp.Author",)
)
aardvark_pk_fk_author = ModelState(
"testapp",
"Aardvark",
[
(
"id",
models.OneToOneField(
"testapp.Author", models.CASCADE, primary_key=True
),
),
],
)
knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))])
rabbit = ModelState(
"eggs",
"Rabbit",
[
("id", models.AutoField(primary_key=True)),
("knight", models.ForeignKey("eggs.Knight", models.CASCADE)),
("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)),
],
{
"unique_together": {("parent", "knight")},
"indexes": [
models.Index(
fields=["parent", "knight"], name="rabbit_circular_fk_index"
)
],
},
)
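    # The ModelState fixtures above are reusable "before"/"after" project
    # states; the tests below feed pairs of them (or inline states) through
    # the autodetector and assert on the generated operations.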
def test_arrange_for_graph(self):
"""Tests auto-naming of migrations for graph matching."""
# Make a fake graph
graph = MigrationGraph()
graph.add_node(("testapp", "0001_initial"), None)
graph.add_node(("testapp", "0002_foobar"), None)
graph.add_node(("otherapp", "0001_initial"), None)
graph.add_dependency(
"testapp.0002_foobar",
("testapp", "0002_foobar"),
("testapp", "0001_initial"),
)
graph.add_dependency(
"testapp.0002_foobar",
("testapp", "0002_foobar"),
("otherapp", "0001_initial"),
)
# Use project state to make a new migration change set
before = self.make_project_state([self.publisher, self.other_pony])
after = self.make_project_state(
[
self.author_empty,
self.publisher,
self.other_pony,
self.other_stable,
]
)
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes()
# Run through arrange_for_graph
changes = autodetector.arrange_for_graph(changes, graph)
# Make sure there's a new name, deps match, etc.
self.assertEqual(changes["testapp"][0].name, "0003_author")
self.assertEqual(
changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]
)
self.assertEqual(changes["otherapp"][0].name, "0002_stable")
self.assertEqual(
changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]
)
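    # Naming sketch for the assertions above (a summary, not part of the
    # original test): arrange_for_graph() numbers each new migration after the
    # app's current leaf node and appends an auto-generated suffix, so
    # testapp's leaf "0002_foobar" yields "0003_author" and otherapp's leaf
    # "0001_initial" yields "0002_stable".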
def test_arrange_for_graph_with_multiple_initial(self):
# Make a fake graph.
graph = MigrationGraph()
# Use project state to make a new migration change set.
before = self.make_project_state([])
after = self.make_project_state(
[self.author_with_book, self.book, self.attribution]
)
autodetector = MigrationAutodetector(
before, after, MigrationQuestioner({"ask_initial": True})
)
changes = autodetector._detect_changes()
changes = autodetector.arrange_for_graph(changes, graph)
self.assertEqual(changes["otherapp"][0].name, "0001_initial")
self.assertEqual(changes["otherapp"][0].dependencies, [])
self.assertEqual(changes["otherapp"][1].name, "0002_initial")
self.assertCountEqual(
changes["otherapp"][1].dependencies,
[("testapp", "0001_initial"), ("otherapp", "0001_initial")],
)
self.assertEqual(changes["testapp"][0].name, "0001_initial")
self.assertEqual(
changes["testapp"][0].dependencies, [("otherapp", "0001_initial")]
)
def test_trim_apps(self):
"""
Trim does not remove dependencies but does remove unwanted apps.
"""
# Use project state to make a new migration change set
before = self.make_project_state([])
after = self.make_project_state(
[self.author_empty, self.other_pony, self.other_stable, self.third_thing]
)
autodetector = MigrationAutodetector(
before, after, MigrationQuestioner({"ask_initial": True})
)
changes = autodetector._detect_changes()
# Run through arrange_for_graph
graph = MigrationGraph()
changes = autodetector.arrange_for_graph(changes, graph)
changes["testapp"][0].dependencies.append(("otherapp", "0001_initial"))
changes = autodetector._trim_to_apps(changes, {"testapp"})
# Make sure there's the right set of migrations
self.assertEqual(changes["testapp"][0].name, "0001_initial")
self.assertEqual(changes["otherapp"][0].name, "0001_initial")
self.assertNotIn("thirdapp", changes)
def test_custom_migration_name(self):
"""Tests custom naming of migrations for graph matching."""
# Make a fake graph
graph = MigrationGraph()
graph.add_node(("testapp", "0001_initial"), None)
graph.add_node(("testapp", "0002_foobar"), None)
graph.add_node(("otherapp", "0001_initial"), None)
graph.add_dependency(
"testapp.0002_foobar",
("testapp", "0002_foobar"),
("testapp", "0001_initial"),
)
# Use project state to make a new migration change set
before = self.make_project_state([])
after = self.make_project_state(
[self.author_empty, self.other_pony, self.other_stable]
)
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes()
# Run through arrange_for_graph
migration_name = "custom_name"
changes = autodetector.arrange_for_graph(changes, graph, migration_name)
# Make sure there's a new name, deps match, etc.
self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name)
self.assertEqual(
changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]
)
self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name)
self.assertEqual(
changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]
)
def test_new_model(self):
"""Tests autodetection of new models."""
changes = self.get_changes([], [self.other_pony_food])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony")
self.assertEqual(
[name for name, mgr in changes["otherapp"][0].operations[0].managers],
["food_qs", "food_mgr", "food_mgr_kwargs"],
)
def test_old_model(self):
"""Tests deletion of old models."""
changes = self.get_changes([self.author_empty], [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
def test_add_field(self):
"""Tests autodetection of new fields."""
changes = self.get_changes([self.author_empty], [self.author_name])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_date_fields_with_auto_now_not_asking_for_default(
self, mocked_ask_method
):
changes = self.get_changes(
[self.author_empty], [self.author_dates_of_birth_auto_now]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(
self, mocked_ask_method
):
changes = self.get_changes(
[self.author_empty], [self.author_dates_of_birth_auto_now_add]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition"
)
def test_add_date_fields_with_auto_now_add_asking_for_default(
self, mocked_ask_method
):
changes = self.get_changes(
[self.author_empty], [self.author_dates_of_birth_auto_now_add]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
self.assertEqual(mocked_ask_method.call_count, 3)
def test_remove_field(self):
"""Tests autodetection of removed fields."""
changes = self.get_changes([self.author_name], [self.author_empty])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
def test_alter_field(self):
"""Tests autodetection of new fields."""
changes = self.get_changes([self.author_name], [self.author_name_longer])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
def test_supports_functools_partial(self):
def _content_file_name(instance, filename, key, **kwargs):
return "{}/{}".format(instance, filename)
def content_file_name(key, **kwargs):
return functools.partial(_content_file_name, key, **kwargs)
# An unchanged partial reference.
before = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200, upload_to=content_file_name("file")
),
),
],
)
]
after = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200, upload_to=content_file_name("file")
),
),
],
)
]
changes = self.get_changes(before, after)
self.assertNumberMigrations(changes, "testapp", 0)
# A changed partial reference.
args_changed = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200, upload_to=content_file_name("other-file")
),
),
],
)
]
changes = self.get_changes(before, args_changed)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
# Can't use assertOperationFieldAttributes because we need the
# deconstructed version, i.e., the exploded func/args/keywords rather
# than the partial: we don't care if it's not the same instance of the
# partial, only if it's the same source function, args, and keywords.
value = changes["testapp"][0].operations[0].field.upload_to
self.assertEqual(
(_content_file_name, ("other-file",), {}),
(value.func, value.args, value.keywords),
)
kwargs_changed = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200,
upload_to=content_file_name("file", spam="eggs"),
),
),
],
)
]
changes = self.get_changes(before, kwargs_changed)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
value = changes["testapp"][0].operations[0].field.upload_to
self.assertEqual(
(_content_file_name, ("file",), {"spam": "eggs"}),
(value.func, value.args, value.keywords),
)
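    # Why deconstruction is needed (illustrative, plain-Python sketch):
    #
    #   a = functools.partial(_content_file_name, "file")
    #   b = functools.partial(_content_file_name, "file")
    #   a == b                      # False: partials compare by identity
    #   (a.func, a.args, a.keywords) == (b.func, b.args, b.keywords)  # True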
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_alter_field_to_not_null_with_default(self, mocked_ask_method):
"""
#23609 - Tests autodetection of nullable to non-nullable alterations.
"""
changes = self.get_changes([self.author_name_null], [self.author_name_default])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, default="Ada Lovelace"
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
return_value=models.NOT_PROVIDED,
)
def test_alter_field_to_not_null_without_default(self, mocked_ask_method):
"""
#23609 - Tests autodetection of nullable to non-nullable alterations.
"""
changes = self.get_changes([self.author_name_null], [self.author_name])
self.assertEqual(mocked_ask_method.call_count, 1)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, default=models.NOT_PROVIDED
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
return_value="Some Name",
)
def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method):
"""
#23609 - Tests autodetection of nullable to non-nullable alterations.
"""
changes = self.get_changes([self.author_name_null], [self.author_name])
self.assertEqual(mocked_ask_method.call_count, 1)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=False
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, default="Some Name"
)
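    # Contrast across the three not-null tests above: a default declared on the
    # model keeps preserve_default=True, while a one-off value supplied through
    # the questioner yields preserve_default=False, meaning the default only
    # backfills existing rows and is not kept on the field afterwards.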
def test_rename_field(self):
"""Tests autodetection of renamed fields."""
changes = self.get_changes(
[self.author_name],
[self.author_name_renamed],
MigrationQuestioner({"ask_rename": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="name", new_name="names"
)
def test_rename_field_foreign_key_to_field(self):
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField(unique=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
(
"foo",
models.ForeignKey("app.Foo", models.CASCADE, to_field="field"),
),
],
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("renamed_field", models.IntegerField(unique=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
(
"foo",
models.ForeignKey(
"app.Foo", models.CASCADE, to_field="renamed_field"
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "app", 0, 0, old_name="field", new_name="renamed_field"
)
def test_rename_foreign_object_fields(self):
fields = ("first", "second")
renamed_fields = ("first_renamed", "second_renamed")
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
],
options={"unique_together": {fields}},
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
(
"foo",
models.ForeignObject(
"app.Foo",
models.CASCADE,
from_fields=fields,
to_fields=fields,
),
),
],
),
]
# Case 1: to_fields renames.
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("first_renamed", models.IntegerField()),
("second_renamed", models.IntegerField()),
],
options={"unique_together": {renamed_fields}},
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
(
"foo",
models.ForeignObject(
"app.Foo",
models.CASCADE,
from_fields=fields,
to_fields=renamed_fields,
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(
changes, "app", 0, ["RenameField", "RenameField", "AlterUniqueTogether"]
)
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="foo",
old_name="first",
new_name="first_renamed",
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="foo",
old_name="second",
new_name="second_renamed",
)
# Case 2: from_fields renames.
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
],
options={"unique_together": {fields}},
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("first_renamed", models.IntegerField()),
("second_renamed", models.IntegerField()),
(
"foo",
models.ForeignObject(
"app.Foo",
models.CASCADE,
from_fields=renamed_fields,
to_fields=fields,
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField", "RenameField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="bar",
old_name="first",
new_name="first_renamed",
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="bar",
old_name="second",
new_name="second_renamed",
)
def test_rename_referenced_primary_key(self):
before = [
ModelState(
"app",
"Foo",
[
("id", models.CharField(primary_key=True, serialize=False)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("foo", models.ForeignKey("app.Foo", models.CASCADE)),
],
),
]
after = [
ModelState(
"app",
"Foo",
[("renamed_id", models.CharField(primary_key=True, serialize=False))],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("foo", models.ForeignKey("app.Foo", models.CASCADE)),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "app", 0, 0, old_name="id", new_name="renamed_id"
)
def test_rename_field_preserved_db_column(self):
"""
        RenameField is used if a field is renamed and a db_column equal to the
        old field's column is added.
"""
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField()),
],
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("renamed_field", models.IntegerField(db_column="field")),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AlterField", "RenameField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="foo",
name="field",
)
self.assertEqual(
changes["app"][0].operations[0].field.deconstruct(),
(
"field",
"django.db.models.IntegerField",
[],
{"db_column": "field"},
),
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="foo",
old_name="field",
new_name="renamed_field",
)
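    # Illustrative model-level diff for the test above (not from the suite):
    #
    #   class Foo(models.Model):
    #   -   field = models.IntegerField()
    #   +   renamed_field = models.IntegerField(db_column="field")
    #
    # Since db_column pins the original column name, the detected
    # AlterField/RenameField pair is a pure state change with no column rename
    # at the database level.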
def test_rename_related_field_preserved_db_column(self):
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("foo", models.ForeignKey("app.Foo", models.CASCADE)),
],
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
(
"renamed_foo",
models.ForeignKey(
"app.Foo", models.CASCADE, db_column="foo_id"
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AlterField", "RenameField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="bar",
name="foo",
)
self.assertEqual(
changes["app"][0].operations[0].field.deconstruct(),
(
"foo",
"django.db.models.ForeignKey",
[],
{"to": "app.foo", "on_delete": models.CASCADE, "db_column": "foo_id"},
),
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="bar",
old_name="foo",
new_name="renamed_foo",
)
def test_rename_field_with_renamed_model(self):
changes = self.get_changes(
[self.author_name],
[
ModelState(
"testapp",
"RenamedAuthor",
[
("id", models.AutoField(primary_key=True)),
("renamed_name", models.CharField(max_length=200)),
],
),
],
MigrationQuestioner({"ask_rename_model": True, "ask_rename": True}),
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel", "RenameField"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
old_name="Author",
new_name="RenamedAuthor",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
1,
old_name="name",
new_name="renamed_name",
)
def test_rename_model(self):
"""Tests autodetection of renamed models."""
changes = self.get_changes(
[self.author_with_book, self.book],
[self.author_renamed_with_book, self.book_with_author_renamed],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="Writer"
)
# Now that RenameModel handles related fields too, there should be
# no AlterField for the related field.
self.assertNumberMigrations(changes, "otherapp", 0)
def test_rename_model_case(self):
"""
        Model names are case-insensitive. Changing case doesn't lead to any
autodetected operations.
"""
author_renamed = ModelState(
"testapp",
"author",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes(
[self.author_empty, self.book],
[author_renamed, self.book],
questioner=MigrationQuestioner({"ask_rename_model": True}),
)
self.assertNumberMigrations(changes, "testapp", 0)
self.assertNumberMigrations(changes, "otherapp", 0)
def test_renamed_referenced_m2m_model_case(self):
publisher_renamed = ModelState(
"testapp",
"publisher",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
)
changes = self.get_changes(
[self.publisher, self.author_with_m2m],
[publisher_renamed, self.author_with_m2m],
questioner=MigrationQuestioner({"ask_rename_model": True}),
)
self.assertNumberMigrations(changes, "testapp", 0)
self.assertNumberMigrations(changes, "otherapp", 0)
def test_rename_m2m_through_model(self):
"""
Tests autodetection of renamed models that are used in M2M relations as
through models.
"""
changes = self.get_changes(
[self.author_with_m2m_through, self.publisher, self.contract],
[
self.author_with_renamed_m2m_through,
self.publisher,
self.contract_renamed,
],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Contract", new_name="Deal"
)
def test_rename_model_with_renamed_rel_field(self):
"""
Tests autodetection of renamed models while simultaneously renaming one
of the fields that relate to the renamed model.
"""
changes = self.get_changes(
[self.author_with_book, self.book],
[self.author_renamed_with_book, self.book_with_field_and_author_renamed],
MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="Writer"
)
# Right number/type of migrations for related field rename?
# Alter is already taken care of.
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, old_name="author", new_name="writer"
)
def test_rename_model_with_fks_in_different_position(self):
"""
#24537 - The order of fields in a model does not influence
the RenameModel detection.
"""
before = [
ModelState(
"testapp",
"EntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"testapp",
"EntityB",
[
("id", models.AutoField(primary_key=True)),
("some_label", models.CharField(max_length=255)),
("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
],
),
]
after = [
ModelState(
"testapp",
"EntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"testapp",
"RenamedEntityB",
[
("id", models.AutoField(primary_key=True)),
("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
("some_label", models.CharField(max_length=255)),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename_model": True})
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB"
)
def test_rename_model_reverse_relation_dependencies(self):
"""
The migration to rename a model pointed to by a foreign key in another
app must run after the other app's migration that adds the foreign key
        with the model's original name. Therefore, the renaming migration has a
dependency on that other migration.
"""
before = [
ModelState(
"testapp",
"EntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"otherapp",
"EntityB",
[
("id", models.AutoField(primary_key=True)),
("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
],
),
]
after = [
ModelState(
"testapp",
"RenamedEntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"otherapp",
"EntityB",
[
("id", models.AutoField(primary_key=True)),
(
"entity_a",
models.ForeignKey("testapp.RenamedEntityA", models.CASCADE),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename_model": True})
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertMigrationDependencies(
changes, "testapp", 0, [("otherapp", "__first__")]
)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="EntityA", new_name="RenamedEntityA"
)
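    # Dependency sketch matching the assertion above: otherapp's first
    # migration adds EntityB.entity_a while the target is still named EntityA,
    # so testapp's RenameModel(EntityA -> RenamedEntityA) must declare
    # ("otherapp", "__first__") to be ordered after it.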
def test_fk_dependency(self):
"""Having a ForeignKey automatically adds a dependency."""
# Note that testapp (author) has no dependencies,
# otherapp (book) depends on testapp (author),
# thirdapp (edition) depends on otherapp (book)
changes = self.get_changes([], [self.author_name, self.book, self.edition])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(changes, "thirdapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="Edition")
self.assertMigrationDependencies(
changes, "thirdapp", 0, [("otherapp", "auto_1")]
)
def test_proxy_fk_dependency(self):
"""FK dependencies still work on proxy models."""
        # Note that testapp (author) has no dependencies,
        # thirdapp (authorproxy) depends on testapp (author),
        # and otherapp (book) depends on thirdapp (authorproxy).
changes = self.get_changes(
[], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("thirdapp", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(changes, "thirdapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="AuthorProxy")
self.assertMigrationDependencies(
changes, "thirdapp", 0, [("testapp", "auto_1")]
)
def test_same_app_no_fk_dependency(self):
"""
A migration with a FK between two models of the same app
        does not have a dependency on itself.
"""
changes = self.get_changes([], [self.author_with_publisher, self.publisher])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
def test_circular_fk_dependency(self):
"""
Having a circular ForeignKey dependency automatically
resolves the situation into 2 migrations on one side and 1 on the other.
"""
changes = self.get_changes(
[], [self.author_with_book, self.book, self.publisher_with_book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
self.assertMigrationDependencies(
changes, "testapp", 0, [("otherapp", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 2)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationTypes(changes, "otherapp", 1, ["AddField"])
self.assertMigrationDependencies(changes, "otherapp", 0, [])
self.assertMigrationDependencies(
changes, "otherapp", 1, [("otherapp", "auto_1"), ("testapp", "auto_1")]
)
        # Both split migrations should be `initial`.
self.assertTrue(changes["otherapp"][0].initial)
self.assertTrue(changes["otherapp"][1].initial)
def test_same_app_circular_fk_dependency(self):
"""
A migration with a FK between two models of the same app does
        not have a dependency on itself.
"""
changes = self.get_changes(
[], [self.author_with_publisher, self.publisher_with_author]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "CreateModel", "AddField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
self.assertMigrationDependencies(changes, "testapp", 0, [])
def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self):
"""
        #22275 - A migration with a circular FK dependency does not try to
        create the unique_together constraint and indexes until all required
        fields have been created.
"""
changes = self.get_changes([], [self.knight, self.rabbit])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "eggs", 1)
self.assertOperationTypes(
changes,
"eggs",
0,
["CreateModel", "CreateModel", "AddIndex", "AlterUniqueTogether"],
)
self.assertNotIn("unique_together", changes["eggs"][0].operations[0].options)
self.assertNotIn("unique_together", changes["eggs"][0].operations[1].options)
self.assertMigrationDependencies(changes, "eggs", 0, [])
def test_alter_db_table_add(self):
"""Tests detection for adding db_table in model's options."""
changes = self.get_changes(
[self.author_empty], [self.author_with_db_table_options]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table="author_one"
)
def test_alter_db_table_change(self):
"""Tests detection for changing db_table in model's options'."""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_with_new_db_table_options]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table="author_two"
)
def test_alter_db_table_remove(self):
"""Tests detection for removing db_table in model's options."""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_empty]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table=None
)
def test_alter_db_table_no_changes(self):
"""
        An AlterModelTable operation isn't generated if db_table hasn't changed.
"""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_with_db_table_options]
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
def test_keep_db_table_with_model_change(self):
"""
        When the model is renamed but db_table stays as-is, the autodetector
        must not create more than one operation.
"""
changes = self.get_changes(
[self.author_with_db_table_options],
[self.author_renamed_with_db_table_options],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor"
)
def test_alter_db_table_with_model_change(self):
"""
        When both the model and its db_table are renamed, the autodetector
        must create two operations.
"""
changes = self.get_changes(
[self.author_with_db_table_options],
[self.author_renamed_with_new_db_table_options],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RenameModel", "AlterModelTable"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor"
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="newauthor", table="author_three"
)
def test_alter_db_table_comment_add(self):
changes = self.get_changes(
[self.author_empty], [self.author_with_db_table_comment]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table_comment="Table comment"
)
def test_alter_db_table_comment_change(self):
author_with_new_db_table_comment = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table_comment": "New table comment"},
)
changes = self.get_changes(
[self.author_with_db_table_comment],
[author_with_new_db_table_comment],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
table_comment="New table comment",
)
def test_alter_db_table_comment_remove(self):
changes = self.get_changes(
[self.author_with_db_table_comment],
[self.author_empty],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", db_table_comment=None
)
def test_alter_db_table_comment_no_changes(self):
changes = self.get_changes(
[self.author_with_db_table_comment],
[self.author_with_db_table_comment],
)
self.assertNumberMigrations(changes, "testapp", 0)
def test_identical_regex_doesnt_alter(self):
from_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True,
validators=[
RegexValidator(
re.compile("^[-a-zA-Z0-9_]+\\Z"),
"Enter a valid “slug” consisting of letters, numbers, "
"underscores or hyphens.",
"invalid",
)
],
),
)
],
)
to_state = ModelState(
"testapp",
"model",
[("id", models.AutoField(primary_key=True, validators=[validate_slug]))],
)
changes = self.get_changes([from_state], [to_state])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 0)
def test_different_regex_does_alter(self):
from_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True,
validators=[
RegexValidator(
re.compile("^[a-z]+\\Z", 32),
"Enter a valid “slug” consisting of letters, numbers, "
"underscores or hyphens.",
"invalid",
)
],
),
)
],
)
to_state = ModelState(
"testapp",
"model",
[("id", models.AutoField(primary_key=True, validators=[validate_slug]))],
)
changes = self.get_changes([from_state], [to_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
def test_alter_regex_string_to_compiled_regex(self):
regex_string = "^[a-z]+$"
from_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True, validators=[RegexValidator(regex_string)]
),
)
],
)
to_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True,
validators=[RegexValidator(re.compile(regex_string))],
),
)
],
)
changes = self.get_changes([from_state], [to_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
def test_empty_unique_together(self):
"""Empty unique_together shouldn't generate a migration."""
# Explicitly testing for not specified, since this is the case after
# a CreateModel operation w/o any definition on the original model
model_state_not_specified = ModelState(
"a", "model", [("id", models.AutoField(primary_key=True))]
)
# Explicitly testing for None, since this was the issue in #23452 after
# an AlterUniqueTogether operation with e.g. () as value
model_state_none = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"unique_together": None,
},
)
# Explicitly testing for the empty set, since we now always have sets.
# During removal (('col1', 'col2'),) --> () this becomes set([])
model_state_empty = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"unique_together": set(),
},
)
def test(from_state, to_state, msg):
changes = self.get_changes([from_state], [to_state])
if changes:
ops = ", ".join(
o.__class__.__name__ for o in changes["a"][0].operations
)
self.fail("Created operation(s) %s from %s" % (ops, msg))
tests = (
(
model_state_not_specified,
model_state_not_specified,
'"not specified" to "not specified"',
),
(model_state_not_specified, model_state_none, '"not specified" to "None"'),
(
model_state_not_specified,
model_state_empty,
'"not specified" to "empty"',
),
(model_state_none, model_state_not_specified, '"None" to "not specified"'),
(model_state_none, model_state_none, '"None" to "None"'),
(model_state_none, model_state_empty, '"None" to "empty"'),
(
model_state_empty,
model_state_not_specified,
'"empty" to "not specified"',
),
(model_state_empty, model_state_none, '"empty" to "None"'),
(model_state_empty, model_state_empty, '"empty" to "empty"'),
)
for t in tests:
test(*t)
def test_create_model_with_indexes(self):
"""Test creation of new model with indexes already defined."""
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(fields=["name"], name="create_model_with_indexes_idx")
]
},
)
changes = self.get_changes([], [author])
added_index = models.Index(
fields=["name"], name="create_model_with_indexes_idx"
)
# Right number of migrations?
self.assertEqual(len(changes["otherapp"]), 1)
# Right number of actions?
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 2)
# Right actions order?
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel", "AddIndex"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Author")
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="author", index=added_index
)
def test_add_indexes(self):
"""Test change detection of new indexes."""
changes = self.get_changes(
[self.author_empty, self.book], [self.author_empty, self.book_indexes]
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AddIndex"])
added_index = models.Index(
fields=["author", "title"], name="book_title_author_idx"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", index=added_index
)
def test_remove_indexes(self):
"""Test change detection of removed indexes."""
changes = self.get_changes(
[self.author_empty, self.book_indexes], [self.author_empty, self.book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RemoveIndex"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", name="book_title_author_idx"
)
def test_rename_indexes(self):
book_renamed_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(
fields=["author", "title"], name="renamed_book_title_author_idx"
)
],
},
)
changes = self.get_changes(
[self.author_empty, self.book_indexes],
[self.author_empty, book_renamed_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RenameIndex"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
model_name="book",
new_name="renamed_book_title_author_idx",
old_name="book_title_author_idx",
)
def test_order_fields_indexes(self):
"""Test change detection of reordering of fields in indexes."""
changes = self.get_changes(
[self.author_empty, self.book_indexes],
[self.author_empty, self.book_unordered_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RemoveIndex", "AddIndex"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", name="book_title_author_idx"
)
added_index = models.Index(
fields=["title", "author"], name="book_author_title_idx"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", index=added_index
)
def test_create_model_with_check_constraint(self):
"""Test creation of new model with constraints already defined."""
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"constraints": [
models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
]
},
)
changes = self.get_changes([], [author])
added_constraint = models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
# Right number of migrations?
self.assertEqual(len(changes["otherapp"]), 1)
# Right number of actions?
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 2)
# Right actions order?
self.assertOperationTypes(
changes, "otherapp", 0, ["CreateModel", "AddConstraint"]
)
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Author")
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="author", constraint=added_constraint
)
def test_add_constraints(self):
"""Test change detection of new constraints."""
changes = self.get_changes(
[self.author_name], [self.author_name_check_constraint]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddConstraint"])
added_constraint = models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", constraint=added_constraint
)
def test_remove_constraints(self):
"""Test change detection of removed constraints."""
changes = self.get_changes(
[self.author_name_check_constraint], [self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveConstraint"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="name_contains_bob"
)
def test_add_unique_together(self):
"""Tests unique_together detection."""
changes = self.get_changes(
[self.author_empty, self.book],
[self.author_empty, self.book_unique_together],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
unique_together={("author", "title")},
)
def test_remove_unique_together(self):
"""Tests unique_together detection."""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
def test_unique_together_remove_fk(self):
"""Tests unique_together and field removal detection & ordering"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book_with_no_author],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterUniqueTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", name="author"
)
def test_unique_together_no_changes(self):
"""
unique_together doesn't generate a migration if no
changes have been made.
"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book_unique_together],
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
def test_unique_together_ordering(self):
"""
unique_together also triggers on ordering changes.
"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book_unique_together_2],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
unique_together={("title", "author")},
)
def test_add_field_and_unique_together(self):
"""
        Added fields are created before they are referenced in unique_together.
"""
changes = self.get_changes(
[self.author_empty, self.book],
[self.author_empty, self.book_unique_together_3],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AddField", "AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
unique_together={("title", "newfield")},
)
def test_create_model_and_unique_together(self):
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
)
book_with_author = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("otherapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "author")},
},
)
changes = self.get_changes(
[self.book_with_no_author], [author, book_with_author]
)
# Right number of migrations?
self.assertEqual(len(changes["otherapp"]), 1)
# Right number of actions?
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 3)
# Right actions order?
self.assertOperationTypes(
changes,
"otherapp",
0,
["CreateModel", "AddField", "AlterUniqueTogether"],
)
def test_remove_field_and_unique_together(self):
"""
        Removed fields are removed only after unique_together has been updated.
"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together_3],
[self.author_empty, self.book_unique_together],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterUniqueTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
unique_together={("author", "title")},
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
model_name="book",
name="newfield",
)
def test_alter_field_and_unique_together(self):
"""Fields are altered after deleting some unique_together."""
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField(db_index=True)),
],
{
"unique_together": {("name",)},
},
)
author_reversed_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, unique=True)),
("age", models.IntegerField()),
],
{
"unique_together": {("age",)},
},
)
changes = self.get_changes([initial_author], [author_reversed_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"AlterUniqueTogether",
"AlterField",
"AlterField",
"AlterUniqueTogether",
],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
unique_together=set(),
)
self.assertOperationAttributes(
changes,
"testapp",
0,
1,
model_name="author",
name="age",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
2,
model_name="author",
name="name",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
3,
name="author",
unique_together={("age",)},
)
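    # The four operations above form a deliberate sandwich: the old
    # unique_together is dropped first (it may cover columns about to
    # change), the fields are then altered, and the new unique_together is
    # added last so it only ever references the post-alteration columns.
    # A rough sketch of the resulting migration (hypothetical operation
    # list, mirroring the assertions above):
    #
    #     operations = [
    #         migrations.AlterUniqueTogether("author", unique_together=set()),
    #         migrations.AlterField("author", "age", models.IntegerField()),
    #         migrations.AlterField(
    #             "author", "name", models.CharField(max_length=200, unique=True)
    #         ),
    #         migrations.AlterUniqueTogether("author", unique_together={("age",)}),
    #     ]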
def test_partly_alter_unique_together_increase(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",), ("age",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
unique_together={("name",), ("age",)},
)
def test_partly_alter_unique_together_decrease(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",), ("age",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
unique_together={("name",)},
)
def test_rename_field_and_unique_together(self):
"""Fields are renamed before updating unique_together."""
changes = self.get_changes(
[self.author_empty, self.book_unique_together_3],
[self.author_empty, self.book_unique_together_4],
MigrationQuestioner({"ask_rename": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["RenameField", "AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
unique_together={("title", "newfield2")},
)
def test_proxy(self):
"""The autodetector correctly deals with proxy models."""
# First, we test adding a proxy model
changes = self.get_changes(
[self.author_empty], [self.author_empty, self.author_proxy]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="AuthorProxy",
options={"proxy": True, "indexes": [], "constraints": []},
)
# Now, we test turning a proxy model into a non-proxy model
        # It should delete the proxy, then create the concrete model.
changes = self.get_changes(
[self.author_empty, self.author_proxy],
[self.author_empty, self.author_proxy_notproxy],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy")
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="AuthorProxy", options={}
)
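    # For reference, the proxy fixture exercised above corresponds roughly
    # to (a sketch of the assumed model, not the literal fixture):
    #
    #     class AuthorProxy(Author):
    #         class Meta:
    #             proxy = True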
def test_proxy_non_model_parent(self):
class Mixin:
pass
author_proxy_non_model_parent = ModelState(
"testapp",
"AuthorProxy",
[],
{"proxy": True},
(Mixin, "testapp.author"),
)
changes = self.get_changes(
[self.author_empty],
[self.author_empty, author_proxy_non_model_parent],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="AuthorProxy",
options={"proxy": True, "indexes": [], "constraints": []},
bases=(Mixin, "testapp.author"),
)
def test_proxy_custom_pk(self):
"""
#23415 - The autodetector must correctly deal with custom FK on proxy
models.
"""
# First, we test the default pk field name
changes = self.get_changes(
[], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]
)
        # Check the model the FK points to.
self.assertEqual(
changes["otherapp"][0].operations[0].fields[2][1].remote_field.model,
"thirdapp.AuthorProxy",
)
# Now, we test the custom pk field name
changes = self.get_changes(
[], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk]
)
        # Check the model the FK points to.
self.assertEqual(
changes["otherapp"][0].operations[0].fields[2][1].remote_field.model,
"thirdapp.AuthorProxy",
)
def test_proxy_to_mti_with_fk_to_proxy(self):
# First, test the pk table and field name.
to_state = self.make_project_state(
[self.author_empty, self.author_proxy_third, self.book_proxy_fk],
)
changes = self.get_changes([], to_state)
fk_field = changes["otherapp"][0].operations[0].fields[2][1]
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("testapp", "author"),
)
self.assertEqual(fk_field.remote_field.model, "thirdapp.AuthorProxy")
# Change AuthorProxy to use MTI.
from_state = to_state.clone()
to_state = self.make_project_state(
[self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk],
)
changes = self.get_changes(from_state, to_state)
# Right number/type of migrations for the AuthorProxy model?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(
changes, "thirdapp", 0, ["DeleteModel", "CreateModel"]
)
# Right number/type of migrations for the Book model with a FK to
# AuthorProxy?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
# otherapp should depend on thirdapp.
self.assertMigrationDependencies(
changes, "otherapp", 0, [("thirdapp", "auto_1")]
)
# Now, test the pk table and field name.
fk_field = changes["otherapp"][0].operations[0].field
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("thirdapp", "authorproxy"),
)
self.assertEqual(fk_field.remote_field.model, "thirdapp.AuthorProxy")
def test_proxy_to_mti_with_fk_to_proxy_proxy(self):
# First, test the pk table and field name.
to_state = self.make_project_state(
[
self.author_empty,
self.author_proxy,
self.author_proxy_proxy,
self.book_proxy_proxy_fk,
]
)
changes = self.get_changes([], to_state)
fk_field = changes["otherapp"][0].operations[0].fields[1][1]
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("testapp", "author"),
)
self.assertEqual(fk_field.remote_field.model, "testapp.AAuthorProxyProxy")
# Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy,
# a proxy of AuthorProxy.
from_state = to_state.clone()
to_state = self.make_project_state(
[
self.author_empty,
self.author_proxy_notproxy,
self.author_proxy_proxy,
self.book_proxy_proxy_fk,
]
)
changes = self.get_changes(from_state, to_state)
# Right number/type of migrations for the AuthorProxy model?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"])
# Right number/type of migrations for the Book model with a FK to
# AAuthorProxyProxy?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
# otherapp should depend on testapp.
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "auto_1")]
)
# Now, test the pk table and field name.
fk_field = changes["otherapp"][0].operations[0].field
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("testapp", "authorproxy"),
)
self.assertEqual(fk_field.remote_field.model, "testapp.AAuthorProxyProxy")
def test_unmanaged_create(self):
"""The autodetector correctly deals with managed models."""
# First, we test adding an unmanaged model
changes = self.get_changes(
[self.author_empty], [self.author_empty, self.author_unmanaged]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="AuthorUnmanaged", options={"managed": False}
)
def test_unmanaged_delete(self):
changes = self.get_changes(
[self.author_empty, self.author_unmanaged], [self.author_empty]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel"])
def test_unmanaged_to_managed(self):
# Now, we test turning an unmanaged model into a managed model
changes = self.get_changes(
[self.author_empty, self.author_unmanaged],
[self.author_empty, self.author_unmanaged_managed],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="authorunmanaged", options={}
)
def test_managed_to_unmanaged(self):
# Now, we turn managed to unmanaged.
changes = self.get_changes(
[self.author_empty, self.author_unmanaged_managed],
[self.author_empty, self.author_unmanaged],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False}
)
def test_unmanaged_custom_pk(self):
"""
#23415 - The autodetector must correctly deal with custom FK on
unmanaged models.
"""
# First, we test the default pk field name
changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book])
# The model the FK on the book model points to.
fk_field = changes["otherapp"][0].operations[0].fields[2][1]
self.assertEqual(fk_field.remote_field.model, "testapp.Author")
# Now, we test the custom pk field name
changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book])
# The model the FK on the book model points to.
fk_field = changes["otherapp"][0].operations[0].fields[2][1]
self.assertEqual(fk_field.remote_field.model, "testapp.Author")
@override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
def test_swappable(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes(
[self.custom_user], [self.custom_user, self.author_with_custom_user]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertMigrationDependencies(
changes, "testapp", 0, [("__setting__", "AUTH_USER_MODEL")]
)
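    # For reference, a model that triggers the "__setting__" dependency
    # asserted above looks roughly like this (a sketch, assuming the usual
    # swappable setup):
    #
    #     from django.conf import settings
    #
    #     class Author(models.Model):
    #         user = models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)
    #
    # Recording the dependency as ("__setting__", "AUTH_USER_MODEL") rather
    # than a concrete app keeps the migration valid when AUTH_USER_MODEL is
    # swapped.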
def test_swappable_lowercase(self):
model_state = ModelState(
"testapp",
"Document",
[
("id", models.AutoField(primary_key=True)),
(
"owner",
models.ForeignKey(
settings.AUTH_USER_MODEL.lower(),
models.CASCADE,
),
),
],
)
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes([], [model_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Document")
self.assertMigrationDependencies(
changes,
"testapp",
0,
[("__setting__", "AUTH_USER_MODEL")],
)
@override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
def test_swappable_many_to_many_model_case(self):
document_lowercase = ModelState(
"testapp",
"Document",
[
("id", models.AutoField(primary_key=True)),
("owners", models.ManyToManyField(settings.AUTH_USER_MODEL.lower())),
],
)
document = ModelState(
"testapp",
"Document",
[
("id", models.AutoField(primary_key=True)),
("owners", models.ManyToManyField(settings.AUTH_USER_MODEL)),
],
)
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes(
[self.custom_user, document_lowercase],
[self.custom_user, document],
)
self.assertEqual(len(changes), 0)
def test_swappable_changed(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
before = self.make_project_state([self.custom_user, self.author_with_user])
with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"):
after = self.make_project_state(
[self.custom_user, self.author_with_custom_user]
)
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes()
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="user"
)
fk_field = changes["testapp"][0].operations[0].field
self.assertEqual(fk_field.remote_field.model, "thirdapp.CustomUser")
def test_add_field_with_default(self):
"""#22030 - Adding a field with a default should work."""
changes = self.get_changes([self.author_empty], [self.author_name_default])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
def test_custom_deconstructible(self):
"""
Two instances which deconstruct to the same value aren't considered a
change.
"""
changes = self.get_changes(
[self.author_name_deconstructible_1], [self.author_name_deconstructible_2]
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
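    # Equality here is decided on deconstruct() output, not object identity.
    # A minimal sketch of such a deconstructible default (hypothetical, not
    # necessarily the fixture used by this test):
    #
    #     class DeconstructibleObject:
    #         def __init__(self, *args, **kwargs):
    #             self.args = args
    #             self.kwargs = kwargs
    #
    #         def deconstruct(self):
    #             return (
    #                 self.__module__ + ".DeconstructibleObject",
    #                 self.args,
    #                 self.kwargs,
    #             )
    #
    # Two instances built with the same arguments deconstruct identically,
    # so the autodetector reports no change.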
def test_deconstruct_field_kwarg(self):
"""Field instances are handled correctly by nested deconstruction."""
changes = self.get_changes(
[self.author_name_deconstructible_3], [self.author_name_deconstructible_4]
)
self.assertEqual(changes, {})
def test_deconstructible_list(self):
"""Nested deconstruction descends into lists."""
# When lists contain items that deconstruct to identical values, those lists
# should be considered equal for the purpose of detecting state changes
# (even if the original items are unequal).
changes = self.get_changes(
[self.author_name_deconstructible_list_1],
[self.author_name_deconstructible_list_2],
)
self.assertEqual(changes, {})
# Legitimate differences within the deconstructed lists should be reported
# as a change
changes = self.get_changes(
[self.author_name_deconstructible_list_1],
[self.author_name_deconstructible_list_3],
)
self.assertEqual(len(changes), 1)
def test_deconstructible_tuple(self):
"""Nested deconstruction descends into tuples."""
# When tuples contain items that deconstruct to identical values, those tuples
# should be considered equal for the purpose of detecting state changes
# (even if the original items are unequal).
changes = self.get_changes(
[self.author_name_deconstructible_tuple_1],
[self.author_name_deconstructible_tuple_2],
)
self.assertEqual(changes, {})
# Legitimate differences within the deconstructed tuples should be reported
# as a change
changes = self.get_changes(
[self.author_name_deconstructible_tuple_1],
[self.author_name_deconstructible_tuple_3],
)
self.assertEqual(len(changes), 1)
def test_deconstructible_dict(self):
"""Nested deconstruction descends into dict values."""
# When dicts contain items whose values deconstruct to identical values,
# those dicts should be considered equal for the purpose of detecting
# state changes (even if the original values are unequal).
changes = self.get_changes(
[self.author_name_deconstructible_dict_1],
[self.author_name_deconstructible_dict_2],
)
self.assertEqual(changes, {})
# Legitimate differences within the deconstructed dicts should be reported
# as a change
changes = self.get_changes(
[self.author_name_deconstructible_dict_1],
[self.author_name_deconstructible_dict_3],
)
self.assertEqual(len(changes), 1)
def test_nested_deconstructible_objects(self):
"""
Nested deconstruction is applied recursively to the args/kwargs of
deconstructed objects.
"""
# If the items within a deconstructed object's args/kwargs have the same
# deconstructed values - whether or not the items themselves are different
# instances - then the object as a whole is regarded as unchanged.
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_2],
)
self.assertEqual(changes, {})
# Differences that exist solely within the args list of a deconstructed object
# should be reported as changes
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_changed_arg],
)
self.assertEqual(len(changes), 1)
# Additional args should also be reported as a change
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_extra_arg],
)
self.assertEqual(len(changes), 1)
# Differences that exist solely within the kwargs dict of a deconstructed object
# should be reported as changes
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_changed_kwarg],
)
self.assertEqual(len(changes), 1)
# Additional kwargs should also be reported as a change
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_extra_kwarg],
)
self.assertEqual(len(changes), 1)
def test_deconstruct_type(self):
"""
        #22951 -- Uninstantiated classes with a deconstruct() method are
        returned as-is by deep_deconstruct during serialization.
"""
author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
# IntegerField intentionally not instantiated.
default=models.IntegerField,
),
),
],
)
changes = self.get_changes([], [author])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
def test_replace_string_with_foreignkey(self):
"""
#22300 - Adding an FK in the same "spot" as a deleted CharField should
work.
"""
changes = self.get_changes(
[self.author_with_publisher_string],
[self.author_with_publisher, self.publisher],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="publisher_name")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
def test_foreign_key_removed_before_target_model(self):
"""
Removing an FK and the model it targets in the same change must remove
the FK field before the model to maintain consistency.
"""
changes = self.get_changes(
[self.author_with_publisher, self.publisher], [self.author_name]
) # removes both the model and FK
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "DeleteModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_many_to_many(self, mocked_ask_method):
"""#22435 - Adding a ManyToManyField should not prompt for a default."""
changes = self.get_changes(
[self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="publishers")
def test_alter_many_to_many(self):
changes = self.get_changes(
[self.author_with_m2m, self.publisher],
[self.author_with_m2m_blank, self.publisher],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="publishers")
def test_create_with_through_model(self):
"""
        Adding an m2m field with a through model, together with the models it
        uses, generates correctly ordered operations.
"""
changes = self.get_changes(
[], [self.author_with_m2m_through, self.publisher, self.contract]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"CreateModel",
"CreateModel",
"CreateModel",
"AddField",
],
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Contract")
self.assertOperationAttributes(
changes, "testapp", 0, 3, model_name="author", name="publishers"
)
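    # For reference, the through setup above corresponds roughly to (a
    # sketch of the assumed fixtures):
    #
    #     class Contract(models.Model):
    #         author = models.ForeignKey("Author", models.CASCADE)
    #         publisher = models.ForeignKey("Publisher", models.CASCADE)
    #
    #     class Author(models.Model):
    #         publishers = models.ManyToManyField(
    #             "Publisher", through="Contract"
    #         )
    #
    # All three CreateModel operations must precede the AddField that wires
    # up the m2m, since the through table references both ends.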
def test_create_with_through_model_separate_apps(self):
author_with_m2m_through = ModelState(
"authors",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField(
"testapp.Publisher", through="contract.Contract"
),
),
],
)
contract = ModelState(
"contract",
"Contract",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("authors.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
changes = self.get_changes(
[], [author_with_m2m_through, self.publisher, contract]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertNumberMigrations(changes, "contract", 1)
self.assertNumberMigrations(changes, "authors", 2)
self.assertMigrationDependencies(
changes,
"authors",
1,
{("authors", "auto_1"), ("contract", "auto_1"), ("testapp", "auto_1")},
)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationTypes(changes, "contract", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "contract", 0, 0, name="Contract")
self.assertOperationTypes(changes, "authors", 0, ["CreateModel"])
self.assertOperationTypes(changes, "authors", 1, ["AddField"])
self.assertOperationAttributes(changes, "authors", 0, 0, name="Author")
self.assertOperationAttributes(
changes, "authors", 1, 0, model_name="author", name="publishers"
)
def test_many_to_many_removed_before_through_model(self):
"""
Removing a ManyToManyField and the "through" model in the same change
must remove the field before the model to maintain consistency.
"""
changes = self.get_changes(
[
self.book_with_multiple_authors_through_attribution,
self.author_name,
self.attribution,
],
[self.book_with_no_author, self.author_name],
)
        # Remove both the through model and the ManyToManyField.
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["RemoveField", "DeleteModel"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="authors", model_name="book"
)
self.assertOperationAttributes(changes, "otherapp", 0, 1, name="Attribution")
def test_many_to_many_removed_before_through_model_2(self):
"""
Removing a model that contains a ManyToManyField and the "through" model
in the same change must remove the field before the model to maintain
consistency.
"""
changes = self.get_changes(
[
self.book_with_multiple_authors_through_attribution,
self.author_name,
self.attribution,
],
[self.author_name],
)
        # Remove both the through model and the ManyToManyField.
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="authors", model_name="book"
)
self.assertOperationAttributes(changes, "otherapp", 0, 1, name="Attribution")
self.assertOperationAttributes(changes, "otherapp", 0, 2, name="Book")
def test_m2m_w_through_multistep_remove(self):
"""
A model with a m2m field that specifies a "through" model cannot be
removed in the same migration as that through model as the schema will
pass through an inconsistent state. The autodetector should produce two
migrations to avoid this issue.
"""
changes = self.get_changes(
[self.author_with_m2m_through, self.publisher, self.contract],
[self.publisher],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["RemoveField", "RemoveField", "DeleteModel", "DeleteModel"],
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", model_name="contract"
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="publisher", model_name="contract"
)
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 3, name="Contract")
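    # A rough sketch of the single migration asserted above (hypothetical
    # operation list): the field removals come first so that the m2m owner
    # and the through model are only deleted once nothing references them.
    #
    #     operations = [
    #         migrations.RemoveField("contract", "author"),
    #         migrations.RemoveField("contract", "publisher"),
    #         migrations.DeleteModel("Author"),
    #         migrations.DeleteModel("Contract"),
    #     ]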
def test_concrete_field_changed_to_many_to_many(self):
"""
#23938 - Changing a concrete field into a ManyToManyField
first removes the concrete field and then adds the m2m field.
"""
changes = self.get_changes(
[self.author_with_former_m2m], [self.author_with_m2m, self.publisher]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="publishers", model_name="author"
)
self.assertOperationAttributes(
changes, "testapp", 0, 2, name="publishers", model_name="author"
)
def test_many_to_many_changed_to_concrete_field(self):
"""
#23938 - Changing a ManyToManyField into a concrete field
first removes the m2m field and then adds the concrete field.
"""
changes = self.get_changes(
[self.author_with_m2m, self.publisher], [self.author_with_former_m2m]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RemoveField", "DeleteModel", "AddField"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="publishers", model_name="author"
)
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertOperationAttributes(
changes, "testapp", 0, 2, name="publishers", model_name="author"
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, max_length=100)
def test_non_circular_foreignkey_dependency_removal(self):
"""
If two models with a ForeignKey from one to the other are removed at the
same time, the autodetector should remove them in the correct order.
"""
changes = self.get_changes(
[self.author_with_publisher, self.publisher_with_author], []
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", model_name="publisher"
)
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher")
def test_alter_model_options(self):
"""Changing a model's options should make a change."""
changes = self.get_changes([self.author_empty], [self.author_with_options])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
options={
"permissions": [("can_hire", "Can hire")],
"verbose_name": "Authi",
},
)
        # Changing them back to empty should also generate a migration.
changes = self.get_changes([self.author_with_options], [self.author_empty])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", options={}
)
def test_alter_model_options_proxy(self):
"""Changing a proxy model's options should also make a change."""
changes = self.get_changes(
[self.author_proxy, self.author_empty],
[self.author_proxy_options, self.author_empty],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="authorproxy",
options={"verbose_name": "Super Author"},
)
def test_set_alter_order_with_respect_to(self):
"""Setting order_with_respect_to adds a field."""
changes = self.get_changes(
[self.book, self.author_with_book],
[self.book, self.author_with_book_order_wrt],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterOrderWithRespectTo"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", order_with_respect_to="book"
)
def test_add_alter_order_with_respect_to(self):
"""
        Setting order_with_respect_to while also adding the FK generates the
        operations in the right order.
"""
changes = self.get_changes(
[self.author_name], [self.book, self.author_with_book_order_wrt]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AlterOrderWithRespectTo"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="book"
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="author", order_with_respect_to="book"
)
def test_remove_alter_order_with_respect_to(self):
"""
        Removing order_with_respect_to while also removing the FK generates
        the operations in the right order.
"""
changes = self.get_changes(
[self.book, self.author_with_book_order_wrt], [self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AlterOrderWithRespectTo", "RemoveField"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", order_with_respect_to=None
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, model_name="author", name="book"
)
def test_add_model_order_with_respect_to(self):
"""
        Setting order_with_respect_to when adding the whole model keeps the
        option inside the CreateModel operation.
"""
changes = self.get_changes([], [self.book, self.author_with_book_order_wrt])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={"order_with_respect_to": "book"},
)
self.assertNotIn(
"_order",
[name for name, field in changes["testapp"][0].operations[0].fields],
)
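    # order_with_respect_to is recorded purely as a model option here; the
    # implicit "_order" column is handled by CreateModel itself, which is
    # why the assertion above checks that no explicit "_order" field leaks
    # into the operation. A sketch of the assumed model:
    #
    #     class Author(models.Model):
    #         name = models.CharField(max_length=200)
    #         book = models.ForeignKey("otherapp.Book", models.CASCADE)
    #
    #         class Meta:
    #             order_with_respect_to = "book"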
def test_add_model_order_with_respect_to_unique_together(self):
changes = self.get_changes(
[],
[
self.book,
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"unique_together": {("id", "_order")},
},
),
],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={
"order_with_respect_to": "book",
"unique_together": {("id", "_order")},
},
)
def test_add_model_order_with_respect_to_index_constraint(self):
tests = [
(
"AddIndex",
{
"indexes": [
models.Index(fields=["_order"], name="book_order_idx"),
]
},
),
(
"AddConstraint",
{
"constraints": [
models.CheckConstraint(
check=models.Q(_order__gt=1),
name="book_order_gt_1",
),
]
},
),
]
for operation, extra_option in tests:
with self.subTest(operation=operation):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
**extra_option,
},
)
changes = self.get_changes([], [self.book, after])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"CreateModel",
operation,
],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={"order_with_respect_to": "book"},
)
def test_set_alter_order_with_respect_to_index_constraint_unique_together(self):
tests = [
(
"AddIndex",
{
"indexes": [
models.Index(fields=["_order"], name="book_order_idx"),
]
},
),
(
"AddConstraint",
{
"constraints": [
models.CheckConstraint(
check=models.Q(_order__gt=1),
name="book_order_gt_1",
),
]
},
),
("AlterUniqueTogether", {"unique_together": {("id", "_order")}}),
]
for operation, extra_option in tests:
with self.subTest(operation=operation):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
**extra_option,
},
)
changes = self.get_changes(
[self.book, self.author_with_book],
[self.book, after],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"AlterOrderWithRespectTo",
operation,
],
)
def test_alter_model_managers(self):
"""
        Changing the model managers generates an AlterModelManagers operation.
"""
changes = self.get_changes([self.other_pony], [self.other_pony_food])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterModelManagers"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="pony")
self.assertEqual(
[name for name, mgr in changes["otherapp"][0].operations[0].managers],
["food_qs", "food_mgr", "food_mgr_kwargs"],
)
self.assertEqual(
changes["otherapp"][0].operations[0].managers[1][1].args, ("a", "b", 1, 2)
)
self.assertEqual(
changes["otherapp"][0].operations[0].managers[2][1].args, ("x", "y", 3, 4)
)
def test_swappable_first_inheritance(self):
"""Swappable models get their CreateModel first."""
changes = self.get_changes([], [self.custom_user, self.aardvark])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(
changes, "thirdapp", 0, ["CreateModel", "CreateModel"]
)
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="CustomUser")
self.assertOperationAttributes(changes, "thirdapp", 0, 1, name="Aardvark")
def test_default_related_name_option(self):
model_state = ModelState(
"app",
"model",
[
("id", models.AutoField(primary_key=True)),
],
options={"default_related_name": "related_name"},
)
changes = self.get_changes([], [model_state])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
name="model",
options={"default_related_name": "related_name"},
)
altered_model_state = ModelState(
"app",
"Model",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes([model_state], [altered_model_state])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AlterModelOptions"])
self.assertOperationAttributes(changes, "app", 0, 0, name="model", options={})
@override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
def test_swappable_first_setting(self):
"""Swappable models get their CreateModel first."""
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(
changes, "thirdapp", 0, ["CreateModel", "CreateModel"]
)
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="CustomUser")
self.assertOperationAttributes(changes, "thirdapp", 0, 1, name="Aardvark")
def test_bases_first(self):
"""Bases of other models come first."""
changes = self.get_changes(
[], [self.aardvark_based_on_author, self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Aardvark")
def test_bases_first_mixed_case_app_label(self):
app_label = "MiXedCaseApp"
changes = self.get_changes(
[],
[
ModelState(
app_label,
"owner",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
app_label,
"place",
[
("id", models.AutoField(primary_key=True)),
(
"owner",
models.ForeignKey("MiXedCaseApp.owner", models.CASCADE),
),
],
),
ModelState(app_label, "restaurant", [], bases=("MiXedCaseApp.place",)),
],
)
self.assertNumberMigrations(changes, app_label, 1)
self.assertOperationTypes(
changes,
app_label,
0,
[
"CreateModel",
"CreateModel",
"CreateModel",
],
)
self.assertOperationAttributes(changes, app_label, 0, 0, name="owner")
self.assertOperationAttributes(changes, app_label, 0, 1, name="place")
self.assertOperationAttributes(changes, app_label, 0, 2, name="restaurant")
def test_multiple_bases(self):
"""
        Model inheritance doesn't move *_ptr fields into AddField operations.
"""
A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))])
B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))])
C = ModelState("app", "C", [], bases=("app.A", "app.B"))
D = ModelState("app", "D", [], bases=("app.A", "app.B"))
E = ModelState("app", "E", [], bases=("app.A", "app.B"))
changes = self.get_changes([], [A, B, C, D, E])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(
changes,
"app",
0,
["CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel"],
)
self.assertOperationAttributes(changes, "app", 0, 0, name="A")
self.assertOperationAttributes(changes, "app", 0, 1, name="B")
self.assertOperationAttributes(changes, "app", 0, 2, name="C")
self.assertOperationAttributes(changes, "app", 0, 3, name="D")
self.assertOperationAttributes(changes, "app", 0, 4, name="E")
def test_proxy_bases_first(self):
"""Bases of proxies come first."""
changes = self.get_changes(
[], [self.author_empty, self.author_proxy, self.author_proxy_proxy]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "CreateModel", "CreateModel"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy")
self.assertOperationAttributes(
changes, "testapp", 0, 2, name="AAuthorProxyProxy"
)
def test_pk_fk_included(self):
"""
A relation used as the primary key is kept as part of CreateModel.
"""
changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Aardvark")
def test_first_dependency(self):
"""
        A dependency on an app with no migrations uses __first__.
"""
# Load graph
loader = MigrationLoader(connection)
before = self.make_project_state([])
after = self.make_project_state([self.book_migrations_fk])
after.real_apps = {"migrations"}
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes(graph=loader.graph)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("migrations", "__first__")]
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_last_dependency(self):
"""
        A dependency on an app with existing migrations uses the
last migration of that app.
"""
# Load graph
loader = MigrationLoader(connection)
before = self.make_project_state([])
after = self.make_project_state([self.book_migrations_fk])
after.real_apps = {"migrations"}
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes(graph=loader.graph)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("migrations", "0002_second")]
)
def test_alter_fk_before_model_deletion(self):
"""
ForeignKeys are altered _before_ the model they used to
        refer to is deleted.
"""
changes = self.get_changes(
[self.author_name, self.publisher_with_author],
[self.aardvark_testapp, self.publisher_with_aardvark_author],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "AlterField", "DeleteModel"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Aardvark")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="author")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author")
def test_fk_dependency_other_app(self):
"""
#23100 - ForeignKeys correctly depend on other apps' models.
"""
changes = self.get_changes(
[self.author_name, self.book], [self.author_with_book, self.book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="book")
self.assertMigrationDependencies(
changes, "testapp", 0, [("otherapp", "__first__")]
)
def test_alter_unique_together_fk_to_m2m(self):
changes = self.get_changes(
[self.author_name, self.book_unique_together],
[
self.author_name,
ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ManyToManyField("testapp.Author")),
("title", models.CharField(max_length=200)),
],
),
],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["AlterUniqueTogether", "RemoveField", "AddField"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", name="author"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 2, model_name="book", name="author"
)
def test_alter_field_to_fk_dependency_other_app(self):
changes = self.get_changes(
[self.author_empty, self.book_with_no_author_fk],
[self.author_empty, self.book],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "__first__")]
)
def test_circular_dependency_mixed_addcreate(self):
"""
        #23315 - The dependency resolver knows to put all CreateModel
        operations before AddField operations so the graph doesn't become
        unsolvable.
"""
address = ModelState(
"a",
"Address",
[
("id", models.AutoField(primary_key=True)),
("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)),
],
)
person = ModelState(
"a",
"Person",
[
("id", models.AutoField(primary_key=True)),
],
)
apackage = ModelState(
"b",
"APackage",
[
("id", models.AutoField(primary_key=True)),
("person", models.ForeignKey("a.Person", models.CASCADE)),
],
)
country = ModelState(
"b",
"DeliveryCountry",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes([], [address, person, apackage, country])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "a", 0, ["CreateModel", "CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertOperationTypes(changes, "b", 0, ["CreateModel", "CreateModel"])
@override_settings(AUTH_USER_MODEL="a.Tenant")
def test_circular_dependency_swappable(self):
"""
#23322 - The dependency resolver knows to explicitly resolve
swappable models.
"""
with isolate_lru_cache(apps.get_swappable_settings_name):
tenant = ModelState(
"a",
"Tenant",
[
("id", models.AutoField(primary_key=True)),
("primary_address", models.ForeignKey("b.Address", models.CASCADE)),
],
bases=(AbstractBaseUser,),
)
address = ModelState(
"b",
"Address",
[
("id", models.AutoField(primary_key=True)),
(
"tenant",
models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE),
),
],
)
changes = self.get_changes([], [address, tenant])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertMigrationDependencies(changes, "a", 0, [])
self.assertMigrationDependencies(
changes, "a", 1, [("a", "auto_1"), ("b", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "b", 0, ["CreateModel"])
self.assertMigrationDependencies(
changes, "b", 0, [("__setting__", "AUTH_USER_MODEL")]
)
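    # The a.Tenant <-> b.Address cycle is broken by deferring the FK. A
    # rough sketch of the plan asserted above (hypothetical summary):
    #
    #     a/auto_1: CreateModel("Tenant")   # no dependencies
    #     b/auto_1: CreateModel("Address")  # depends on AUTH_USER_MODEL
    #     a/auto_2: AddField("tenant", "primary_address", ...)
    #               # depends on a/auto_1 and b/auto_1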
@override_settings(AUTH_USER_MODEL="b.Tenant")
def test_circular_dependency_swappable2(self):
"""
#23322 - The dependency resolver knows to explicitly resolve
swappable models but with the swappable not being the first migrated
model.
"""
with isolate_lru_cache(apps.get_swappable_settings_name):
address = ModelState(
"a",
"Address",
[
("id", models.AutoField(primary_key=True)),
(
"tenant",
models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE),
),
],
)
tenant = ModelState(
"b",
"Tenant",
[
("id", models.AutoField(primary_key=True)),
("primary_address", models.ForeignKey("a.Address", models.CASCADE)),
],
bases=(AbstractBaseUser,),
)
changes = self.get_changes([], [address, tenant])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertMigrationDependencies(changes, "a", 0, [])
self.assertMigrationDependencies(
changes, "a", 1, [("__setting__", "AUTH_USER_MODEL"), ("a", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "b", 0, ["CreateModel"])
self.assertMigrationDependencies(changes, "b", 0, [("a", "auto_1")])
@override_settings(AUTH_USER_MODEL="a.Person")
def test_circular_dependency_swappable_self(self):
"""
#23322 - The dependency resolver knows to explicitly resolve
swappable models.
"""
with isolate_lru_cache(apps.get_swappable_settings_name):
person = ModelState(
"a",
"Person",
[
("id", models.AutoField(primary_key=True)),
(
"parent1",
models.ForeignKey(
settings.AUTH_USER_MODEL,
models.CASCADE,
related_name="children",
),
),
],
)
changes = self.get_changes([], [person])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 1)
self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
self.assertMigrationDependencies(changes, "a", 0, [])
@override_settings(AUTH_USER_MODEL="a.User")
def test_swappable_circular_multi_mti(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
parent = ModelState(
"a",
"Parent",
[("user", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE))],
)
child = ModelState("a", "Child", [], bases=("a.Parent",))
user = ModelState("a", "User", [], bases=(AbstractBaseUser, "a.Child"))
changes = self.get_changes([], [parent, child, user])
self.assertNumberMigrations(changes, "a", 1)
self.assertOperationTypes(
changes, "a", 0, ["CreateModel", "CreateModel", "CreateModel", "AddField"]
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_blank_textfield_and_charfield(self, mocked_ask_method):
"""
#23405 - Adding a NOT NULL and blank `CharField` or `TextField`
without default should not prompt for a default.
"""
changes = self.get_changes(
[self.author_empty], [self.author_with_biography_blank]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField", "AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition"
)
def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method):
"""
#23405 - Adding a NOT NULL and non-blank `CharField` or `TextField`
without default should prompt for a default.
"""
changes = self.get_changes(
[self.author_empty], [self.author_with_biography_non_blank]
)
self.assertEqual(mocked_ask_method.call_count, 2)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField", "AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0)
def test_mti_inheritance_model_removal(self):
Animal = ModelState(
"app",
"Animal",
[
("id", models.AutoField(primary_key=True)),
],
)
Dog = ModelState("app", "Dog", [], bases=("app.Animal",))
changes = self.get_changes([Animal, Dog], [Animal])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["DeleteModel"])
self.assertOperationAttributes(changes, "app", 0, 0, name="Dog")
def test_add_model_with_field_removed_from_base_model(self):
"""
Removing a base field takes place before adding a new inherited model
that has a field with the same name.
"""
before = [
ModelState(
"app",
"readable",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
],
),
]
after = [
ModelState(
"app",
"readable",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"app",
"book",
[
("title", models.CharField(max_length=200)),
],
bases=("app.readable",),
),
]
changes = self.get_changes(before, after)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RemoveField", "CreateModel"])
self.assertOperationAttributes(
changes, "app", 0, 0, name="title", model_name="readable"
)
self.assertOperationAttributes(changes, "app", 0, 1, name="book")
def test_parse_number(self):
tests = [
("no_number", None),
("0001_initial", 1),
("0002_model3", 2),
("0002_auto_20380101_1112", 2),
("0002_squashed_0003", 3),
("0002_model2_squashed_0003_other4", 3),
("0002_squashed_0003_squashed_0004", 4),
("0002_model2_squashed_0003_other4_squashed_0005_other6", 5),
("0002_custom_name_20380101_1112_squashed_0003_model", 3),
("2_squashed_4", 4),
]
for migration_name, expected_number in tests:
with self.subTest(migration_name=migration_name):
self.assertEqual(
MigrationAutodetector.parse_number(migration_name),
expected_number,
)
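    # The rule the cases above exercise: the leading integer names the
    # migration, and each "squashed_<n>" suffix supersedes it, so the last
    # squashed number wins. For example (taken from the table above):
    #
    #     MigrationAutodetector.parse_number(
    #         "0002_squashed_0003_squashed_0004"
    #     )  # -> 4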
def test_add_custom_fk_with_hardcoded_to(self):
class HardcodedForeignKey(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs["to"] = "testapp.Author"
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["to"]
return name, path, args, kwargs
book_hardcoded_fk_to = ModelState(
"testapp",
"Book",
[
("author", HardcodedForeignKey(on_delete=models.CASCADE)),
],
)
changes = self.get_changes(
[self.author_empty],
[self.author_empty, book_hardcoded_fk_to],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Book")
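    # HardcodedForeignKey mimics third-party fields that hardwire their
    # target and omit "to" from deconstruction. A sketch of what the test
    # relies on (hypothetical usage, not part of the fixture):
    #
    #     field = HardcodedForeignKey(on_delete=models.CASCADE)
    #     name, path, args, kwargs = field.deconstruct()
    #     assert "to" not in kwargs  # __init__ re-injects it on reload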
@ignore_warnings(category=RemovedInDjango51Warning)
class AutodetectorIndexTogetherTests(BaseAutodetectorTests):
book_index_together = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("author", "title")},
},
)
book_index_together_2 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "author")},
},
)
book_index_together_3 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "newfield")},
},
)
book_index_together_4 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield2", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "newfield2")},
},
)
def test_empty_index_together(self):
"""Empty index_together shouldn't generate a migration."""
# Explicitly testing for not specified, since this is the case after
        # a CreateModel operation without any definition on the original model.
model_state_not_specified = ModelState(
"a", "model", [("id", models.AutoField(primary_key=True))]
)
# Explicitly testing for None, since this was the issue in #23452 after
# an AlterIndexTogether operation with e.g. () as value
model_state_none = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"index_together": None,
},
)
# Explicitly testing for the empty set, since we now always have sets.
# During removal (('col1', 'col2'),) --> () this becomes set([])
model_state_empty = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"index_together": set(),
},
)
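        # Helper: migrating between the two states must produce no operations.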
def test(from_state, to_state, msg):
changes = self.get_changes([from_state], [to_state])
if changes:
ops = ", ".join(
o.__class__.__name__ for o in changes["a"][0].operations
)
self.fail("Created operation(s) %s from %s" % (ops, msg))
tests = (
(
model_state_not_specified,
model_state_not_specified,
'"not specified" to "not specified"',
),
(model_state_not_specified, model_state_none, '"not specified" to "None"'),
(
model_state_not_specified,
model_state_empty,
'"not specified" to "empty"',
),
(model_state_none, model_state_not_specified, '"None" to "not specified"'),
(model_state_none, model_state_none, '"None" to "None"'),
(model_state_none, model_state_empty, '"None" to "empty"'),
(
model_state_empty,
model_state_not_specified,
'"empty" to "not specified"',
),
(model_state_empty, model_state_none, '"empty" to "None"'),
(model_state_empty, model_state_empty, '"empty" to "empty"'),
)
for t in tests:
test(*t)
def test_rename_index_together_to_index(self):
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, AutodetectorTests.book_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RenameIndex"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
model_name="book",
new_name="book_title_author_idx",
old_fields=("author", "title"),
)
def test_rename_index_together_to_index_extra_options(self):
# Indexes with extra options don't match indexes in index_together.
book_partial_index = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(
fields=["author", "title"],
condition=models.Q(title__startswith="The"),
name="book_title_author_idx",
)
],
},
)
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, book_partial_index],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether", "AddIndex"],
)
def test_rename_index_together_to_index_order_fields(self):
# Indexes with reordered fields don't match indexes in index_together.
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, AutodetectorTests.book_unordered_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether", "AddIndex"],
)
def test_add_index_together(self):
changes = self.get_changes(
[AutodetectorTests.author_empty, AutodetectorTests.book],
[AutodetectorTests.author_empty, self.book_index_together],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterIndexTogether"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", index_together={("author", "title")}
)
def test_remove_index_together(self):
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, AutodetectorTests.book],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterIndexTogether"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", index_together=set()
)
def test_index_together_remove_fk(self):
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, AutodetectorTests.book_with_no_author],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", index_together=set()
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", name="author"
)
def test_index_together_no_changes(self):
"""
index_together doesn't generate a migration if no changes have been
made.
"""
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, self.book_index_together],
)
self.assertEqual(len(changes), 0)
def test_index_together_ordering(self):
"""index_together triggers on ordering changes."""
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, self.book_index_together_2],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
index_together={("title", "author")},
)
def test_add_field_and_index_together(self):
"""
        Added fields are created before they're used in index_together.
"""
changes = self.get_changes(
[AutodetectorTests.author_empty, AutodetectorTests.book],
[AutodetectorTests.author_empty, self.book_index_together_3],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AddField", "AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
index_together={("title", "newfield")},
)
def test_create_model_and_index_together(self):
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
)
book_with_author = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("otherapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "author")},
},
)
changes = self.get_changes(
[AutodetectorTests.book_with_no_author], [author, book_with_author]
)
self.assertEqual(len(changes["otherapp"]), 1)
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 3)
self.assertOperationTypes(
changes,
"otherapp",
0,
["CreateModel", "AddField", "AlterIndexTogether"],
)
def test_remove_field_and_index_together(self):
"""
        Removed fields are removed after index_together is updated.
"""
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together_3],
[AutodetectorTests.author_empty, self.book_index_together],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
index_together={("author", "title")},
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
model_name="book",
name="newfield",
)
def test_alter_field_and_index_together(self):
"""Fields are altered after deleting some index_together."""
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField(db_index=True)),
],
{
"index_together": {("name",)},
},
)
author_reversed_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, unique=True)),
("age", models.IntegerField()),
],
{
"index_together": {("age",)},
},
)
changes = self.get_changes([initial_author], [author_reversed_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"AlterIndexTogether",
"AlterField",
"AlterField",
"AlterIndexTogether",
],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
index_together=set(),
)
self.assertOperationAttributes(
changes,
"testapp",
0,
1,
model_name="author",
name="age",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
2,
model_name="author",
name="name",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
3,
name="author",
index_together={("age",)},
)
def test_partly_alter_index_together_increase(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"index_together": {("name",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"index_together": {("name",), ("age",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
index_together={("name",), ("age",)},
)
def test_partly_alter_index_together_decrease(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"index_together": {("name",), ("age",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"index_together": {("age",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
index_together={("age",)},
)
def test_rename_field_and_index_together(self):
"""Fields are renamed before updating index_together."""
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together_3],
[AutodetectorTests.author_empty, self.book_index_together_4],
MigrationQuestioner({"ask_rename": True}),
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["RenameField", "AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
index_together={("title", "newfield2")},
)
def test_add_model_order_with_respect_to_index_together(self):
changes = self.get_changes(
[],
[
AutodetectorTests.book,
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"index_together": {("name", "_order")},
},
),
],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={
"order_with_respect_to": "book",
"index_together": {("name", "_order")},
},
)
def test_set_alter_order_with_respect_to_index_together(self):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"index_together": {("name", "_order")},
},
)
changes = self.get_changes(
[AutodetectorTests.book, AutodetectorTests.author_with_book],
[AutodetectorTests.book, after],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterOrderWithRespectTo", "AlterIndexTogether"],
)
class MigrationSuggestNameTests(SimpleTestCase):
def test_no_operations(self):
class Migration(migrations.Migration):
operations = []
migration = Migration("some_migration", "test_app")
self.assertIs(migration.suggest_name().startswith("auto_"), True)
def test_no_operations_initial(self):
class Migration(migrations.Migration):
initial = True
operations = []
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "initial")
def test_single_operation(self):
class Migration(migrations.Migration):
operations = [migrations.CreateModel("Person", fields=[])]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "person")
class Migration(migrations.Migration):
operations = [migrations.DeleteModel("Person")]
migration = Migration("0002_initial", "test_app")
self.assertEqual(migration.suggest_name(), "delete_person")
def test_single_operation_long_name(self):
class Migration(migrations.Migration):
operations = [migrations.CreateModel("A" * 53, fields=[])]
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "a" * 53)
def test_two_operations(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.DeleteModel("Animal"),
]
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "person_delete_animal")
def test_two_create_models(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.CreateModel("Animal", fields=[]),
]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "person_animal")
def test_two_create_models_with_initial_true(self):
class Migration(migrations.Migration):
initial = True
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.CreateModel("Animal", fields=[]),
]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "initial")
def test_many_operations_suffix(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person1", fields=[]),
migrations.CreateModel("Person2", fields=[]),
migrations.CreateModel("Person3", fields=[]),
migrations.DeleteModel("Person4"),
migrations.DeleteModel("Person5"),
]
migration = Migration("some_migration", "test_app")
self.assertEqual(
migration.suggest_name(),
"person1_person2_person3_delete_person4_and_more",
)
def test_operation_with_no_suggested_name(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.RunSQL("SELECT 1 FROM person;"),
]
migration = Migration("some_migration", "test_app")
self.assertIs(migration.suggest_name().startswith("auto_"), True)
def test_operation_with_invalid_chars_in_suggested_name(self):
class Migration(migrations.Migration):
operations = [
migrations.AddConstraint(
"Person",
models.UniqueConstraint(
fields=["name"], name="person.name-*~unique!"
),
),
]
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "person_person_name_unique_")
def test_none_name(self):
class Migration(migrations.Migration):
operations = [migrations.RunSQL("SELECT 1 FROM person;")]
migration = Migration("0001_initial", "test_app")
suggest_name = migration.suggest_name()
self.assertIs(suggest_name.startswith("auto_"), True)
def test_none_name_with_initial_true(self):
class Migration(migrations.Migration):
initial = True
operations = [migrations.RunSQL("SELECT 1 FROM person;")]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "initial")
def test_auto(self):
migration = migrations.Migration("0001_initial", "test_app")
suggest_name = migration.suggest_name()
self.assertIs(suggest_name.startswith("auto_"), True)
import gzip
import os
import sys
import tempfile
import unittest
import warnings
from io import StringIO
from unittest import mock
from django.apps import apps
from django.contrib.sites.models import Site
from django.core import management
from django.core.files.temp import NamedTemporaryFile
from django.core.management import CommandError
from django.core.management.commands.dumpdata import ProxyModelWarning
from django.core.serializers.base import ProgressBar
from django.db import IntegrityError, connection
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import (
Article,
Category,
CircularA,
CircularB,
NaturalKeyThing,
PrimaryKeyUUIDModel,
ProxySpy,
Spy,
Tag,
Visa,
)
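# bz2 and lzma are optional in some Python builds; record their availability
# for the compression tests and skip decorators below.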
try:
import bz2 # NOQA
HAS_BZ2 = True
except ImportError:
HAS_BZ2 = False
try:
import lzma # NOQA
HAS_LZMA = True
except ImportError:
HAS_LZMA = False
class TestCaseFixtureLoadingTests(TestCase):
fixtures = ["fixture1.json", "fixture2.json"]
def test_class_fixtures(self):
"Test case has installed 3 fixture objects"
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"Django conquers world!",
"Copyright is fine the way it is",
"Poker has no place on ESPN",
],
)
class SubclassTestCaseFixtureLoadingTests(TestCaseFixtureLoadingTests):
"""
Make sure that subclasses can remove fixtures from parent class (#21089).
"""
fixtures = []
def test_class_fixtures(self):
"There were no fixture objects installed"
self.assertEqual(Article.objects.count(), 0)
class DumpDataAssertMixin:
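    """Mixin providing _dumpdata_assert() to compare dumpdata output."""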
def _dumpdata_assert(
self,
args,
output,
format="json",
filename=None,
natural_foreign_keys=False,
natural_primary_keys=False,
use_base_manager=False,
exclude_list=[],
primary_keys="",
):
new_io = StringIO()
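        # With a filename, write the dump to a file in the temp dir; otherwise
        # capture it from stdout.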
filename = filename and os.path.join(tempfile.gettempdir(), filename)
management.call_command(
"dumpdata",
*args,
format=format,
stdout=new_io,
stderr=new_io,
output=filename,
use_natural_foreign_keys=natural_foreign_keys,
use_natural_primary_keys=natural_primary_keys,
use_base_manager=use_base_manager,
exclude=exclude_list,
primary_keys=primary_keys,
)
if filename:
file_root, file_ext = os.path.splitext(filename)
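            # Map each extension to (open function, path dumpdata actually
            # wrote). When the compression library is unavailable (or the
            # extension is unsupported, like .zip), dumpdata falls back to an
            # uncompressed file at the extension-stripped path.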
compression_formats = {
".bz2": (open, file_root),
".gz": (gzip.open, filename),
".lzma": (open, file_root),
".xz": (open, file_root),
".zip": (open, file_root),
}
if HAS_BZ2:
compression_formats[".bz2"] = (bz2.open, filename)
if HAS_LZMA:
compression_formats[".lzma"] = (lzma.open, filename)
compression_formats[".xz"] = (lzma.open, filename)
try:
open_method, file_path = compression_formats[file_ext]
except KeyError:
open_method, file_path = open, filename
with open_method(file_path, "rt") as f:
command_output = f.read()
os.remove(file_path)
else:
command_output = new_io.getvalue().strip()
if format == "json":
self.assertJSONEqual(command_output, output)
elif format == "xml":
self.assertXMLEqual(command_output, output)
else:
self.assertEqual(command_output, output)
class FixtureLoadingTests(DumpDataAssertMixin, TestCase):
def test_loading_and_dumping(self):
apps.clear_cache()
Site.objects.all().delete()
# Load fixture 1. Single JSON file, with two objects.
management.call_command("loaddata", "fixture1.json", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
["Time to reform copyright", "Poker has no place on ESPN"],
)
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Try just dumping the contents of fixtures.Category
self._dumpdata_assert(
["fixtures.Category"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}]',
)
# ...and just fixtures.Article
self._dumpdata_assert(
["fixtures.Article"],
'[{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# ...and both
self._dumpdata_assert(
["fixtures.Category", "fixtures.Article"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Specify a specific model twice
self._dumpdata_assert(
["fixtures.Article", "fixtures.Article"],
(
'[{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]'
),
)
        # Request a dump that includes Article both explicitly and implicitly.
self._dumpdata_assert(
["fixtures.Article", "fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
        # Request a dump that includes Article both explicitly and implicitly,
        # but list the app first (#22025).
self._dumpdata_assert(
["fixtures", "fixtures.Article"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Same again, but specify in the reverse order
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Specify one model from one application, and an entire other application.
self._dumpdata_assert(
["fixtures.Category", "sites"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 1, "model": "sites.site", "fields": '
'{"domain": "example.com", "name": "example.com"}}]',
)
# Load fixture 2. JSON file imported by default. Overwrites some
# existing objects.
management.call_command("loaddata", "fixture2.json", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"Django conquers world!",
"Copyright is fine the way it is",
"Poker has no place on ESPN",
],
)
# Load fixture 3, XML format.
management.call_command("loaddata", "fixture3.xml", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"XML identified as leading cause of cancer",
"Django conquers world!",
"Copyright is fine the way it is",
"Poker on TV is great!",
],
)
# Load fixture 6, JSON file with dynamic ContentType fields. Testing ManyToOne.
management.call_command("loaddata", "fixture6.json", verbosity=0)
self.assertQuerySetEqual(
Tag.objects.all(),
[
'<Tag: <Article: Copyright is fine the way it is> tagged "copyright">',
'<Tag: <Article: Copyright is fine the way it is> tagged "law">',
],
transform=repr,
ordered=False,
)
# Load fixture 7, XML file with dynamic ContentType fields. Testing ManyToOne.
management.call_command("loaddata", "fixture7.xml", verbosity=0)
self.assertQuerySetEqual(
Tag.objects.all(),
[
'<Tag: <Article: Copyright is fine the way it is> tagged "copyright">',
'<Tag: <Article: Copyright is fine the way it is> tagged "legal">',
'<Tag: <Article: Django conquers world!> tagged "django">',
'<Tag: <Article: Django conquers world!> tagged "world domination">',
],
transform=repr,
ordered=False,
)
# Load fixture 8, JSON file with dynamic Permission fields. Testing ManyToMany.
management.call_command("loaddata", "fixture8.json", verbosity=0)
self.assertQuerySetEqual(
Visa.objects.all(),
[
"<Visa: Django Reinhardt Can add user, Can change user, Can delete "
"user>",
"<Visa: Stephane Grappelli Can add user>",
"<Visa: Prince >",
],
transform=repr,
ordered=False,
)
# Load fixture 9, XML file with dynamic Permission fields. Testing ManyToMany.
management.call_command("loaddata", "fixture9.xml", verbosity=0)
self.assertQuerySetEqual(
Visa.objects.all(),
[
"<Visa: Django Reinhardt Can add user, Can change user, Can delete "
"user>",
"<Visa: Stephane Grappelli Can add user, Can delete user>",
'<Visa: Artist formerly known as "Prince" Can change user>',
],
transform=repr,
ordered=False,
)
# object list is unaffected
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"XML identified as leading cause of cancer",
"Django conquers world!",
"Copyright is fine the way it is",
"Poker on TV is great!",
],
)
# By default, you get raw keys on dumpdata
self._dumpdata_assert(
["fixtures.book"],
'[{"pk": 1, "model": "fixtures.book", "fields": '
'{"name": "Music for all ages", "authors": [3, 1]}}]',
)
# But you can get natural keys if you ask for them and they are available
self._dumpdata_assert(
["fixtures.book"],
'[{"pk": 1, "model": "fixtures.book", "fields": '
'{"name": "Music for all ages", "authors": '
'[["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]',
natural_foreign_keys=True,
)
# You can also omit the primary keys for models that we can get later
# with natural keys.
self._dumpdata_assert(
["fixtures.person"],
'[{"fields": {"name": "Django Reinhardt"}, "model": "fixtures.person"}, '
'{"fields": {"name": "Stephane Grappelli"}, "model": "fixtures.person"}, '
'{"fields": {"name": "Artist formerly known as \\"Prince\\""}, '
'"model": "fixtures.person"}]',
natural_primary_keys=True,
)
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker on TV is great!", '
'"pub_date": "2006-06-16T11:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", '
'"pub_date": "2006-06-16T14:00:00"}}, '
'{"pk": 4, "model": "fixtures.article", "fields": '
'{"headline": "Django conquers world!", '
'"pub_date": "2006-06-16T15:00:00"}}, '
'{"pk": 5, "model": "fixtures.article", "fields": '
'{"headline": "XML identified as leading cause of cancer", '
'"pub_date": "2006-06-16T16:00:00"}}, '
'{"pk": 1, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "copyright", '
'"tagged_id": 3}}, '
'{"pk": 2, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "legal", '
'"tagged_id": 3}}, '
'{"pk": 3, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "django", '
'"tagged_id": 4}}, '
'{"pk": 4, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "world domination", '
'"tagged_id": 4}}, '
'{"pk": 1, "model": "fixtures.person", '
'"fields": {"name": "Django Reinhardt"}}, '
'{"pk": 2, "model": "fixtures.person", '
'"fields": {"name": "Stephane Grappelli"}}, '
'{"pk": 3, "model": "fixtures.person", '
'"fields": {"name": "Artist formerly known as \\"Prince\\""}}, '
'{"pk": 1, "model": "fixtures.visa", '
'"fields": {"person": ["Django Reinhardt"], "permissions": '
'[["add_user", "auth", "user"], ["change_user", "auth", "user"], '
'["delete_user", "auth", "user"]]}}, '
'{"pk": 2, "model": "fixtures.visa", "fields": '
'{"person": ["Stephane Grappelli"], "permissions": '
'[["add_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, '
'{"pk": 3, "model": "fixtures.visa", "fields": '
'{"person": ["Artist formerly known as \\"Prince\\""], "permissions": '
'[["change_user", "auth", "user"]]}}, '
'{"pk": 1, "model": "fixtures.book", "fields": '
'{"name": "Music for all ages", "authors": '
'[["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]',
natural_foreign_keys=True,
)
# Dump the current contents of the database as an XML fixture
self._dumpdata_assert(
["fixtures"],
'<?xml version="1.0" encoding="utf-8"?><django-objects version="1.0">'
'<object pk="1" model="fixtures.category">'
'<field type="CharField" name="title">News Stories</field>'
'<field type="TextField" name="description">Latest news stories</field>'
"</object>"
'<object pk="2" model="fixtures.article">'
'<field type="CharField" name="headline">Poker on TV is great!</field>'
'<field type="DateTimeField" name="pub_date">2006-06-16T11:00:00</field>'
"</object>"
'<object pk="3" model="fixtures.article">'
'<field type="CharField" name="headline">Copyright is fine the way it is'
"</field>"
'<field type="DateTimeField" name="pub_date">2006-06-16T14:00:00</field>'
"</object>"
'<object pk="4" model="fixtures.article">'
'<field type="CharField" name="headline">Django conquers world!</field>'
'<field type="DateTimeField" name="pub_date">2006-06-16T15:00:00</field>'
"</object>"
'<object pk="5" model="fixtures.article">'
'<field type="CharField" name="headline">'
"XML identified as leading cause of cancer</field>"
'<field type="DateTimeField" name="pub_date">2006-06-16T16:00:00</field>'
"</object>"
'<object pk="1" model="fixtures.tag">'
'<field type="CharField" name="name">copyright</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural>'
"</field>"
'<field type="PositiveIntegerField" name="tagged_id">3</field>'
"</object>"
'<object pk="2" model="fixtures.tag">'
'<field type="CharField" name="name">legal</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural>'
"</field>"
'<field type="PositiveIntegerField" name="tagged_id">3</field></object>'
'<object pk="3" model="fixtures.tag">'
'<field type="CharField" name="name">django</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural>'
"</field>"
'<field type="PositiveIntegerField" name="tagged_id">4</field>'
"</object>"
'<object pk="4" model="fixtures.tag">'
'<field type="CharField" name="name">world domination</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural>'
"</field>"
'<field type="PositiveIntegerField" name="tagged_id">4</field>'
"</object>"
'<object pk="1" model="fixtures.person">'
'<field type="CharField" name="name">Django Reinhardt</field>'
"</object>"
'<object pk="2" model="fixtures.person">'
'<field type="CharField" name="name">Stephane Grappelli</field>'
"</object>"
'<object pk="3" model="fixtures.person">'
'<field type="CharField" name="name">Artist formerly known as "Prince"'
"</field>"
"</object>"
'<object pk="1" model="fixtures.visa">'
'<field to="fixtures.person" name="person" rel="ManyToOneRel">'
"<natural>Django Reinhardt</natural></field>"
'<field to="auth.permission" name="permissions" rel="ManyToManyRel">'
"<object><natural>add_user</natural><natural>auth</natural>"
"<natural>user</natural></object><object><natural>change_user</natural>"
"<natural>auth</natural><natural>user</natural></object>"
"<object><natural>delete_user</natural><natural>auth</natural>"
"<natural>user</natural></object></field>"
"</object>"
'<object pk="2" model="fixtures.visa">'
'<field to="fixtures.person" name="person" rel="ManyToOneRel">'
"<natural>Stephane Grappelli</natural></field>"
'<field to="auth.permission" name="permissions" rel="ManyToManyRel">'
"<object><natural>add_user</natural><natural>auth</natural>"
"<natural>user</natural></object>"
"<object><natural>delete_user</natural><natural>auth</natural>"
"<natural>user</natural></object></field>"
"</object>"
'<object pk="3" model="fixtures.visa">'
'<field to="fixtures.person" name="person" rel="ManyToOneRel">'
'<natural>Artist formerly known as "Prince"</natural></field>'
'<field to="auth.permission" name="permissions" rel="ManyToManyRel">'
"<object><natural>change_user</natural><natural>auth</natural>"
"<natural>user</natural></object></field>"
"</object>"
'<object pk="1" model="fixtures.book">'
'<field type="CharField" name="name">Music for all ages</field>'
'<field to="fixtures.person" name="authors" rel="ManyToManyRel">'
'<object><natural>Artist formerly known as "Prince"</natural></object>'
"<object><natural>Django Reinhardt</natural></object></field>"
"</object></django-objects>",
format="xml",
natural_foreign_keys=True,
)
def test_dumpdata_with_excludes(self):
# Load fixture1 which has a site, two articles, and a category
Site.objects.all().delete()
management.call_command("loaddata", "fixture1.json", verbosity=0)
# Excluding fixtures app should only leave sites
self._dumpdata_assert(
["sites", "fixtures"],
'[{"pk": 1, "model": "sites.site", "fields": '
'{"domain": "example.com", "name": "example.com"}}]',
exclude_list=["fixtures"],
)
# Excluding fixtures.Article/Book should leave fixtures.Category
self._dumpdata_assert(
["sites", "fixtures"],
'[{"pk": 1, "model": "sites.site", '
'"fields": {"domain": "example.com", "name": "example.com"}}, '
'{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}]',
exclude_list=["fixtures.Article", "fixtures.Book"],
)
# Excluding fixtures and fixtures.Article/Book should be a no-op
self._dumpdata_assert(
["sites", "fixtures"],
'[{"pk": 1, "model": "sites.site", '
'"fields": {"domain": "example.com", "name": "example.com"}}, '
'{"pk": 1, "model": "fixtures.category", '
'"fields": {"description": "Latest news stories", '
'"title": "News Stories"}}]',
exclude_list=["fixtures.Article", "fixtures.Book"],
)
# Excluding sites and fixtures.Article/Book should only leave fixtures.Category
self._dumpdata_assert(
["sites", "fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}]',
exclude_list=["fixtures.Article", "fixtures.Book", "sites"],
)
# Excluding a bogus app should throw an error
with self.assertRaisesMessage(
management.CommandError, "No installed app with label 'foo_app'."
):
self._dumpdata_assert(["fixtures", "sites"], "", exclude_list=["foo_app"])
# Excluding a bogus model should throw an error
with self.assertRaisesMessage(
management.CommandError, "Unknown model: fixtures.FooModel"
):
self._dumpdata_assert(
["fixtures", "sites"], "", exclude_list=["fixtures.FooModel"]
)
@unittest.skipIf(
sys.platform == "win32", "Windows doesn't support '?' in filenames."
)
def test_load_fixture_with_special_characters(self):
management.call_command("loaddata", "fixture_with[special]chars", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"How To Deal With Special Characters",
)
def test_dumpdata_with_filtering_manager(self):
spy1 = Spy.objects.create(name="Paul")
spy2 = Spy.objects.create(name="Alex", cover_blown=True)
self.assertSequenceEqual(Spy.objects.all(), [spy1])
# Use the default manager
self._dumpdata_assert(
["fixtures.Spy"],
'[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]'
% spy1.pk,
)
# Dump using Django's base manager. Should return all objects,
# even those normally filtered by the manager
self._dumpdata_assert(
["fixtures.Spy"],
'[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": true}}, '
'{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]'
% (spy2.pk, spy1.pk),
use_base_manager=True,
)
def test_dumpdata_with_pks(self):
management.call_command("loaddata", "fixture1.json", verbosity=0)
management.call_command("loaddata", "fixture2.json", verbosity=0)
self._dumpdata_assert(
["fixtures.Article"],
'[{"pk": 2, "model": "fixtures.article", '
'"fields": {"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", '
'"pub_date": "2006-06-16T14:00:00"}}]',
primary_keys="2,3",
)
self._dumpdata_assert(
["fixtures.Article"],
'[{"pk": 2, "model": "fixtures.article", '
'"fields": {"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}]',
primary_keys="2",
)
with self.assertRaisesMessage(
management.CommandError, "You can only use --pks option with one model"
):
self._dumpdata_assert(
["fixtures"],
'[{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", '
'"pub_date": "2006-06-16T14:00:00"}}]',
primary_keys="2,3",
)
with self.assertRaisesMessage(
management.CommandError, "You can only use --pks option with one model"
):
self._dumpdata_assert(
"",
'[{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", '
'"pub_date": "2006-06-16T14:00:00"}}]',
primary_keys="2,3",
)
with self.assertRaisesMessage(
management.CommandError, "You can only use --pks option with one model"
):
self._dumpdata_assert(
["fixtures.Article", "fixtures.category"],
'[{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", '
'"pub_date": "2006-06-16T14:00:00"}}]',
primary_keys="2,3",
)
def test_dumpdata_with_uuid_pks(self):
m1 = PrimaryKeyUUIDModel.objects.create()
m2 = PrimaryKeyUUIDModel.objects.create()
output = StringIO()
management.call_command(
"dumpdata",
"fixtures.PrimaryKeyUUIDModel",
"--pks",
", ".join([str(m1.id), str(m2.id)]),
stdout=output,
)
result = output.getvalue()
self.assertIn('"pk": "%s"' % m1.id, result)
self.assertIn('"pk": "%s"' % m2.id, result)
def test_dumpdata_with_file_output(self):
management.call_command("loaddata", "fixture1.json", verbosity=0)
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
filename="dumpdata.json",
)
def test_dumpdata_with_file_gzip_output(self):
management.call_command("loaddata", "fixture1.json", verbosity=0)
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
filename="dumpdata.json.gz",
)
@unittest.skipUnless(HAS_BZ2, "No bz2 library detected.")
def test_dumpdata_with_file_bz2_output(self):
management.call_command("loaddata", "fixture1.json", verbosity=0)
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
filename="dumpdata.json.bz2",
)
@unittest.skipUnless(HAS_LZMA, "No lzma library detected.")
def test_dumpdata_with_file_lzma_output(self):
management.call_command("loaddata", "fixture1.json", verbosity=0)
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
filename="dumpdata.json.lzma",
)
@unittest.skipUnless(HAS_LZMA, "No lzma library detected.")
def test_dumpdata_with_file_xz_output(self):
management.call_command("loaddata", "fixture1.json", verbosity=0)
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
filename="dumpdata.json.xz",
)
def test_dumpdata_with_file_zip_output(self):
management.call_command("loaddata", "fixture1.json", verbosity=0)
msg = "Unsupported file extension (.zip). Fixtures saved in 'dumpdata.json'."
with self.assertWarnsMessage(RuntimeWarning, msg):
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
filename="dumpdata.json.zip",
)
def test_dumpdata_progressbar(self):
"""
Dumpdata shows a progress bar on the command line when --output is set,
stdout is a tty, and verbosity > 0.
"""
management.call_command("loaddata", "fixture1.json", verbosity=0)
new_io = StringIO()
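        # Pretend stdout is a tty so the progress bar is rendered.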
new_io.isatty = lambda: True
with NamedTemporaryFile() as file:
options = {
"format": "json",
"stdout": new_io,
"stderr": new_io,
"output": file.name,
}
management.call_command("dumpdata", "fixtures", **options)
self.assertTrue(
new_io.getvalue().endswith(
"[" + "." * ProgressBar.progress_width + "]\n"
)
)
# Test no progress bar when verbosity = 0
options["verbosity"] = 0
new_io = StringIO()
new_io.isatty = lambda: True
options.update({"stdout": new_io, "stderr": new_io})
management.call_command("dumpdata", "fixtures", **options)
self.assertEqual(new_io.getvalue(), "")
def test_dumpdata_proxy_without_concrete(self):
"""
A warning is displayed if a proxy model is dumped without its concrete
parent.
"""
ProxySpy.objects.create(name="Paul")
msg = "fixtures.ProxySpy is a proxy model and won't be serialized."
with self.assertWarnsMessage(ProxyModelWarning, msg):
self._dumpdata_assert(["fixtures.ProxySpy"], "[]")
def test_dumpdata_proxy_with_concrete(self):
"""
A warning isn't displayed if a proxy model is dumped with its concrete
parent.
"""
spy = ProxySpy.objects.create(name="Paul")
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter("always")
self._dumpdata_assert(
["fixtures.ProxySpy", "fixtures.Spy"],
'[{"pk": %d, "model": "fixtures.spy", '
'"fields": {"cover_blown": false}}]' % spy.pk,
)
self.assertEqual(len(warning_list), 0)
def test_compress_format_loading(self):
# Load fixture 4 (compressed), using format specification
management.call_command("loaddata", "fixture4.json", verbosity=0)
self.assertEqual(Article.objects.get().headline, "Django pets kitten")
def test_compressed_specified_loading(self):
# Load fixture 5 (compressed), using format *and* compression specification
management.call_command("loaddata", "fixture5.json.zip", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
def test_compressed_loading(self):
# Load fixture 5 (compressed), only compression specification
management.call_command("loaddata", "fixture5.zip", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
def test_compressed_loading_gzip(self):
management.call_command("loaddata", "fixture5.json.gz", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
@unittest.skipUnless(HAS_BZ2, "No bz2 library detected.")
def test_compressed_loading_bz2(self):
management.call_command("loaddata", "fixture5.json.bz2", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
@unittest.skipUnless(HAS_LZMA, "No lzma library detected.")
def test_compressed_loading_lzma(self):
management.call_command("loaddata", "fixture5.json.lzma", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
@unittest.skipUnless(HAS_LZMA, "No lzma library detected.")
def test_compressed_loading_xz(self):
management.call_command("loaddata", "fixture5.json.xz", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
def test_ambiguous_compressed_fixture(self):
# The name "fixture5" is ambiguous, so loading raises an error.
msg = "Multiple fixtures named 'fixture5'"
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command("loaddata", "fixture5", verbosity=0)
def test_db_loading(self):
# Load db fixtures 1 and 2. These will load using the 'default'
# database identifier implicitly.
management.call_command("loaddata", "db_fixture_1", verbosity=0)
management.call_command("loaddata", "db_fixture_2", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"Who needs more than one database?",
"Who needs to use compressed data?",
],
)
def test_loaddata_error_message(self):
"""
Loading a fixture which contains an invalid object outputs an error
message which contains the pk of the object that triggered the error.
"""
# MySQL needs a little prodding to reject invalid data.
# This won't affect other tests because the database connection
# is closed at the end of each test.
if connection.vendor == "mysql":
with connection.cursor() as cursor:
cursor.execute("SET sql_mode = 'TRADITIONAL'")
msg = "Could not load fixtures.Article(pk=1):"
with self.assertRaisesMessage(IntegrityError, msg):
management.call_command("loaddata", "invalid.json", verbosity=0)
@skipUnlessDBFeature("prohibits_null_characters_in_text_exception")
def test_loaddata_null_characters_on_postgresql(self):
error, msg = connection.features.prohibits_null_characters_in_text_exception
msg = f"Could not load fixtures.Article(pk=2): {msg}"
with self.assertRaisesMessage(error, msg):
management.call_command("loaddata", "null_character_in_field_value.json")
def test_loaddata_app_option(self):
with self.assertRaisesMessage(
CommandError, "No fixture named 'db_fixture_1' found."
):
management.call_command(
"loaddata", "db_fixture_1", verbosity=0, app_label="someotherapp"
)
self.assertQuerySetEqual(Article.objects.all(), [])
management.call_command(
"loaddata", "db_fixture_1", verbosity=0, app_label="fixtures"
)
self.assertEqual(
Article.objects.get().headline,
"Who needs more than one database?",
)
def test_loaddata_verbosity_three(self):
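        # At verbosity 3, loaddata reports per-object progress using carriage
        # returns.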
output = StringIO()
management.call_command(
"loaddata", "fixture1.json", verbosity=3, stdout=output, stderr=output
)
command_output = output.getvalue()
self.assertIn(
"\rProcessed 1 object(s).\rProcessed 2 object(s)."
"\rProcessed 3 object(s).\rProcessed 4 object(s).\n",
command_output,
)
def test_loading_using(self):
# Load fixtures 1 and 2. These will load using the 'default' database
# identifier explicitly.
management.call_command(
"loaddata", "db_fixture_1", verbosity=0, database="default"
)
management.call_command(
"loaddata", "db_fixture_2", verbosity=0, database="default"
)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"Who needs more than one database?",
"Who needs to use compressed data?",
],
)
def test_unmatched_identifier_loading(self):
# Db fixture 3 won't load because the database identifier doesn't
# match.
with self.assertRaisesMessage(
CommandError, "No fixture named 'db_fixture_3' found."
):
management.call_command("loaddata", "db_fixture_3", verbosity=0)
with self.assertRaisesMessage(
CommandError, "No fixture named 'db_fixture_3' found."
):
management.call_command(
"loaddata", "db_fixture_3", verbosity=0, database="default"
)
self.assertQuerySetEqual(Article.objects.all(), [])
def test_output_formats(self):
        # Load fixture 1 back in; we need the articles from it.
management.call_command("loaddata", "fixture1", verbosity=0)
# Try to load fixture 6 using format discovery
management.call_command("loaddata", "fixture6", verbosity=0)
self.assertQuerySetEqual(
Tag.objects.all(),
[
'<Tag: <Article: Time to reform copyright> tagged "copyright">',
'<Tag: <Article: Time to reform copyright> tagged "law">',
],
transform=repr,
ordered=False,
)
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}, '
'{"pk": 1, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "copyright", '
'"tagged_id": 3}}, '
'{"pk": 2, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "law", "tagged_id": 3}}, '
'{"pk": 1, "model": "fixtures.person", "fields": '
'{"name": "Django Reinhardt"}}, '
'{"pk": 2, "model": "fixtures.person", "fields": '
'{"name": "Stephane Grappelli"}}, '
'{"pk": 3, "model": "fixtures.person", "fields": {"name": "Prince"}}]',
natural_foreign_keys=True,
)
# Dump the current contents of the database as an XML fixture
self._dumpdata_assert(
["fixtures"],
'<?xml version="1.0" encoding="utf-8"?><django-objects version="1.0">'
'<object pk="1" model="fixtures.category">'
'<field type="CharField" name="title">News Stories</field>'
'<field type="TextField" name="description">Latest news stories</field>'
"</object>"
'<object pk="2" model="fixtures.article">'
'<field type="CharField" name="headline">Poker has no place on ESPN</field>'
'<field type="DateTimeField" name="pub_date">2006-06-16T12:00:00</field>'
"</object>"
'<object pk="3" model="fixtures.article">'
'<field type="CharField" name="headline">Time to reform copyright</field>'
'<field type="DateTimeField" name="pub_date">2006-06-16T13:00:00</field>'
"</object>"
'<object pk="1" model="fixtures.tag">'
'<field type="CharField" name="name">copyright</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural>'
"<natural>article</natural></field>"
'<field type="PositiveIntegerField" name="tagged_id">3</field>'
"</object>"
'<object pk="2" model="fixtures.tag">'
'<field type="CharField" name="name">law</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural>'
"<natural>article</natural></field>"
'<field type="PositiveIntegerField" name="tagged_id">3</field>'
"</object>"
'<object pk="1" model="fixtures.person">'
'<field type="CharField" name="name">Django Reinhardt</field>'
"</object>"
'<object pk="2" model="fixtures.person">'
'<field type="CharField" name="name">Stephane Grappelli</field>'
"</object>"
'<object pk="3" model="fixtures.person">'
'<field type="CharField" name="name">Prince</field>'
"</object></django-objects>",
format="xml",
natural_foreign_keys=True,
)
def test_loading_with_exclude_app(self):
Site.objects.all().delete()
management.call_command(
"loaddata", "fixture1", exclude=["fixtures"], verbosity=0
)
self.assertFalse(Article.objects.exists())
self.assertFalse(Category.objects.exists())
self.assertEqual(Site.objects.get().domain, "example.com")
def test_loading_with_exclude_model(self):
Site.objects.all().delete()
management.call_command(
"loaddata", "fixture1", exclude=["fixtures.Article"], verbosity=0
)
self.assertFalse(Article.objects.exists())
self.assertEqual(Category.objects.get().title, "News Stories")
self.assertEqual(Site.objects.get().domain, "example.com")
def test_exclude_option_errors(self):
"""Excluding a bogus app or model should raise an error."""
msg = "No installed app with label 'foo_app'."
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command(
"loaddata", "fixture1", exclude=["foo_app"], verbosity=0
)
msg = "Unknown model: fixtures.FooModel"
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command(
"loaddata", "fixture1", exclude=["fixtures.FooModel"], verbosity=0
)
def test_stdin_without_format(self):
"""Reading from stdin raises an error if format isn't specified."""
msg = "--format must be specified when reading from stdin."
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command("loaddata", "-", verbosity=0)
def test_loading_stdin(self):
"""Loading fixtures from stdin with json and xml."""
tests_dir = os.path.dirname(__file__)
fixture_json = os.path.join(tests_dir, "fixtures", "fixture1.json")
fixture_xml = os.path.join(tests_dir, "fixtures", "fixture3.xml")
with mock.patch(
"django.core.management.commands.loaddata.sys.stdin", open(fixture_json)
):
management.call_command("loaddata", "--format=json", "-", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
["Time to reform copyright", "Poker has no place on ESPN"],
)
with mock.patch(
"django.core.management.commands.loaddata.sys.stdin", open(fixture_xml)
):
management.call_command("loaddata", "--format=xml", "-", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"XML identified as leading cause of cancer",
"Time to reform copyright",
"Poker on TV is great!",
],
)
class NonexistentFixtureTests(TestCase):
"""
    Tests for loaddata when the requested fixture doesn't exist.
"""
def test_loaddata_not_existent_fixture_file(self):
stdout_output = StringIO()
with self.assertRaisesMessage(
CommandError, "No fixture named 'this_fixture_doesnt_exist' found."
):
management.call_command(
"loaddata", "this_fixture_doesnt_exist", stdout=stdout_output
)
@mock.patch("django.db.connection.enable_constraint_checking")
@mock.patch("django.db.connection.disable_constraint_checking")
def test_nonexistent_fixture_no_constraint_checking(
self, disable_constraint_checking, enable_constraint_checking
):
"""
        If no fixtures match the loaddata command, constraint checks on the
        database shouldn't be disabled. This is performance-critical on MSSQL.
"""
with self.assertRaisesMessage(
CommandError, "No fixture named 'this_fixture_doesnt_exist' found."
):
management.call_command(
"loaddata", "this_fixture_doesnt_exist", verbosity=0
)
disable_constraint_checking.assert_not_called()
enable_constraint_checking.assert_not_called()
class FixtureTransactionTests(DumpDataAssertMixin, TransactionTestCase):
available_apps = [
"fixtures",
"django.contrib.sites",
]
@skipUnlessDBFeature("supports_forward_references")
def test_format_discovery(self):
# Load fixture 1 again, using format discovery
management.call_command("loaddata", "fixture1", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
["Time to reform copyright", "Poker has no place on ESPN"],
)
# Try to load fixture 2 using format discovery; this will fail
# because there are two fixture2's in the fixtures directory
msg = "Multiple fixtures named 'fixture2'"
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command("loaddata", "fixture2", verbosity=0)
# object list is unaffected
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
["Time to reform copyright", "Poker has no place on ESPN"],
)
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Load fixture 4 (compressed), using format discovery
management.call_command("loaddata", "fixture4", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"Django pets kitten",
"Time to reform copyright",
"Poker has no place on ESPN",
],
)
class ForwardReferenceTests(DumpDataAssertMixin, TestCase):
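    # These fixtures contain objects that reference rows appearing later in
    # the same file; loaddata defers such forward references and resolves
    # them once all objects are loaded.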
def test_forward_reference_fk(self):
management.call_command("loaddata", "forward_reference_fk.json", verbosity=0)
t1, t2 = NaturalKeyThing.objects.all()
self.assertEqual(t1.other_thing, t2)
self.assertEqual(t2.other_thing, t1)
self._dumpdata_assert(
["fixtures"],
'[{"model": "fixtures.naturalkeything", "pk": 1, '
'"fields": {"key": "t1", "other_thing": 2, "other_things": []}}, '
'{"model": "fixtures.naturalkeything", "pk": 2, '
'"fields": {"key": "t2", "other_thing": 1, "other_things": []}}]',
)
def test_forward_reference_fk_natural_key(self):
management.call_command(
"loaddata",
"forward_reference_fk_natural_key.json",
verbosity=0,
)
t1, t2 = NaturalKeyThing.objects.all()
self.assertEqual(t1.other_thing, t2)
self.assertEqual(t2.other_thing, t1)
self._dumpdata_assert(
["fixtures"],
'[{"model": "fixtures.naturalkeything", '
'"fields": {"key": "t1", "other_thing": ["t2"], "other_things": []}}, '
'{"model": "fixtures.naturalkeything", '
'"fields": {"key": "t2", "other_thing": ["t1"], "other_things": []}}]',
natural_primary_keys=True,
natural_foreign_keys=True,
)
def test_forward_reference_m2m(self):
management.call_command("loaddata", "forward_reference_m2m.json", verbosity=0)
self.assertEqual(NaturalKeyThing.objects.count(), 3)
t1 = NaturalKeyThing.objects.get_by_natural_key("t1")
self.assertSequenceEqual(
t1.other_things.order_by("key").values_list("key", flat=True),
["t2", "t3"],
)
self._dumpdata_assert(
["fixtures"],
'[{"model": "fixtures.naturalkeything", "pk": 1, '
'"fields": {"key": "t1", "other_thing": null, "other_things": [2, 3]}}, '
'{"model": "fixtures.naturalkeything", "pk": 2, '
'"fields": {"key": "t2", "other_thing": null, "other_things": []}}, '
'{"model": "fixtures.naturalkeything", "pk": 3, '
'"fields": {"key": "t3", "other_thing": null, "other_things": []}}]',
)
def test_forward_reference_m2m_natural_key(self):
management.call_command(
"loaddata",
"forward_reference_m2m_natural_key.json",
verbosity=0,
)
self.assertEqual(NaturalKeyThing.objects.count(), 3)
t1 = NaturalKeyThing.objects.get_by_natural_key("t1")
self.assertSequenceEqual(
t1.other_things.order_by("key").values_list("key", flat=True),
["t2", "t3"],
)
self._dumpdata_assert(
["fixtures"],
'[{"model": "fixtures.naturalkeything", '
'"fields": {"key": "t1", "other_thing": null, '
'"other_things": [["t2"], ["t3"]]}}, '
'{"model": "fixtures.naturalkeything", '
'"fields": {"key": "t2", "other_thing": null, "other_things": []}}, '
'{"model": "fixtures.naturalkeything", '
'"fields": {"key": "t3", "other_thing": null, "other_things": []}}]',
natural_primary_keys=True,
natural_foreign_keys=True,
)
class CircularReferenceTests(DumpDataAssertMixin, TestCase):
def test_circular_reference(self):
management.call_command("loaddata", "circular_reference.json", verbosity=0)
obj_a = CircularA.objects.get()
obj_b = CircularB.objects.get()
self.assertEqual(obj_a.obj, obj_b)
self.assertEqual(obj_b.obj, obj_a)
self._dumpdata_assert(
["fixtures"],
'[{"model": "fixtures.circulara", "pk": 1, '
'"fields": {"key": "x", "obj": 1}}, '
'{"model": "fixtures.circularb", "pk": 1, '
'"fields": {"key": "y", "obj": 1}}]',
)
def test_circular_reference_natural_key(self):
management.call_command(
"loaddata",
"circular_reference_natural_key.json",
verbosity=0,
)
obj_a = CircularA.objects.get()
obj_b = CircularB.objects.get()
self.assertEqual(obj_a.obj, obj_b)
self.assertEqual(obj_b.obj, obj_a)
self._dumpdata_assert(
["fixtures"],
'[{"model": "fixtures.circulara", '
'"fields": {"key": "x", "obj": ["y"]}}, '
'{"model": "fixtures.circularb", '
'"fields": {"key": "y", "obj": ["x"]}}]',
natural_primary_keys=True,
natural_foreign_keys=True,
)
import datetime
import itertools
import re
from importlib import import_module
from unittest import mock
from urllib.parse import quote, urljoin
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import BACKEND_SESSION_KEY, REDIRECT_FIELD_NAME, SESSION_KEY
from django.contrib.auth.forms import (
AuthenticationForm,
PasswordChangeForm,
SetPasswordForm,
)
from django.contrib.auth.models import Permission, User
from django.contrib.auth.views import (
INTERNAL_RESET_SESSION_TOKEN,
LoginView,
RedirectURLMixin,
logout_then_login,
redirect_to_login,
)
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from django.http import HttpRequest, HttpResponse
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import Client, TestCase, ignore_warnings, override_settings
from django.test.client import RedirectCycleError
from django.urls import NoReverseMatch, reverse, reverse_lazy
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.http import urlsafe_base64_encode
from .client import PasswordResetConfirmClient
from .models import CustomUser, UUIDUser
from .settings import AUTH_TEMPLATES
class RedirectURLMixinTests(TestCase):
@override_settings(ROOT_URLCONF="auth_tests.urls")
def test_get_default_redirect_url_next_page(self):
class RedirectURLView(RedirectURLMixin):
next_page = "/custom/"
self.assertEqual(RedirectURLView().get_default_redirect_url(), "/custom/")
def test_get_default_redirect_url_no_next_page(self):
msg = "No URL to redirect to. Provide a next_page."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
RedirectURLMixin().get_default_redirect_url()
@override_settings(
LANGUAGES=[("en", "English")],
LANGUAGE_CODE="en",
TEMPLATES=AUTH_TEMPLATES,
ROOT_URLCONF="auth_tests.urls",
)
class AuthViewsTestCase(TestCase):
"""
Helper base class for the test classes that follow.
"""
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(
username="testclient", password="password", email="[email protected]"
)
cls.u3 = User.objects.create_user(
username="staff", password="password", email="[email protected]"
)
def login(self, username="testclient", password="password", url="/login/"):
response = self.client.post(
url,
{
"username": username,
"password": password,
},
)
self.assertIn(SESSION_KEY, self.client.session)
return response
def logout(self):
response = self.client.post("/admin/logout/")
self.assertEqual(response.status_code, 200)
self.assertNotIn(SESSION_KEY, self.client.session)
def assertFormError(self, response, error):
"""Assert that error is found in response.context['form'] errors"""
form_errors = list(itertools.chain(*response.context["form"].errors.values()))
self.assertIn(str(error), form_errors)
@override_settings(ROOT_URLCONF="django.contrib.auth.urls")
class AuthViewNamedURLTests(AuthViewsTestCase):
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
("login", [], {}),
("logout", [], {}),
("password_change", [], {}),
("password_change_done", [], {}),
("password_reset", [], {}),
("password_reset_done", [], {}),
(
"password_reset_confirm",
[],
{
"uidb64": "aaaaaaa",
"token": "1111-aaaaa",
},
),
("password_reset_complete", [], {}),
]
for name, args, kwargs in expected_named_urls:
with self.subTest(name=name):
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail(
"Reversal of url named '%s' failed with NoReverseMatch" % name
)
class PasswordResetTest(AuthViewsTestCase):
def setUp(self):
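        # The confirm view redirects to an internal set-password URL; this
        # client follows that redirect automatically (see the .client module).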
self.client = PasswordResetConfirmClient()
def test_email_not_found(self):
"""If the provided email is not registered, don't raise any error but
also don't send any email."""
response = self.client.get("/password_reset/")
self.assertEqual(response.status_code, 200)
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("http://", mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
        # Optional multipart text/html email support has been added. Make sure
        # the original, plain-text-only default behavior is unchanged.
self.assertFalse(mail.outbox[0].message().is_multipart())
def test_extra_email_context(self):
"""
extra_email_context should be available in the email template context.
"""
response = self.client.post(
"/password_reset_extra_email_context/",
{"email": "[email protected]"},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)
self.assertIn("http://custom.example.com/reset/", mail.outbox[0].body)
def test_html_mail_template(self):
"""
A multipart email with text/plain and text/html is sent
        if the html_email_template parameter is passed to the view.
"""
response = self.client.post(
"/password_reset/html_email_template/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), "text/plain")
self.assertEqual(message.get_payload(1).get_content_type(), "text/html")
self.assertNotIn("<html>", message.get_payload(0).get_payload())
self.assertIn("<html>", message.get_payload(1).get_payload())
def test_email_found_custom_from(self):
"""
Email is sent if a valid email address is provided for password reset
when a custom from_email is provided.
"""
response = self.client.post(
"/password_reset_from_email/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("[email protected]", mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
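        # For example, in "www.example:[email protected]" everything before
        # the "@" parses as userinfo, leaving "evil.com" as the effective host.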
with self.assertLogs("django.security.DisallowedHost", "ERROR"):
response = self.client.post(
"/password_reset/",
{"email": "[email protected]"},
headers={"host": "www.example:[email protected]"},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with self.assertLogs("django.security.DisallowedHost", "ERROR"):
response = self.client.post(
"/admin_password_reset/",
{"email": "[email protected]"},
headers={"host": "www.example:[email protected]"},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
def _test_confirm_start(self):
# Start by creating the email
self.client.post("/password_reset/", {"email": "[email protected]"})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
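        # urlmatch[0] is the full URL; urlmatch[1] is just the path component.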
return urlmatch[0], urlmatch[1]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
        # Munge the token in the path, but keep the same length, in case the
        # URLconf rejects a different length.
path = path[:-5] + ("0" * 4) + path[-1]
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_user(self):
# A nonexistent user returns a 200 response, not a 404.
response = self.client.get("/reset/123456/1-1/")
self.assertContains(response, "The password reset link was invalid")
def test_confirm_overflow_user(self):
# A base36 user id that overflows int returns a 200 response.
response = self.client.get("/reset/zzzzzzzzzzzzz/1-1/")
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": " anewpassword",
},
)
# Check the password has not been changed
u = User.objects.get(email="[email protected]")
        self.assertFalse(u.check_password("anewpassword"))
def test_confirm_invalid_hash(self):
"""A POST with an invalid token is rejected."""
u = User.objects.get(email="[email protected]")
original_password = u.password
url, path = self._test_confirm_start()
path_parts = path.split("-")
path_parts[-1] = ("0") * 20 + "/"
path = "-".join(path_parts)
response = self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": "anewpassword",
},
)
self.assertIs(response.context["validlink"], False)
u.refresh_from_db()
self.assertEqual(original_password, u.password) # password hasn't changed
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
# Check the password has been changed
u = User.objects.get(email="[email protected]")
self.assertTrue(u.check_password("anewpassword"))
# The reset token is deleted from the session.
self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session)
# Check we can't use the link again
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "x"}
)
self.assertFormError(
response, SetPasswordForm.error_messages["password_mismatch"]
)
def test_reset_redirect_default(self):
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertRedirects(
response, "/password_reset/done/", fetch_redirect_response=False
)
def test_reset_custom_redirect(self):
response = self.client.post(
"/password_reset/custom_redirect/", {"email": "[email protected]"}
)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_reset_custom_redirect_named(self):
response = self.client.post(
"/password_reset/custom_redirect/named/",
{"email": "[email protected]"},
)
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
def test_confirm_redirect_default(self):
url, path = self._test_confirm_start()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
def test_confirm_redirect_custom(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_confirm_redirect_custom_named(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/named/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
def test_confirm_custom_reset_url_token(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/token/")
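        # Tell the test client which token placeholder the custom URLconf
        # uses, so it can follow the internal redirect to the final URL.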
self.client.reset_url_token = "set-passwordcustom"
response = self.client.post(
path,
{"new_password1": "anewpassword", "new_password2": "anewpassword"},
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
def test_confirm_login_post_reset(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/post_reset_login/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.ModelBackend",
"django.contrib.auth.backends.AllowAllUsersModelBackend",
]
)
def test_confirm_login_post_reset_custom_backend(self):
# This backend is specified in the URL pattern.
backend = "django.contrib.auth.backends.AllowAllUsersModelBackend"
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/post_reset_login_custom_backend/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
self.assertEqual(self.client.session[BACKEND_SESSION_KEY], backend)
def test_confirm_login_post_reset_already_logged_in(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/post_reset_login/")
self.login()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
def test_confirm_display_user_from_form(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
        # The password_reset_confirm() view passes the user object to the
        # ``SetPasswordForm``, even on GET requests (#16919). For this test,
        # ``{{ form.user }}`` is rendered in the template
        # registration/password_reset_confirm.html.
username = User.objects.get(email="[email protected]").username
self.assertContains(response, "Hello, %s." % username)
# However, the view should NOT pass any user object on a form if the
# password reset link was invalid.
response = self.client.get("/reset/zzzzzzzzzzzzz/1-1/")
self.assertContains(response, "Hello, .")
def test_confirm_link_redirects_to_set_password_page(self):
url, path = self._test_confirm_start()
# Don't use PasswordResetConfirmClient (self.client) here which
# automatically fetches the redirect page.
client = Client()
response = client.get(path)
token = response.resolver_match.kwargs["token"]
uuidb64 = response.resolver_match.kwargs["uidb64"]
self.assertRedirects(response, "/reset/%s/set-password/" % uuidb64)
self.assertEqual(client.session["_password_reset_token"], token)
def test_confirm_custom_reset_url_token_link_redirects_to_set_password_page(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/token/")
client = Client()
response = client.get(path)
token = response.resolver_match.kwargs["token"]
uuidb64 = response.resolver_match.kwargs["uidb64"]
self.assertRedirects(
response, "/reset/custom/token/%s/set-passwordcustom/" % uuidb64
)
self.assertEqual(client.session["_password_reset_token"], token)
def test_invalid_link_if_going_directly_to_the_final_reset_password_url(self):
url, path = self._test_confirm_start()
_, uuidb64, _ = path.strip("/").split("/")
response = Client().get("/reset/%s/set-password/" % uuidb64)
self.assertContains(response, "The password reset link was invalid")
def test_missing_kwargs(self):
msg = "The URL path must contain 'uidb64' and 'token' parameters."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/reset/missing_parameters/")
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
class CustomUserPasswordResetTest(AuthViewsTestCase):
user_email = "[email protected]"
@classmethod
def setUpTestData(cls):
cls.u1 = CustomUser.custom_objects.create(
email="[email protected]",
date_of_birth=datetime.date(1976, 11, 8),
)
cls.u1.set_password("password")
cls.u1.save()
def setUp(self):
self.client = PasswordResetConfirmClient()
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post("/password_reset/", {"email": self.user_email})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch[0], urlmatch[1]
def test_confirm_valid_custom_user(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
# then submit a new password
response = self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": "anewpassword",
},
)
self.assertRedirects(response, "/reset/done/")
@override_settings(AUTH_USER_MODEL="auth_tests.UUIDUser")
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
def _test_confirm_start(self):
        # Create the user here instead of relying on a fixture.
UUIDUser.objects.create_user(
email=self.user_email,
username="foo",
password="foo",
)
return super()._test_confirm_start()
def test_confirm_invalid_uuid(self):
"""A uidb64 that decodes to a non-UUID doesn't crash."""
_, path = self._test_confirm_start()
invalid_uidb64 = urlsafe_base64_encode(b"INVALID_UUID")
first, _uuidb64_, second = path.strip("/").split("/")
response = self.client.get(
"/" + "/".join((first, invalid_uidb64, second)) + "/"
)
self.assertContains(response, "The password reset link was invalid")
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self):
response = self.client.post(
"/login/",
{
"username": "testclient",
"password": "password",
},
)
self.assertFormError(
response,
AuthenticationForm.error_messages["invalid_login"]
% {"username": User._meta.get_field("username").verbose_name},
)
def logout(self):
self.client.post("/logout/")
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "donuts",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertFormError(
response, PasswordChangeForm.error_messages["password_incorrect"]
)
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "donuts",
},
)
self.assertFormError(
response, SetPasswordForm.error_messages["password_mismatch"]
)
def test_password_change_succeeds(self):
self.login()
self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.fail_login()
self.login(password="password1")
def test_password_change_done_succeeds(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_change/done/", fetch_redirect_response=False
)
@override_settings(LOGIN_URL="/login/")
def test_password_change_done_fails(self):
response = self.client.get("/password_change/done/")
self.assertRedirects(
response,
"/login/?next=/password_change/done/",
fetch_redirect_response=False,
)
def test_password_change_redirect_default(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_change/done/", fetch_redirect_response=False
)
def test_password_change_redirect_custom(self):
self.login()
response = self.client.post(
"/password_change/custom/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_password_change_redirect_custom_named(self):
self.login()
response = self.client.post(
"/password_change/custom/named/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
class SessionAuthenticationTests(AuthViewsTestCase):
def test_user_password_change_updates_session(self):
"""
#21649 - Ensure contrib.auth.views.password_change updates the user's
session auth hash after a password change so the session isn't logged out.
"""
self.login()
original_session_key = self.client.session.session_key
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
        # If the hash isn't updated, retrieving the redirect page will fail.
self.assertRedirects(response, "/password_change/done/")
# The session key is rotated.
self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse("login"))
self.assertEqual(response.status_code, 200)
if apps.is_installed("django.contrib.sites"):
Site = apps.get_model("sites.Site")
site = Site.objects.get_current()
self.assertEqual(response.context["site"], site)
self.assertEqual(response.context["site_name"], site.name)
else:
self.assertIsInstance(response.context["site"], RequestSite)
self.assertIsInstance(response.context["form"], AuthenticationForm)
def test_security_check(self):
login_url = reverse("login")
# These URLs should not pass the security check.
bad_urls = (
"http://example.com",
"http:///example.com",
"https://example.com",
"ftp://example.com",
"///example.com",
"//example.com",
'javascript:alert("XSS")',
)
for bad_url in bad_urls:
with self.subTest(bad_url=bad_url):
nasty_url = "%(url)s?%(next)s=%(bad_url)s" % {
"url": login_url,
"next": REDIRECT_FIELD_NAME,
"bad_url": quote(bad_url),
}
response = self.client.post(
nasty_url,
{
"username": "testclient",
"password": "password",
},
)
self.assertEqual(response.status_code, 302)
self.assertNotIn(
bad_url, response.url, "%s should be blocked" % bad_url
)
# These URLs should pass the security check.
good_urls = (
"/view/?param=http://example.com",
"/view/?param=https://example.com",
"/view?param=ftp://example.com",
"view/?param=//example.com",
"https://testserver/",
"HTTPS://testserver/",
"//testserver/",
"/url%20with%20spaces/",
)
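        # Relative paths (even with external URLs in the query string) and
        # absolute URLs pointing at the current host are considered safe.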
for good_url in good_urls:
with self.subTest(good_url=good_url):
safe_url = "%(url)s?%(next)s=%(good_url)s" % {
"url": login_url,
"next": REDIRECT_FIELD_NAME,
"good_url": quote(good_url),
}
response = self.client.post(
safe_url,
{
"username": "testclient",
"password": "password",
},
)
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
def test_security_check_https(self):
login_url = reverse("login")
non_https_next_url = "http://testserver/path"
not_secured_url = "%(url)s?%(next)s=%(next_url)s" % {
"url": login_url,
"next": REDIRECT_FIELD_NAME,
"next_url": quote(non_https_next_url),
}
post_data = {
"username": "testclient",
"password": "password",
}
response = self.client.post(not_secured_url, post_data, secure=True)
self.assertEqual(response.status_code, 302)
self.assertNotEqual(response.url, non_https_next_url)
self.assertEqual(response.url, settings.LOGIN_REDIRECT_URL)
def test_login_form_contains_request(self):
# The custom authentication form for this login requires a request to
# initialize it.
response = self.client.post(
"/custom_request_auth_login/",
{
"username": "testclient",
"password": "password",
},
)
# The login was successful.
self.assertRedirects(
response, settings.LOGIN_REDIRECT_URL, fetch_redirect_response=False
)
def test_login_csrf_rotate(self):
"""
Makes sure that a login rotates the currently-used CSRF token.
"""
def get_response(request):
return HttpResponse()
# Do a GET to establish a CSRF token
# The test client isn't used here as it's a test for middleware.
req = HttpRequest()
CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
# get_token() triggers CSRF token inclusion in the response
get_token(req)
resp = CsrfViewMiddleware(LoginView.as_view())(req)
csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
token1 = csrf_cookie.coded_value
# Prepare the POST request
req = HttpRequest()
req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
req.method = "POST"
req.POST = {
"username": "testclient",
"password": "password",
"csrfmiddlewaretoken": token1,
}
# Use POST request to log in
SessionMiddleware(get_response).process_request(req)
CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
req.META[
"SERVER_NAME"
] = "testserver" # Required to have redirect work in login view
req.META["SERVER_PORT"] = 80
resp = CsrfViewMiddleware(LoginView.as_view())(req)
csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
token2 = csrf_cookie.coded_value
# Check the CSRF token switched
self.assertNotEqual(token1, token2)
def test_session_key_flushed_on_login(self):
"""
To avoid reusing another user's session, ensure a new, empty session is
created if the existing session corresponds to a different authenticated
user.
"""
self.login()
original_session_key = self.client.session.session_key
self.login(username="staff")
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_session_key_flushed_on_login_after_password_change(self):
"""
As above, but same user logging in after a password change.
"""
self.login()
original_session_key = self.client.session.session_key
# If no password change, session key should not be flushed.
self.login()
self.assertEqual(original_session_key, self.client.session.session_key)
user = User.objects.get(username="testclient")
user.set_password("foobar")
user.save()
self.login(password="foobar")
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_login_session_without_hash_session_key(self):
"""
        Session without django.contrib.auth.HASH_SESSION_KEY should log in
        without an exception.
"""
user = User.objects.get(username="testclient")
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore()
session[SESSION_KEY] = user.id
session.save()
original_session_key = session.session_key
self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key
self.login()
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_login_get_default_redirect_url(self):
response = self.login(url="/login/get_default_redirect_url/")
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_login_next_page(self):
response = self.login(url="/login/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
def test_login_named_next_page_named(self):
response = self.login(url="/login/next_page/named/")
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
@override_settings(LOGIN_REDIRECT_URL="/custom/")
def test_login_next_page_overrides_login_redirect_url_setting(self):
response = self.login(url="/login/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
def test_login_redirect_url_overrides_next_page(self):
response = self.login(url="/login/next_page/?next=/test/")
self.assertRedirects(response, "/test/", fetch_redirect_response=False)
def test_login_redirect_url_overrides_get_default_redirect_url(self):
response = self.login(url="/login/get_default_redirect_url/?next=/test/")
self.assertRedirects(response, "/test/", fetch_redirect_response=False)
class LoginURLSettings(AuthViewsTestCase):
"""Tests for settings.LOGIN_URL."""
def assertLoginURLEquals(self, url):
response = self.client.get("/login_required/")
self.assertRedirects(response, url, fetch_redirect_response=False)
@override_settings(LOGIN_URL="/login/")
def test_standard_login_url(self):
self.assertLoginURLEquals("/login/?next=/login_required/")
@override_settings(LOGIN_URL="login")
def test_named_login_url(self):
self.assertLoginURLEquals("/login/?next=/login_required/")
@override_settings(LOGIN_URL="http://remote.example.com/login")
def test_remote_login_url(self):
quoted_next = quote("http://testserver/login_required/")
expected = "http://remote.example.com/login?next=%s" % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL="https:///login/")
def test_https_login_url(self):
quoted_next = quote("http://testserver/login_required/")
expected = "https:///login/?next=%s" % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL="/login/?pretty=1")
def test_login_url_with_querystring(self):
self.assertLoginURLEquals("/login/?pretty=1&next=/login_required/")
@override_settings(LOGIN_URL="http://remote.example.com/login/?next=/default/")
def test_remote_login_url_with_next_querystring(self):
quoted_next = quote("http://testserver/login_required/")
expected = "http://remote.example.com/login/?next=%s" % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL=reverse_lazy("login"))
def test_lazy_login_url(self):
self.assertLoginURLEquals("/login/?next=/login_required/")
class LoginRedirectUrlTest(AuthViewsTestCase):
"""Tests for settings.LOGIN_REDIRECT_URL."""
def assertLoginRedirectURLEqual(self, url):
response = self.login()
self.assertRedirects(response, url, fetch_redirect_response=False)
def test_default(self):
self.assertLoginRedirectURLEqual("/accounts/profile/")
@override_settings(LOGIN_REDIRECT_URL="/custom/")
def test_custom(self):
self.assertLoginRedirectURLEqual("/custom/")
@override_settings(LOGIN_REDIRECT_URL="password_reset")
def test_named(self):
self.assertLoginRedirectURLEqual("/password_reset/")
@override_settings(LOGIN_REDIRECT_URL="http://remote.example.com/welcome/")
def test_remote(self):
self.assertLoginRedirectURLEqual("http://remote.example.com/welcome/")
class RedirectToLoginTests(AuthViewsTestCase):
"""Tests for the redirect_to_login view"""
@override_settings(LOGIN_URL=reverse_lazy("login"))
def test_redirect_to_login_with_lazy(self):
login_redirect_response = redirect_to_login(next="/else/where/")
expected = "/login/?next=/else/where/"
self.assertEqual(expected, login_redirect_response.url)
@override_settings(LOGIN_URL=reverse_lazy("login"))
def test_redirect_to_login_with_lazy_and_unicode(self):
login_redirect_response = redirect_to_login(next="/else/where/झ/")
expected = "/login/?next=/else/where/%E0%A4%9D/"
self.assertEqual(expected, login_redirect_response.url)
class LogoutThenLoginTests(AuthViewsTestCase):
"""Tests for the logout_then_login view"""
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
@override_settings(LOGIN_URL="/login/")
def test_default_logout_then_login(self):
self.login()
req = HttpRequest()
req.method = "POST"
csrf_token = get_token(req)
req.COOKIES[settings.CSRF_COOKIE_NAME] = csrf_token
req.POST = {"csrfmiddlewaretoken": csrf_token}
req.META["SERVER_NAME"] = "testserver"
req.META["SERVER_PORT"] = 80
req.session = self.client.session
response = logout_then_login(req)
self.confirm_logged_out()
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
def test_logout_then_login_with_custom_login(self):
self.login()
req = HttpRequest()
req.method = "POST"
csrf_token = get_token(req)
req.COOKIES[settings.CSRF_COOKIE_NAME] = csrf_token
req.POST = {"csrfmiddlewaretoken": csrf_token}
req.META["SERVER_NAME"] = "testserver"
req.META["SERVER_PORT"] = 80
req.session = self.client.session
response = logout_then_login(req, login_url="/custom/")
self.confirm_logged_out()
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
@ignore_warnings(category=RemovedInDjango50Warning)
@override_settings(LOGIN_URL="/login/")
def test_default_logout_then_login_get(self):
self.login()
req = HttpRequest()
req.method = "GET"
req.META["SERVER_NAME"] = "testserver"
req.META["SERVER_PORT"] = 80
req.session = self.client.session
response = logout_then_login(req)
# RemovedInDjango50Warning: When the deprecation ends, replace with
# self.assertEqual(response.status_code, 405)
self.confirm_logged_out()
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
class LoginRedirectAuthenticatedUser(AuthViewsTestCase):
dont_redirect_url = "/login/redirect_authenticated_user_default/"
do_redirect_url = "/login/redirect_authenticated_user/"
def test_default(self):
"""Stay on the login page by default."""
self.login()
response = self.client.get(self.dont_redirect_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["next"], "")
def test_guest(self):
"""If not logged in, stay on the same page."""
response = self.client.get(self.do_redirect_url)
self.assertEqual(response.status_code, 200)
def test_redirect(self):
"""If logged in, go to default redirected URL."""
self.login()
response = self.client.get(self.do_redirect_url)
self.assertRedirects(
response, "/accounts/profile/", fetch_redirect_response=False
)
@override_settings(LOGIN_REDIRECT_URL="/custom/")
def test_redirect_url(self):
"""If logged in, go to custom redirected URL."""
self.login()
response = self.client.get(self.do_redirect_url)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_redirect_param(self):
"""If next is specified as a GET parameter, go there."""
self.login()
url = self.do_redirect_url + "?next=/custom_next/"
response = self.client.get(url)
self.assertRedirects(response, "/custom_next/", fetch_redirect_response=False)
def test_redirect_loop(self):
"""
Detect a redirect loop if LOGIN_REDIRECT_URL is not correctly set,
with and without custom parameters.
"""
self.login()
msg = (
"Redirection loop for authenticated user detected. Check that "
"your LOGIN_REDIRECT_URL doesn't point to a login page."
)
with self.settings(LOGIN_REDIRECT_URL=self.do_redirect_url):
with self.assertRaisesMessage(ValueError, msg):
self.client.get(self.do_redirect_url)
url = self.do_redirect_url + "?bla=2"
with self.assertRaisesMessage(ValueError, msg):
self.client.get(url)
def test_permission_required_not_logged_in(self):
# Not logged in ...
with self.settings(LOGIN_URL=self.do_redirect_url):
# redirected to login.
response = self.client.get("/permission_required_redirect/", follow=True)
self.assertEqual(response.status_code, 200)
# exception raised.
response = self.client.get("/permission_required_exception/", follow=True)
self.assertEqual(response.status_code, 403)
# redirected to login.
response = self.client.get(
"/login_and_permission_required_exception/", follow=True
)
self.assertEqual(response.status_code, 200)
def test_permission_required_logged_in(self):
self.login()
# Already logged in...
with self.settings(LOGIN_URL=self.do_redirect_url):
# redirect loop encountered.
with self.assertRaisesMessage(
RedirectCycleError, "Redirect loop detected."
):
self.client.get("/permission_required_redirect/", follow=True)
# exception raised.
response = self.client.get("/permission_required_exception/", follow=True)
self.assertEqual(response.status_code, 403)
# exception raised.
response = self.client.get(
"/login_and_permission_required_exception/", follow=True
)
self.assertEqual(response.status_code, 403)
class LoginSuccessURLAllowedHostsTest(AuthViewsTestCase):
def test_success_url_allowed_hosts_same_host(self):
response = self.client.post(
"/login/allowed_hosts/",
{
"username": "testclient",
"password": "password",
"next": "https://testserver/home",
},
)
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(
response, "https://testserver/home", fetch_redirect_response=False
)
def test_success_url_allowed_hosts_safe_host(self):
response = self.client.post(
"/login/allowed_hosts/",
{
"username": "testclient",
"password": "password",
"next": "https://otherserver/home",
},
)
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(
response, "https://otherserver/home", fetch_redirect_response=False
)
def test_success_url_allowed_hosts_unsafe_host(self):
response = self.client.post(
"/login/allowed_hosts/",
{
"username": "testclient",
"password": "password",
"next": "https://evil/home",
},
)
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(
response, "/accounts/profile/", fetch_redirect_response=False
)
class LogoutTest(AuthViewsTestCase):
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.post("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
def test_logout_with_post(self):
self.login()
response = self.client.post("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
def test_logout_with_get_raises_deprecation_warning(self):
self.login()
msg = (
"Log out via GET requests is deprecated and will be removed in Django 5.0. "
"Use POST requests for logging out."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
response = self.client.get("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.post("/logout/")
self.assertIn("site", response.context)
def test_logout_doesnt_cache(self):
"""
The logout() view should send "no-cache" headers for reasons described
in #25490.
"""
response = self.client.post("/logout/")
self.assertIn("no-store", response.headers["Cache-Control"])
def test_logout_with_overridden_redirect_url(self):
# Bug 11223
self.login()
response = self.client.post("/logout/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
response = self.client.post("/logout/next_page/?next=/login/")
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.post("/logout/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.post("/logout/?next=/login/")
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.post("/logout/custom_query/?follow=/somewhere/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_named_redirect(self):
"Logout resolves names or URLs passed as next_page."
self.login()
response = self.client.post("/logout/next_page/named/")
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_success_url_allowed_hosts_same_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://testserver/")
self.assertRedirects(
response, "https://testserver/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_success_url_allowed_hosts_safe_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://otherserver/")
self.assertRedirects(
response, "https://otherserver/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_success_url_allowed_hosts_unsafe_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://evil/")
self.assertRedirects(
response, "/logout/allowed_hosts/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_security_check(self):
logout_url = reverse("logout")
# These URLs should not pass the security check.
bad_urls = (
"http://example.com",
"http:///example.com",
"https://example.com",
"ftp://example.com",
"///example.com",
"//example.com",
'javascript:alert("XSS")',
)
for bad_url in bad_urls:
with self.subTest(bad_url=bad_url):
nasty_url = "%(url)s?%(next)s=%(bad_url)s" % {
"url": logout_url,
"next": REDIRECT_FIELD_NAME,
"bad_url": quote(bad_url),
}
self.login()
response = self.client.post(nasty_url)
self.assertEqual(response.status_code, 302)
self.assertNotIn(
bad_url, response.url, "%s should be blocked" % bad_url
)
self.confirm_logged_out()
# These URLs should pass the security check.
good_urls = (
"/view/?param=http://example.com",
"/view/?param=https://example.com",
"/view?param=ftp://example.com",
"view/?param=//example.com",
"https://testserver/",
"HTTPS://testserver/",
"//testserver/",
"/url%20with%20spaces/",
)
for good_url in good_urls:
with self.subTest(good_url=good_url):
safe_url = "%(url)s?%(next)s=%(good_url)s" % {
"url": logout_url,
"next": REDIRECT_FIELD_NAME,
"good_url": quote(good_url),
}
self.login()
response = self.client.post(safe_url)
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
self.confirm_logged_out()
def test_security_check_https(self):
logout_url = reverse("logout")
non_https_next_url = "http://testserver/"
url = "%(url)s?%(next)s=%(next_url)s" % {
"url": logout_url,
"next": REDIRECT_FIELD_NAME,
"next_url": quote(non_https_next_url),
}
self.login()
response = self.client.post(url, secure=True)
self.assertRedirects(response, logout_url, fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_preserve_language(self):
"""Language is preserved after logout."""
self.login()
self.client.post("/setlang/", {"language": "pl"})
self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "pl")
self.client.post("/logout/")
self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "pl")
@override_settings(LOGOUT_REDIRECT_URL="/custom/")
def test_logout_redirect_url_setting(self):
self.login()
response = self.client.post("/logout/")
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
@override_settings(LOGOUT_REDIRECT_URL="/custom/")
def test_logout_redirect_url_setting_allowed_hosts_unsafe_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://evil/")
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
@override_settings(LOGOUT_REDIRECT_URL="logout")
def test_logout_redirect_url_named_setting(self):
self.login()
response = self.client.post("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
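# Look up a Permission for a model by codename, e.g. get_perm(User,
# "view_user") as used in test_view_user_password_is_readonly below.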
def get_perm(Model, perm):
ct = ContentType.objects.get_for_model(Model)
return Permission.objects.get(content_type=ct, codename=perm)
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@override_settings(ROOT_URLCONF="auth_tests.urls_admin")
class ChangelistTests(AuthViewsTestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
# Make me a superuser before logging in.
User.objects.filter(username="testclient").update(
is_staff=True, is_superuser=True
)
def setUp(self):
self.login()
# Get the latest last_login value.
self.admin = User.objects.get(pk=self.u1.pk)
def get_user_data(self, user):
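        # The admin renders datetimes with a split date/time widget, so each
        # value is posted as separate _0 (date) and _1 (time) fields, plus
        # initial- copies used for change detection.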
return {
"username": user.username,
"password": user.password,
"email": user.email,
"is_active": user.is_active,
"is_staff": user.is_staff,
"is_superuser": user.is_superuser,
"last_login_0": user.last_login.strftime("%Y-%m-%d"),
"last_login_1": user.last_login.strftime("%H:%M:%S"),
"initial-last_login_0": user.last_login.strftime("%Y-%m-%d"),
"initial-last_login_1": user.last_login.strftime("%H:%M:%S"),
"date_joined_0": user.date_joined.strftime("%Y-%m-%d"),
"date_joined_1": user.date_joined.strftime("%H:%M:%S"),
"initial-date_joined_0": user.date_joined.strftime("%Y-%m-%d"),
"initial-date_joined_1": user.date_joined.strftime("%H:%M:%S"),
"first_name": user.first_name,
"last_name": user.last_name,
}
# #20078 - users shouldn't be allowed to guess password hashes via
# repeated password__startswith queries.
def test_changelist_disallows_password_lookups(self):
# A lookup that tries to filter on password isn't OK
with self.assertLogs("django.security.DisallowedModelAdminLookup", "ERROR"):
response = self.client.get(
reverse("auth_test_admin:auth_user_changelist")
+ "?password__startswith=sha1$"
)
self.assertEqual(response.status_code, 400)
def test_user_change_email(self):
data = self.get_user_data(self.admin)
data["email"] = "new_" + data["email"]
response = self.client.post(
reverse("auth_test_admin:auth_user_change", args=(self.admin.pk,)), data
)
self.assertRedirects(response, reverse("auth_test_admin:auth_user_changelist"))
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "Changed Email address.")
def test_user_not_change(self):
response = self.client.post(
reverse("auth_test_admin:auth_user_change", args=(self.admin.pk,)),
self.get_user_data(self.admin),
)
self.assertRedirects(response, reverse("auth_test_admin:auth_user_changelist"))
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "No fields changed.")
def test_user_change_password(self):
user_change_url = reverse(
"auth_test_admin:auth_user_change", args=(self.admin.pk,)
)
password_change_url = reverse(
"auth_test_admin:auth_user_password_change", args=(self.admin.pk,)
)
response = self.client.get(user_change_url)
# Test the link inside password field help_text.
rel_link = re.search(
r'you can change the password using <a href="([^"]*)">this form</a>',
response.content.decode(),
)[1]
self.assertEqual(urljoin(user_change_url, rel_link), password_change_url)
response = self.client.post(
password_change_url,
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "Changed password.")
self.logout()
self.login(password="password1")
def test_user_change_different_user_password(self):
u = User.objects.get(email="[email protected]")
response = self.client.post(
reverse("auth_test_admin:auth_user_password_change", args=(u.pk,)),
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(
response, reverse("auth_test_admin:auth_user_change", args=(u.pk,))
)
row = LogEntry.objects.latest("id")
self.assertEqual(row.user_id, self.admin.pk)
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), "Changed password.")
def test_password_change_bad_url(self):
response = self.client.get(
reverse("auth_test_admin:auth_user_password_change", args=("foobar",))
)
self.assertEqual(response.status_code, 404)
@mock.patch("django.contrib.auth.admin.UserAdmin.has_change_permission")
def test_user_change_password_passes_user_to_has_change_permission(
self, has_change_permission
):
url = reverse(
"auth_test_admin:auth_user_password_change", args=(self.admin.pk,)
)
self.client.post(url, {"password1": "password1", "password2": "password1"})
(_request, user), _kwargs = has_change_permission.call_args
self.assertEqual(user.pk, self.admin.pk)
def test_view_user_password_is_readonly(self):
u = User.objects.get(username="testclient")
u.is_superuser = False
u.save()
original_password = u.password
u.user_permissions.add(get_perm(User, "view_user"))
response = self.client.get(
reverse("auth_test_admin:auth_user_change", args=(u.pk,)),
)
algo, salt, hash_string = u.password.split("$")
self.assertContains(response, '<div class="readonly">testclient</div>')
# ReadOnlyPasswordHashWidget is used to render the field.
self.assertContains(
response,
"<strong>algorithm</strong>: <bdi>%s</bdi>\n\n"
"<strong>salt</strong>: <bdi>%s********************</bdi>\n\n"
"<strong>hash</strong>: <bdi>%s**************************</bdi>\n\n"
% (
algo,
salt[:2],
hash_string[:6],
),
html=True,
)
# Value in POST data is ignored.
data = self.get_user_data(u)
data["password"] = "shouldnotchange"
change_url = reverse("auth_test_admin:auth_user_change", args=(u.pk,))
response = self.client.post(change_url, data)
self.assertEqual(response.status_code, 403)
u.refresh_from_db()
self.assertEqual(u.password, original_password)
@override_settings(
AUTH_USER_MODEL="auth_tests.UUIDUser",
ROOT_URLCONF="auth_tests.urls_custom_user_admin",
)
class UUIDUserTests(TestCase):
def test_admin_password_change(self):
u = UUIDUser.objects.create_superuser(
username="uuid", email="[email protected]", password="test"
)
self.assertTrue(self.client.login(username="uuid", password="test"))
user_change_url = reverse(
"custom_user_admin:auth_tests_uuiduser_change", args=(u.pk,)
)
response = self.client.get(user_change_url)
self.assertEqual(response.status_code, 200)
password_change_url = reverse(
"custom_user_admin:auth_user_password_change", args=(u.pk,)
)
response = self.client.get(password_change_url)
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="uuiduser_form">')
# A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
with connection.constraint_checks_disabled():
response = self.client.post(
password_change_url,
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest("id")
self.assertEqual(row.user_id, 1) # hardcoded in CustomUserAdmin.log_change()
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), "Changed password.")
# The LogEntry.user column isn't altered to a UUID type so it's set to
# an integer manually in CustomUserAdmin to avoid an error. To avoid a
# constraint error, delete the entry before constraints are checked
# after the test.
row.delete()
import builtins
import getpass
import os
import sys
from datetime import date
from io import StringIO
from unittest import mock
from django.apps import apps
from django.contrib.auth import get_permission_codename, management
from django.contrib.auth.management import create_permissions, get_default_username
from django.contrib.auth.management.commands import changepassword, createsuperuser
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.management import call_command
from django.core.management.base import CommandError
from django.db import migrations
from django.test import TestCase, override_settings
from django.utils.translation import gettext_lazy as _
from .models import (
CustomUser,
CustomUserNonUniqueUsername,
CustomUserWithFK,
CustomUserWithM2M,
CustomUserWithUniqueConstraint,
Email,
Organization,
UserProxy,
)
MOCK_INPUT_KEY_TO_PROMPTS = {
# @mock_inputs dict key: [expected prompt messages],
"bypass": ["Bypass password validation and create user anyway? [y/N]: "],
"email": ["Email address: "],
"date_of_birth": ["Date of birth: "],
"first_name": ["First name: "],
"username": [
"Username: ",
lambda: "Username (leave blank to use '%s'): " % get_default_username(),
],
}
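# Keys are the shortcut names used in @mock_inputs dicts; values list the
# prompt strings they answer. Callables are evaluated lazily because the
# default username is only known at runtime.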
def mock_inputs(inputs):
"""
Decorator to temporarily replace input/getpass to allow interactive
createsuperuser.
"""
def inner(test_func):
def wrapper(*args):
class mock_getpass:
@staticmethod
def getpass(prompt=b"Password: ", stream=None):
if callable(inputs["password"]):
return inputs["password"]()
return inputs["password"]
def mock_input(prompt):
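                # "__proxy__" in a prompt means a lazy translation object was
                # interpolated without being forced to str first.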
assert "__proxy__" not in prompt
response = None
for key, val in inputs.items():
if val == "KeyboardInterrupt":
raise KeyboardInterrupt
# get() fallback because sometimes 'key' is the actual
# prompt rather than a shortcut name.
prompt_msgs = MOCK_INPUT_KEY_TO_PROMPTS.get(key, key)
if isinstance(prompt_msgs, list):
prompt_msgs = [
msg() if callable(msg) else msg for msg in prompt_msgs
]
if prompt in prompt_msgs:
if callable(val):
response = val()
else:
response = val
break
if response is None:
raise ValueError("Mock input for %r not found." % prompt)
return response
old_getpass = createsuperuser.getpass
old_input = builtins.input
createsuperuser.getpass = mock_getpass
builtins.input = mock_input
try:
test_func(*args)
finally:
createsuperuser.getpass = old_getpass
builtins.input = old_input
return wrapper
return inner
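# A minimal usage sketch of the decorator above (hypothetical test, not part
# of the suite): dict keys may be shortcut names from MOCK_INPUT_KEY_TO_PROMPTS
# ("username", "email", ...) or literal prompt strings, and the "password"
# entry is consumed by the patched getpass module. MockTTY (defined below)
# makes the command believe stdin is interactive.
def _example_mock_inputs_usage(test_case):
    @mock_inputs({"password": "s3cret", "username": "demo", "email": ""})
    def run(self):
        call_command("createsuperuser", interactive=True, stdin=MockTTY())
    run(test_case)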
class MockTTY:
"""
A fake stdin object that pretends to be a TTY to be used in conjunction
with mock_inputs.
"""
def isatty(self):
return True
class MockInputTests(TestCase):
@mock_inputs({"username": "alice"})
def test_input_not_found(self):
with self.assertRaisesMessage(
ValueError, "Mock input for 'Email address: ' not found."
):
call_command("createsuperuser", stdin=MockTTY())
class GetDefaultUsernameTestCase(TestCase):
databases = {"default", "other"}
def setUp(self):
self.old_get_system_username = management.get_system_username
def tearDown(self):
management.get_system_username = self.old_get_system_username
def test_actual_implementation(self):
self.assertIsInstance(management.get_system_username(), str)
def test_simple(self):
management.get_system_username = lambda: "joe"
self.assertEqual(management.get_default_username(), "joe")
def test_existing(self):
User.objects.create(username="joe")
management.get_system_username = lambda: "joe"
self.assertEqual(management.get_default_username(), "")
self.assertEqual(management.get_default_username(check_db=False), "joe")
def test_i18n(self):
# 'Julia' with accented 'u':
management.get_system_username = lambda: "J\xfalia"
self.assertEqual(management.get_default_username(), "julia")
def test_with_database(self):
User.objects.create(username="joe")
management.get_system_username = lambda: "joe"
self.assertEqual(management.get_default_username(), "")
self.assertEqual(management.get_default_username(database="other"), "joe")
User.objects.using("other").create(username="joe")
self.assertEqual(management.get_default_username(database="other"), "")
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
)
class ChangepasswordManagementCommandTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user(username="joe", password="qwerty")
def setUp(self):
self.stdout = StringIO()
self.stderr = StringIO()
def tearDown(self):
self.stdout.close()
self.stderr.close()
@mock.patch.object(getpass, "getpass", return_value="password")
def test_get_pass(self, mock_get_pass):
call_command("changepassword", username="joe", stdout=self.stdout)
self.assertIs(User.objects.get(username="joe").check_password("password"), True)
@mock.patch.object(getpass, "getpass", return_value="")
def test_get_pass_no_input(self, mock_get_pass):
with self.assertRaisesMessage(CommandError, "aborted"):
call_command("changepassword", username="joe", stdout=self.stdout)
@mock.patch.object(changepassword.Command, "_get_pass", return_value="new_password")
def test_system_username(self, mock_get_pass):
"""The system username is used if --username isn't provided."""
username = getpass.getuser()
User.objects.create_user(username=username, password="qwerty")
call_command("changepassword", stdout=self.stdout)
self.assertIs(
User.objects.get(username=username).check_password("new_password"), True
)
def test_nonexistent_username(self):
with self.assertRaisesMessage(CommandError, "user 'test' does not exist"):
call_command("changepassword", username="test", stdout=self.stdout)
@mock.patch.object(changepassword.Command, "_get_pass", return_value="not qwerty")
def test_that_changepassword_command_changes_joes_password(self, mock_get_pass):
"Executing the changepassword management command should change joe's password"
self.assertTrue(self.user.check_password("qwerty"))
call_command("changepassword", username="joe", stdout=self.stdout)
command_output = self.stdout.getvalue().strip()
self.assertEqual(
command_output,
"Changing password for user 'joe'\n"
"Password changed successfully for user 'joe'",
)
self.assertTrue(User.objects.get(username="joe").check_password("not qwerty"))
@mock.patch.object(
changepassword.Command, "_get_pass", side_effect=lambda *args: str(args)
)
def test_that_max_tries_exits_1(self, mock_get_pass):
"""
A CommandError should be thrown by handle() if the user enters in
mismatched passwords three times.
"""
msg = "Aborting password change for user 'joe' after 3 attempts"
with self.assertRaisesMessage(CommandError, msg):
call_command(
"changepassword", username="joe", stdout=self.stdout, stderr=self.stderr
)
@mock.patch.object(changepassword.Command, "_get_pass", return_value="1234567890")
def test_password_validation(self, mock_get_pass):
"""
A CommandError should be raised if the user enters in passwords which
fail validation three times.
"""
abort_msg = "Aborting password change for user 'joe' after 3 attempts"
with self.assertRaisesMessage(CommandError, abort_msg):
call_command(
"changepassword", username="joe", stdout=self.stdout, stderr=self.stderr
)
self.assertIn("This password is entirely numeric.", self.stderr.getvalue())
@mock.patch.object(changepassword.Command, "_get_pass", return_value="not qwerty")
def test_that_changepassword_command_works_with_nonascii_output(
self, mock_get_pass
):
"""
#21627 -- Executing the changepassword management command should allow
non-ASCII characters from the User object representation.
"""
# 'Julia' with accented 'u':
User.objects.create_user(username="J\xfalia", password="qwerty")
call_command("changepassword", username="J\xfalia", stdout=self.stdout)
class MultiDBChangepasswordManagementCommandTestCase(TestCase):
databases = {"default", "other"}
@mock.patch.object(changepassword.Command, "_get_pass", return_value="not qwerty")
def test_that_changepassword_command_with_database_option_uses_given_db(
self, mock_get_pass
):
"""
changepassword --database should operate on the specified DB.
"""
user = User.objects.db_manager("other").create_user(
username="joe", password="qwerty"
)
self.assertTrue(user.check_password("qwerty"))
out = StringIO()
call_command("changepassword", username="joe", database="other", stdout=out)
command_output = out.getvalue().strip()
self.assertEqual(
command_output,
"Changing password for user 'joe'\n"
"Password changed successfully for user 'joe'",
)
self.assertTrue(
User.objects.using("other").get(username="joe").check_password("not qwerty")
)
@override_settings(
SILENCED_SYSTEM_CHECKS=["fields.W342"], # ForeignKey(unique=True)
AUTH_PASSWORD_VALIDATORS=[
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}
],
)
class CreatesuperuserManagementCommandTestCase(TestCase):
def test_no_email_argument(self):
new_io = StringIO()
with self.assertRaisesMessage(
CommandError, "You must use --email with --noinput."
):
call_command(
"createsuperuser", interactive=False, username="joe", stdout=new_io
)
def test_basic_usage(self):
"Check the operation of the createsuperuser management command"
# We can use the management command to create a superuser
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
username="joe",
email="[email protected]",
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = User.objects.get(username="joe")
self.assertEqual(u.email, "[email protected]")
# created password should be unusable
self.assertFalse(u.has_usable_password())
def test_validate_username(self):
msg = (
"Enter a valid username. This value may contain only letters, numbers, "
"and @/./+/-/_ characters."
)
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username="🤠",
email="[email protected]",
)
def test_non_ascii_verbose_name(self):
@mock_inputs(
{
"password": "nopasswd",
"Uživatel (leave blank to use '%s'): "
% get_default_username(): "foo", # username (cz)
"email": "[email protected]",
}
)
def test(self):
username_field = User._meta.get_field("username")
old_verbose_name = username_field.verbose_name
username_field.verbose_name = _("u\u017eivatel")
new_io = StringIO()
try:
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stdin=MockTTY(),
)
finally:
username_field.verbose_name = old_verbose_name
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
test(self)
def test_verbosity_zero(self):
# We can suppress output on the management command
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
username="joe2",
email="[email protected]",
verbosity=0,
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "")
u = User.objects.get(username="joe2")
self.assertEqual(u.email, "[email protected]")
self.assertFalse(u.has_usable_password())
def test_email_in_username(self):
call_command(
"createsuperuser",
interactive=False,
username="[email protected]",
email="[email protected]",
verbosity=0,
)
u = User._default_manager.get(username="[email protected]")
self.assertEqual(u.email, "[email protected]")
self.assertFalse(u.has_usable_password())
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
def test_swappable_user(self):
"A superuser can be created when a custom user model is in use"
# We can use the management command to create a superuser
# We skip validation because the temporary substitution of the
# swappable User model messes with validation.
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
email="[email protected]",
date_of_birth="1976-04-01",
first_name="Joe",
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUser._default_manager.get(email="[email protected]")
self.assertEqual(u.date_of_birth, date(1976, 4, 1))
# created password should be unusable
self.assertFalse(u.has_usable_password())
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
def test_swappable_user_missing_required_field(self):
"A Custom superuser won't be created when a required field isn't provided"
# We can use the management command to create a superuser
# We skip validation because the temporary substitution of the
# swappable User model messes with validation.
new_io = StringIO()
with self.assertRaisesMessage(
CommandError, "You must use --email with --noinput."
):
call_command(
"createsuperuser",
interactive=False,
stdout=new_io,
stderr=new_io,
)
self.assertEqual(CustomUser._default_manager.count(), 0)
@override_settings(
AUTH_USER_MODEL="auth_tests.CustomUserNonUniqueUsername",
AUTHENTICATION_BACKENDS=["my.custom.backend"],
)
def test_swappable_user_username_non_unique(self):
@mock_inputs(
{
"username": "joe",
"password": "nopasswd",
}
)
def createsuperuser():
new_io = StringIO()
call_command(
"createsuperuser",
interactive=True,
email="[email protected]",
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
for i in range(2):
createsuperuser()
users = CustomUserNonUniqueUsername.objects.filter(username="joe")
self.assertEqual(users.count(), 2)
def test_skip_if_not_in_TTY(self):
"""
If the command is not called from a TTY, it should be skipped and a
message should be displayed (#7423).
"""
class FakeStdin:
"""A fake stdin object that has isatty() return False."""
def isatty(self):
return False
out = StringIO()
call_command(
"createsuperuser",
stdin=FakeStdin(),
stdout=out,
interactive=True,
)
self.assertEqual(User._default_manager.count(), 0)
self.assertIn("Superuser creation skipped", out.getvalue())
def test_passing_stdin(self):
"""
You can pass a stdin object as an option and it should be
available on self.stdin.
If no such option is passed, it defaults to sys.stdin.
"""
sentinel = object()
command = createsuperuser.Command()
call_command(
command,
stdin=sentinel,
interactive=False,
verbosity=0,
username="janet",
email="[email protected]",
)
self.assertIs(command.stdin, sentinel)
command = createsuperuser.Command()
call_command(
command,
interactive=False,
verbosity=0,
username="joe",
email="[email protected]",
)
self.assertIs(command.stdin, sys.stdin)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_fields_with_fk(self):
new_io = StringIO()
group = Group.objects.create(name="mygroup")
email = Email.objects.create(email="[email protected]")
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=email.email,
group=group.pk,
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUserWithFK._default_manager.get(email=email)
self.assertEqual(u.username, email)
self.assertEqual(u.group, group)
non_existent_email = "[email protected]"
msg = "email instance with email %r does not exist." % non_existent_email
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=non_existent_email,
stdout=new_io,
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_fields_with_fk_interactive(self):
new_io = StringIO()
group = Group.objects.create(name="mygroup")
email = Email.objects.create(email="[email protected]")
@mock_inputs(
{
"password": "nopasswd",
"Username (Email.id): ": email.pk,
"Email (Email.email): ": email.email,
"Group (Group.id): ": group.pk,
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUserWithFK._default_manager.get(email=email)
self.assertEqual(u.username, email)
self.assertEqual(u.group, group)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_fields_with_fk_via_option_interactive(self):
new_io = StringIO()
group = Group.objects.create(name="mygroup")
email = Email.objects.create(email="[email protected]")
@mock_inputs({"password": "nopasswd"})
def test(self):
call_command(
"createsuperuser",
interactive=True,
username=email.pk,
email=email.email,
group=group.pk,
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUserWithFK._default_manager.get(email=email)
self.assertEqual(u.username, email)
self.assertEqual(u.group, group)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_validate_fk(self):
email = Email.objects.create(email="[email protected]")
Group.objects.all().delete()
nonexistent_group_id = 1
msg = f"group instance with id {nonexistent_group_id} does not exist."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=email.email,
group=nonexistent_group_id,
verbosity=0,
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_validate_fk_environment_variable(self):
email = Email.objects.create(email="[email protected]")
Group.objects.all().delete()
nonexistent_group_id = 1
msg = f"group instance with id {nonexistent_group_id} does not exist."
with mock.patch.dict(
os.environ,
{"DJANGO_SUPERUSER_GROUP": str(nonexistent_group_id)},
):
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=email.email,
verbosity=0,
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_validate_fk_via_option_interactive(self):
email = Email.objects.create(email="[email protected]")
Group.objects.all().delete()
nonexistent_group_id = 1
msg = f"group instance with id {nonexistent_group_id} does not exist."
@mock_inputs(
{
"password": "nopasswd",
"Username (Email.id): ": email.pk,
"Email (Email.email): ": email.email,
}
)
def test(self):
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
group=nonexistent_group_id,
stdin=MockTTY(),
verbosity=0,
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2m")
def test_fields_with_m2m(self):
new_io = StringIO()
org_id_1 = Organization.objects.create(name="Organization 1").pk
org_id_2 = Organization.objects.create(name="Organization 2").pk
call_command(
"createsuperuser",
interactive=False,
username="joe",
orgs=[org_id_1, org_id_2],
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
user = CustomUserWithM2M._default_manager.get(username="joe")
self.assertEqual(user.orgs.count(), 2)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2M")
def test_fields_with_m2m_interactive(self):
new_io = StringIO()
org_id_1 = Organization.objects.create(name="Organization 1").pk
org_id_2 = Organization.objects.create(name="Organization 2").pk
@mock_inputs(
{
"password": "nopasswd",
"Username: ": "joe",
"Orgs (Organization.id): ": "%s, %s" % (org_id_1, org_id_2),
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
user = CustomUserWithM2M._default_manager.get(username="joe")
self.assertEqual(user.orgs.count(), 2)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2M")
def test_fields_with_m2m_interactive_blank(self):
new_io = StringIO()
org_id = Organization.objects.create(name="Organization").pk
entered_orgs = [str(org_id), " "]
def return_orgs():
return entered_orgs.pop()
@mock_inputs(
{
"password": "nopasswd",
"Username: ": "joe",
"Orgs (Organization.id): ": return_orgs,
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stderr=new_io,
stdin=MockTTY(),
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: This field cannot be blank.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2MThrough")
def test_fields_with_m2m_and_through(self):
msg = (
"Required field 'orgs' specifies a many-to-many relation through "
"model, which is not supported."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("createsuperuser")
def test_default_username(self):
"""createsuperuser uses a default username when one isn't provided."""
# Get the default username before creating a user.
default_username = get_default_username()
new_io = StringIO()
entered_passwords = ["password", "password"]
def return_passwords():
return entered_passwords.pop(0)
@mock_inputs({"password": return_passwords, "username": "", "email": ""})
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(), "Superuser created successfully."
)
self.assertTrue(User.objects.filter(username=default_username).exists())
test(self)
def test_password_validation(self):
"""
Creation should fail if the password fails validation.
"""
new_io = StringIO()
entered_passwords = ["1234567890", "1234567890", "password", "password"]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"username": "joe1234567890",
"email": "",
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"This password is entirely numeric.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
]
)
def test_validate_password_against_username(self):
new_io = StringIO()
username = "supremelycomplex"
entered_passwords = [
username,
username,
"superduperunguessablepassword",
"superduperunguessablepassword",
]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"username": username,
"email": "",
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"The password is too similar to the username.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(
AUTH_USER_MODEL="auth_tests.CustomUser",
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
],
)
def test_validate_password_against_required_fields(self):
new_io = StringIO()
first_name = "josephine"
entered_passwords = [
first_name,
first_name,
"superduperunguessablepassword",
"superduperunguessablepassword",
]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"username": "whatever",
"first_name": first_name,
"date_of_birth": "1970-01-01",
"email": "[email protected]",
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"The password is too similar to the first name.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(
AUTH_USER_MODEL="auth_tests.CustomUser",
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
],
)
def test_validate_password_against_required_fields_via_option(self):
new_io = StringIO()
first_name = "josephine"
entered_passwords = [
first_name,
first_name,
"superduperunguessablepassword",
"superduperunguessablepassword",
]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
first_name=first_name,
date_of_birth="1970-01-01",
email="[email protected]",
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"The password is too similar to the first name.\n"
"Superuser created successfully.",
)
test(self)
def test_blank_username(self):
"""Creation fails if --username is blank."""
new_io = StringIO()
with self.assertRaisesMessage(CommandError, "Username cannot be blank."):
call_command(
"createsuperuser",
username="",
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
def test_blank_username_non_interactive(self):
new_io = StringIO()
with self.assertRaisesMessage(CommandError, "Username cannot be blank."):
call_command(
"createsuperuser",
username="",
interactive=False,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
def test_password_validation_bypass(self):
"""
Password validation can be bypassed by entering 'y' at the prompt.
"""
new_io = StringIO()
@mock_inputs(
{
"password": "1234567890",
"username": "joe1234567890",
"email": "",
"bypass": "y",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"This password is entirely numeric.\n"
"Superuser created successfully.",
)
test(self)
def test_invalid_username(self):
"""Creation fails if the username fails validation."""
user_field = User._meta.get_field(User.USERNAME_FIELD)
new_io = StringIO()
entered_passwords = ["password", "password"]
# Enter an invalid (too long) username first and then a valid one.
invalid_username = ("x" * user_field.max_length) + "y"
entered_usernames = [invalid_username, "janet"]
def return_passwords():
return entered_passwords.pop(0)
def return_usernames():
return entered_usernames.pop(0)
@mock_inputs(
{"password": return_passwords, "username": return_usernames, "email": ""}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: Ensure this value has at most %s characters (it has %s).\n"
"Superuser created successfully."
% (user_field.max_length, len(invalid_username)),
)
test(self)
@mock_inputs({"username": "KeyboardInterrupt"})
def test_keyboard_interrupt(self):
new_io = StringIO()
with self.assertRaises(SystemExit):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(new_io.getvalue(), "\nOperation cancelled.\n")
def test_existing_username(self):
"""Creation fails if the username already exists."""
user = User.objects.create(username="janet")
new_io = StringIO()
entered_passwords = ["password", "password"]
# Enter the existing username first and then a new one.
entered_usernames = [user.username, "joe"]
def return_passwords():
return entered_passwords.pop(0)
def return_usernames():
return entered_usernames.pop(0)
@mock_inputs(
{"password": return_passwords, "username": return_usernames, "email": ""}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: That username is already taken.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithUniqueConstraint")
def test_existing_username_meta_unique_constraint(self):
"""
Creation fails if the username already exists and a custom user model
has UniqueConstraint.
"""
user = CustomUserWithUniqueConstraint.objects.create(username="janet")
new_io = StringIO()
entered_passwords = ["password", "password"]
# Enter the existing username first and then a new one.
entered_usernames = [user.username, "joe"]
def return_passwords():
return entered_passwords.pop(0)
def return_usernames():
return entered_usernames.pop(0)
@mock_inputs({"password": return_passwords, "username": return_usernames})
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: That username is already taken.\n"
"Superuser created successfully.",
)
test(self)
def test_existing_username_non_interactive(self):
"""Creation fails if the username already exists."""
User.objects.create(username="janet")
new_io = StringIO()
with self.assertRaisesMessage(
CommandError, "Error: That username is already taken."
):
call_command(
"createsuperuser",
username="janet",
email="",
interactive=False,
stdout=new_io,
)
def test_existing_username_provided_via_option_and_interactive(self):
"""call_command() gets username='janet' and interactive=True."""
new_io = StringIO()
entered_passwords = ["password", "password"]
User.objects.create(username="janet")
def return_passwords():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": return_passwords,
"username": "janet1",
"email": "[email protected]",
}
)
def test(self):
call_command(
"createsuperuser",
username="janet",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
msg = (
"Error: That username is already taken.\n"
"Superuser created successfully."
)
self.assertEqual(new_io.getvalue().strip(), msg)
test(self)
def test_validation_mismatched_passwords(self):
"""
Creation should fail if the user enters mismatched passwords.
"""
new_io = StringIO()
# The first two passwords do not match, but the second two do match and
# are valid.
entered_passwords = ["password", "not password", "password2", "password2"]
def mismatched_passwords_then_matched():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": mismatched_passwords_then_matched,
"username": "joe1234567890",
"email": "",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: Your passwords didn't match.\n"
"Superuser created successfully.",
)
test(self)
def test_validation_blank_password_entered(self):
"""
Creation should fail if the user enters blank passwords.
"""
new_io = StringIO()
# The first two passwords are empty strings, but the second two are
# valid.
entered_passwords = ["", "", "password2", "password2"]
def blank_passwords_then_valid():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": blank_passwords_then_valid,
"username": "joe1234567890",
"email": "",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: Blank passwords aren't allowed.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.NoPasswordUser")
def test_usermodel_without_password(self):
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
username="username",
)
self.assertEqual(new_io.getvalue().strip(), "Superuser created successfully.")
@override_settings(AUTH_USER_MODEL="auth_tests.NoPasswordUser")
def test_usermodel_without_password_interactive(self):
new_io = StringIO()
@mock_inputs({"username": "username"})
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(), "Superuser created successfully."
)
test(self)
@mock.patch.dict(
os.environ,
{
"DJANGO_SUPERUSER_PASSWORD": "test_password",
"DJANGO_SUPERUSER_USERNAME": "test_superuser",
"DJANGO_SUPERUSER_EMAIL": "[email protected]",
"DJANGO_SUPERUSER_FIRST_NAME": "ignored_first_name",
},
)
def test_environment_variable_non_interactive(self):
call_command("createsuperuser", interactive=False, verbosity=0)
user = User.objects.get(username="test_superuser")
self.assertEqual(user.email, "[email protected]")
self.assertTrue(user.check_password("test_password"))
# Environment variables are ignored for non-required fields.
self.assertEqual(user.first_name, "")
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2m")
def test_environment_variable_m2m_non_interactive(self):
new_io = StringIO()
org_id_1 = Organization.objects.create(name="Organization 1").pk
org_id_2 = Organization.objects.create(name="Organization 2").pk
with mock.patch.dict(
os.environ,
{
"DJANGO_SUPERUSER_ORGS": f"{org_id_1},{org_id_2}",
},
):
call_command(
"createsuperuser",
interactive=False,
username="joe",
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
user = CustomUserWithM2M._default_manager.get(username="joe")
self.assertEqual(user.orgs.count(), 2)
@mock.patch.dict(
os.environ,
{
"DJANGO_SUPERUSER_USERNAME": "test_superuser",
"DJANGO_SUPERUSER_EMAIL": "[email protected]",
},
)
def test_ignore_environment_variable_non_interactive(self):
        # Environment variables are ignored in non-interactive mode if the
        # values are provided as command line arguments.
call_command(
"createsuperuser",
interactive=False,
username="cmd_superuser",
email="[email protected]",
verbosity=0,
)
user = User.objects.get(username="cmd_superuser")
self.assertEqual(user.email, "[email protected]")
self.assertFalse(user.has_usable_password())
@mock.patch.dict(
os.environ,
{
"DJANGO_SUPERUSER_PASSWORD": "test_password",
"DJANGO_SUPERUSER_USERNAME": "test_superuser",
"DJANGO_SUPERUSER_EMAIL": "[email protected]",
},
)
def test_ignore_environment_variable_interactive(self):
# Environment variables are ignored in interactive mode.
@mock_inputs({"password": "cmd_password"})
def test(self):
call_command(
"createsuperuser",
interactive=True,
username="cmd_superuser",
email="[email protected]",
stdin=MockTTY(),
verbosity=0,
)
user = User.objects.get(username="cmd_superuser")
self.assertEqual(user.email, "[email protected]")
self.assertTrue(user.check_password("cmd_password"))
test(self)
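# Sketch summarizing the precedence exercised by the environment-variable
# tests above (illustrative helper, not a test): in --noinput mode the command
# falls back to DJANGO_SUPERUSER_<FIELD> variables, explicit command line
# options win over them, and in interactive mode the prompts are used instead.
def _example_env_precedence():
    with mock.patch.dict(
        os.environ,
        {
            "DJANGO_SUPERUSER_USERNAME": "env_user",
            "DJANGO_SUPERUSER_EMAIL": "env@example.com",
            "DJANGO_SUPERUSER_PASSWORD": "env_password",
        },
    ):
        # The username comes from the command line option; email and
        # password are picked up from the environment.
        call_command(
            "createsuperuser",
            interactive=False,
            username="cli_user",
            verbosity=0,
        )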
class MultiDBCreatesuperuserTestCase(TestCase):
databases = {"default", "other"}
def test_createsuperuser_command_with_database_option(self):
"""
createsuperuser --database should operate on the specified DB.
"""
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
username="joe",
email="[email protected]",
database="other",
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
user = User.objects.using("other").get(username="joe")
self.assertEqual(user.email, "[email protected]")
def test_createsuperuser_command_suggested_username_with_database_option(self):
default_username = get_default_username(database="other")
qs = User.objects.using("other")
@mock_inputs({"password": "nopasswd", "username": "", "email": ""})
def test_other_create_with_suggested_username(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
verbosity=0,
database="other",
)
self.assertIs(qs.filter(username=default_username).exists(), True)
test_other_create_with_suggested_username(self)
@mock_inputs({"password": "nopasswd", "Username: ": "other", "email": ""})
def test_other_no_suggestion(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
verbosity=0,
database="other",
)
self.assertIs(qs.filter(username="other").exists(), True)
test_other_no_suggestion(self)
class CreatePermissionsTests(TestCase):
def setUp(self):
self._original_permissions = Permission._meta.permissions[:]
self._original_default_permissions = Permission._meta.default_permissions
self.app_config = apps.get_app_config("auth")
def tearDown(self):
Permission._meta.permissions = self._original_permissions
Permission._meta.default_permissions = self._original_default_permissions
ContentType.objects.clear_cache()
def test_default_permissions(self):
permission_content_type = ContentType.objects.get_by_natural_key(
"auth", "permission"
)
Permission._meta.permissions = [
("my_custom_permission", "Some permission"),
]
create_permissions(self.app_config, verbosity=0)
# view/add/change/delete permission by default + custom permission
self.assertEqual(
Permission.objects.filter(
content_type=permission_content_type,
).count(),
5,
)
Permission.objects.filter(content_type=permission_content_type).delete()
Permission._meta.default_permissions = []
create_permissions(self.app_config, verbosity=0)
# custom permission only since default permissions is empty
self.assertEqual(
Permission.objects.filter(
content_type=permission_content_type,
).count(),
1,
)
def test_unavailable_models(self):
"""
#24075 - Permissions shouldn't be created or deleted if the ContentType
or Permission models aren't available.
"""
state = migrations.state.ProjectState()
# Unavailable contenttypes.ContentType
with self.assertNumQueries(0):
create_permissions(self.app_config, verbosity=0, apps=state.apps)
# Unavailable auth.Permission
state = migrations.state.ProjectState(real_apps={"contenttypes"})
with self.assertNumQueries(0):
create_permissions(self.app_config, verbosity=0, apps=state.apps)
def test_create_permissions_checks_contenttypes_created(self):
"""
`post_migrate` handler ordering isn't guaranteed. Simulate a case
where create_permissions() is called before create_contenttypes().
"""
# Warm the manager cache.
ContentType.objects.get_for_model(Group)
# Apply a deletion as if e.g. a database 'flush' had been executed.
ContentType.objects.filter(app_label="auth", model="group").delete()
# This fails with a foreign key constraint without the fix.
create_permissions(apps.get_app_config("auth"), interactive=False, verbosity=0)
def test_permission_with_proxy_content_type_created(self):
"""
A proxy model's permissions use its own content type rather than the
content type of the concrete model.
"""
opts = UserProxy._meta
codename = get_permission_codename("add", opts)
self.assertTrue(
Permission.objects.filter(
content_type__model=opts.model_name,
content_type__app_label=opts.app_label,
codename=codename,
).exists()
)
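    def _codename_sketch(self):
        # Illustrative sketch (not a test): default permission codenames are
        # built as "<action>_<model_name>", so the lookup above resolves to
        # "add_userproxy" for the proxy model.
        return get_permission_codename("add", UserProxy._meta)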
class DefaultDBRouter:
"""Route all writes to default."""
def db_for_write(self, model, **hints):
return "default"
@override_settings(DATABASE_ROUTERS=[DefaultDBRouter()])
class CreatePermissionsMultipleDatabasesTests(TestCase):
databases = {"default", "other"}
def test_set_permissions_fk_to_using_parameter(self):
Permission.objects.using("other").delete()
with self.assertNumQueries(6, using="other") as captured_queries:
create_permissions(apps.get_app_config("auth"), verbosity=0, using="other")
self.assertIn("INSERT INTO", captured_queries[-1]["sql"].upper())
self.assertGreater(Permission.objects.using("other").count(), 0)
import datetime
import re
import urllib.parse
from unittest import mock
from django.contrib.auth.forms import (
AdminPasswordChangeForm,
AuthenticationForm,
BaseUserCreationForm,
PasswordChangeForm,
PasswordResetForm,
ReadOnlyPasswordHashField,
ReadOnlyPasswordHashWidget,
SetPasswordForm,
UserChangeForm,
UserCreationForm,
)
from django.contrib.auth.models import User
from django.contrib.auth.signals import user_login_failed
from django.contrib.sites.models import Site
from django.core import mail
from django.core.exceptions import ValidationError
from django.core.mail import EmailMultiAlternatives
from django.forms import forms
from django.forms.fields import CharField, Field, IntegerField
from django.test import SimpleTestCase, TestCase, override_settings
from django.urls import reverse
from django.utils import translation
from django.utils.text import capfirst
from django.utils.translation import gettext as _
from .models.custom_user import (
CustomUser,
CustomUserWithoutIsActiveField,
ExtensionUser,
)
from .models.with_custom_email_field import CustomEmailField
from .models.with_integer_username import IntegerUsernameUser
from .models.with_many_to_many import CustomUserWithM2M, Organization
from .settings import AUTH_TEMPLATES
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(
username="testclient", password="password", email="[email protected]"
)
cls.u2 = User.objects.create_user(
username="inactive", password="password", is_active=False
)
cls.u3 = User.objects.create_user(username="staff", password="password")
cls.u4 = User.objects.create(username="empty_password", password="")
cls.u5 = User.objects.create(username="unmanageable_password", password="$")
cls.u6 = User.objects.create(username="unknown_password", password="foo$bar")
class BaseUserCreationFormTest(TestDataMixin, TestCase):
def test_user_already_exists(self):
data = {
"username": "testclient",
"password1": "test123",
"password2": "test123",
}
form = BaseUserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["username"].errors,
[str(User._meta.get_field("username").error_messages["unique"])],
)
def test_invalid_data(self):
data = {
"username": "jsmith!",
"password1": "test123",
"password2": "test123",
}
form = BaseUserCreationForm(data)
self.assertFalse(form.is_valid())
validator = next(
v
for v in User._meta.get_field("username").validators
if v.code == "invalid"
)
self.assertEqual(form["username"].errors, [str(validator.message)])
def test_password_verification(self):
# The verification password is incorrect.
data = {
"username": "jsmith",
"password1": "test123",
"password2": "test",
}
form = BaseUserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["password2"].errors, [str(form.error_messages["password_mismatch"])]
)
def test_both_passwords(self):
# One (or both) passwords weren't given
data = {"username": "jsmith"}
form = BaseUserCreationForm(data)
required_error = [str(Field.default_error_messages["required"])]
self.assertFalse(form.is_valid())
self.assertEqual(form["password1"].errors, required_error)
self.assertEqual(form["password2"].errors, required_error)
data["password2"] = "test123"
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["password1"].errors, required_error)
self.assertEqual(form["password2"].errors, [])
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
# The success case.
data = {
"username": "[email protected]",
"password1": "test123",
"password2": "test123",
}
form = BaseUserCreationForm(data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
u = form.save()
self.assertEqual(password_changed.call_count, 1)
self.assertEqual(repr(u), "<User: [email protected]>")
def test_unicode_username(self):
data = {
"username": "宝",
"password1": "test123",
"password2": "test123",
}
form = BaseUserCreationForm(data)
self.assertTrue(form.is_valid())
u = form.save()
self.assertEqual(u.username, "宝")
def test_normalize_username(self):
# The normalization happens in AbstractBaseUser.clean() and ModelForm
# validation calls Model.clean().
ohm_username = "testΩ" # U+2126 OHM SIGN
data = {
"username": ohm_username,
"password1": "pwd2",
"password2": "pwd2",
}
form = BaseUserCreationForm(data)
self.assertTrue(form.is_valid())
user = form.save()
self.assertNotEqual(user.username, ohm_username)
self.assertEqual(user.username, "testΩ") # U+03A9 GREEK CAPITAL LETTER OMEGA
def test_duplicate_normalized_unicode(self):
"""
To prevent almost identical usernames, visually identical but differing
by their unicode code points only, Unicode NFKC normalization should
make appear them equal to Django.
"""
omega_username = "iamtheΩ" # U+03A9 GREEK CAPITAL LETTER OMEGA
ohm_username = "iamtheΩ" # U+2126 OHM SIGN
self.assertNotEqual(omega_username, ohm_username)
User.objects.create_user(username=omega_username, password="pwd")
data = {
"username": ohm_username,
"password1": "pwd2",
"password2": "pwd2",
}
form = BaseUserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors["username"], ["A user with that username already exists."]
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_validates_password(self):
data = {
"username": "testclient",
"password1": "testclient",
"password2": "testclient",
}
form = BaseUserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form["password2"].errors), 2)
self.assertIn(
"The password is too similar to the username.", form["password2"].errors
)
self.assertIn(
"This password is too short. It must contain at least 12 characters.",
form["password2"].errors,
)
def test_custom_form(self):
class CustomUserCreationForm(BaseUserCreationForm):
class Meta(BaseUserCreationForm.Meta):
model = ExtensionUser
fields = UserCreationForm.Meta.fields + ("date_of_birth",)
data = {
"username": "testclient",
"password1": "testclient",
"password2": "testclient",
"date_of_birth": "1988-02-24",
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_with_different_username_field(self):
class CustomUserCreationForm(BaseUserCreationForm):
class Meta(BaseUserCreationForm.Meta):
model = CustomUser
fields = ("email", "date_of_birth")
data = {
"email": "[email protected]",
"password1": "testclient",
"password2": "testclient",
"date_of_birth": "1988-02-24",
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_hidden_username_field(self):
class CustomUserCreationForm(BaseUserCreationForm):
class Meta(BaseUserCreationForm.Meta):
model = CustomUserWithoutIsActiveField
fields = ("email",) # without USERNAME_FIELD
data = {
"email": "[email protected]",
"password1": "testclient",
"password2": "testclient",
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_saves_many_to_many_field(self):
class CustomUserCreationForm(BaseUserCreationForm):
class Meta(BaseUserCreationForm.Meta):
model = CustomUserWithM2M
fields = UserCreationForm.Meta.fields + ("orgs",)
organization = Organization.objects.create(name="organization 1")
data = {
"username": "[email protected]",
"password1": "testclient",
"password2": "testclient",
"orgs": [str(organization.pk)],
}
form = CustomUserCreationForm(data)
self.assertIs(form.is_valid(), True)
user = form.save(commit=True)
self.assertSequenceEqual(user.orgs.all(), [organization])
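    def _save_m2m_sketch(self, form):
        # Illustrative sketch (not a test) of standard ModelForm behavior:
        # save(commit=False) defers many-to-many data, so the caller must
        # save the instance and then invoke form.save_m2m() explicitly.
        user = form.save(commit=False)
        user.save()
        form.save_m2m()
        return user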
def test_password_whitespace_not_stripped(self):
data = {
"username": "testuser",
"password1": " testpassword ",
"password2": " testpassword ",
}
form = BaseUserCreationForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["password1"], data["password1"])
self.assertEqual(form.cleaned_data["password2"], data["password2"])
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
]
)
def test_password_help_text(self):
form = BaseUserCreationForm()
self.assertEqual(
form.fields["password1"].help_text,
"<ul><li>"
"Your password can’t be too similar to your other personal information."
"</li></ul>",
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
]
)
def test_user_create_form_validates_password_with_all_data(self):
"""
BaseUserCreationForm password validation uses all of the form's data.
"""
class CustomUserCreationForm(BaseUserCreationForm):
class Meta(BaseUserCreationForm.Meta):
model = User
fields = ("username", "email", "first_name", "last_name")
form = CustomUserCreationForm(
{
"username": "testuser",
"password1": "testpassword",
"password2": "testpassword",
"first_name": "testpassword",
"last_name": "lastname",
}
)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors["password2"],
["The password is too similar to the first name."],
)
def test_username_field_autocapitalize_none(self):
form = BaseUserCreationForm()
self.assertEqual(
form.fields["username"].widget.attrs.get("autocapitalize"), "none"
)
def test_html_autocomplete_attributes(self):
form = BaseUserCreationForm()
tests = (
("username", "username"),
("password1", "new-password"),
("password2", "new-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
class UserCreationFormTest(TestDataMixin, TestCase):
def test_case_insensitive_username(self):
data = {
"username": "TeStClIeNt",
"password1": "test123",
"password2": "test123",
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["username"].errors,
["A user with that username already exists."],
)
# To verify that the login form rejects inactive users, use an authentication
# backend that allows them.
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.AllowAllUsersModelBackend"]
)
class AuthenticationFormTest(TestDataMixin, TestCase):
def test_invalid_username(self):
# The user submits an invalid username.
data = {
"username": "jsmith_does_not_exist",
"password": "test123",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(),
[
form.error_messages["invalid_login"]
% {"username": User._meta.get_field("username").verbose_name}
],
)
def test_inactive_user(self):
# The user is inactive.
data = {
"username": "inactive",
"password": "password",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(), [str(form.error_messages["inactive"])]
)
# Use an authentication backend that rejects inactive users.
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.ModelBackend"]
)
def test_inactive_user_incorrect_password(self):
"""An invalid login doesn't leak the inactive status of a user."""
data = {
"username": "inactive",
"password": "incorrect",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(),
[
form.error_messages["invalid_login"]
% {"username": User._meta.get_field("username").verbose_name}
],
)
def test_login_failed(self):
signal_calls = []
def signal_handler(**kwargs):
signal_calls.append(kwargs)
user_login_failed.connect(signal_handler)
fake_request = object()
try:
form = AuthenticationForm(
fake_request,
{
"username": "testclient",
"password": "incorrect",
},
)
self.assertFalse(form.is_valid())
self.assertIs(signal_calls[0]["request"], fake_request)
finally:
user_login_failed.disconnect(signal_handler)
def test_inactive_user_i18n(self):
with self.settings(USE_I18N=True), translation.override(
"pt-br", deactivate=True
):
# The user is inactive.
data = {
"username": "inactive",
"password": "password",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(), [str(form.error_messages["inactive"])]
)
# Use an authentication backend that allows inactive users.
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.AllowAllUsersModelBackend"
]
)
def test_custom_login_allowed_policy(self):
# The user is inactive, but our custom form policy allows them to log in.
data = {
"username": "inactive",
"password": "password",
}
class AuthenticationFormWithInactiveUsersOkay(AuthenticationForm):
def confirm_login_allowed(self, user):
pass
form = AuthenticationFormWithInactiveUsersOkay(None, data)
self.assertTrue(form.is_valid())
# Raise a ValidationError in the form to disallow some logins according
# to custom logic.
class PickyAuthenticationForm(AuthenticationForm):
def confirm_login_allowed(self, user):
if user.username == "inactive":
raise ValidationError("This user is disallowed.")
raise ValidationError("Sorry, nobody's allowed in.")
form = PickyAuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), ["This user is disallowed."])
data = {
"username": "testclient",
"password": "password",
}
form = PickyAuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), ["Sorry, nobody's allowed in."])
def test_success(self):
# The success case
data = {
"username": "testclient",
"password": "password",
}
form = AuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.non_field_errors(), [])
def test_unicode_username(self):
User.objects.create_user(username="Σαρα", password="pwd")
data = {
"username": "Σαρα",
"password": "pwd",
}
form = AuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.non_field_errors(), [])
@override_settings(AUTH_USER_MODEL="auth_tests.CustomEmailField")
def test_username_field_max_length_matches_user_model(self):
self.assertEqual(CustomEmailField._meta.get_field("username").max_length, 255)
data = {
"username": "u" * 255,
"password": "pwd",
"email": "[email protected]",
}
CustomEmailField.objects.create_user(**data)
form = AuthenticationForm(None, data)
self.assertEqual(form.fields["username"].max_length, 255)
self.assertEqual(form.fields["username"].widget.attrs.get("maxlength"), 255)
self.assertEqual(form.errors, {})
@override_settings(AUTH_USER_MODEL="auth_tests.IntegerUsernameUser")
def test_username_field_max_length_defaults_to_254(self):
self.assertIsNone(IntegerUsernameUser._meta.get_field("username").max_length)
data = {
"username": "0123456",
"password": "password",
}
IntegerUsernameUser.objects.create_user(**data)
form = AuthenticationForm(None, data)
self.assertEqual(form.fields["username"].max_length, 254)
self.assertEqual(form.fields["username"].widget.attrs.get("maxlength"), 254)
self.assertEqual(form.errors, {})
def test_username_field_label(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField(label="Name", max_length=75)
form = CustomAuthenticationForm()
self.assertEqual(form["username"].label, "Name")
def test_username_field_label_not_set(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField()
form = CustomAuthenticationForm()
username_field = User._meta.get_field(User.USERNAME_FIELD)
self.assertEqual(
form.fields["username"].label, capfirst(username_field.verbose_name)
)
def test_username_field_autocapitalize_none(self):
form = AuthenticationForm()
self.assertEqual(
form.fields["username"].widget.attrs.get("autocapitalize"), "none"
)
def test_username_field_label_empty_string(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField(label="")
form = CustomAuthenticationForm()
self.assertEqual(form.fields["username"].label, "")
def test_password_whitespace_not_stripped(self):
data = {
"username": "testuser",
"password": " pass ",
}
form = AuthenticationForm(None, data)
        form.is_valid()  # Not necessary to have valid credentials for the test.
self.assertEqual(form.cleaned_data["password"], data["password"])
@override_settings(AUTH_USER_MODEL="auth_tests.IntegerUsernameUser")
def test_integer_username(self):
class CustomAuthenticationForm(AuthenticationForm):
username = IntegerField()
user = IntegerUsernameUser.objects.create_user(username=0, password="pwd")
data = {
"username": 0,
"password": "pwd",
}
form = CustomAuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["username"], data["username"])
self.assertEqual(form.cleaned_data["password"], data["password"])
self.assertEqual(form.errors, {})
self.assertEqual(form.user_cache, user)
def test_get_invalid_login_error(self):
error = AuthenticationForm().get_invalid_login_error()
self.assertIsInstance(error, ValidationError)
self.assertEqual(
error.message,
"Please enter a correct %(username)s and password. Note that both "
"fields may be case-sensitive.",
)
self.assertEqual(error.code, "invalid_login")
self.assertEqual(error.params, {"username": "username"})
def test_html_autocomplete_attributes(self):
form = AuthenticationForm()
tests = (
("username", "username"),
("password", "current-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
def test_no_password(self):
data = {"username": "username"}
form = AuthenticationForm(None, data)
self.assertIs(form.is_valid(), False)
self.assertEqual(
form["password"].errors, [Field.default_error_messages["required"]]
)
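    def _get_user_sketch(self):
        # Illustrative sketch (not a test): after a successful is_valid(),
        # the authenticated user is cached and exposed via get_user().
        form = AuthenticationForm(
            None, {"username": "testclient", "password": "password"}
        )
        if form.is_valid():
            return form.get_user()  # same object as form.user_cache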
class SetPasswordFormTest(TestDataMixin, TestCase):
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username="testclient")
data = {
"new_password1": "abc123",
"new_password2": "abc",
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["new_password2"].errors,
[str(form.error_messages["password_mismatch"])],
)
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
user = User.objects.get(username="testclient")
data = {
"new_password1": "abc123",
"new_password2": "abc123",
}
form = SetPasswordForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_validates_password(self):
user = User.objects.get(username="testclient")
data = {
"new_password1": "testclient",
"new_password2": "testclient",
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form["new_password2"].errors), 2)
self.assertIn(
"The password is too similar to the username.", form["new_password2"].errors
)
self.assertIn(
"This password is too short. It must contain at least 12 characters.",
form["new_password2"].errors,
)
def test_no_password(self):
user = User.objects.get(username="testclient")
data = {"new_password1": "new-password"}
form = SetPasswordForm(user, data)
self.assertIs(form.is_valid(), False)
self.assertEqual(
form["new_password2"].errors, [Field.default_error_messages["required"]]
)
form = SetPasswordForm(user, {})
self.assertIs(form.is_valid(), False)
self.assertEqual(
form["new_password1"].errors, [Field.default_error_messages["required"]]
)
self.assertEqual(
form["new_password2"].errors, [Field.default_error_messages["required"]]
)
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username="testclient")
data = {
"new_password1": " password ",
"new_password2": " password ",
}
form = SetPasswordForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["new_password1"], data["new_password1"])
self.assertEqual(form.cleaned_data["new_password2"], data["new_password2"])
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_help_text_translation(self):
french_help_texts = [
"Votre mot de passe ne peut pas trop ressembler à vos autres informations "
"personnelles.",
"Votre mot de passe doit contenir au minimum 12 caractères.",
]
form = SetPasswordForm(self.u1)
with translation.override("fr"):
html = form.as_p()
for french_text in french_help_texts:
self.assertIn(french_text, html)
def test_html_autocomplete_attributes(self):
form = SetPasswordForm(self.u1)
tests = (
("new_password1", "new-password"),
("new_password2", "new-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
class PasswordChangeFormTest(TestDataMixin, TestCase):
def test_incorrect_password(self):
user = User.objects.get(username="testclient")
data = {
"old_password": "test",
"new_password1": "abc123",
"new_password2": "abc123",
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["old_password"].errors,
[str(form.error_messages["password_incorrect"])],
)
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username="testclient")
data = {
"old_password": "password",
"new_password1": "abc123",
"new_password2": "abc",
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["new_password2"].errors,
[str(form.error_messages["password_mismatch"])],
)
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
# The success case.
user = User.objects.get(username="testclient")
data = {
"old_password": "password",
"new_password1": "abc123",
"new_password2": "abc123",
}
form = PasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
def test_field_order(self):
# Regression test - check the order of fields:
user = User.objects.get(username="testclient")
self.assertEqual(
list(PasswordChangeForm(user, {}).fields),
["old_password", "new_password1", "new_password2"],
)
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username="testclient")
user.set_password(" oldpassword ")
data = {
"old_password": " oldpassword ",
"new_password1": " pass ",
"new_password2": " pass ",
}
form = PasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["old_password"], data["old_password"])
self.assertEqual(form.cleaned_data["new_password1"], data["new_password1"])
self.assertEqual(form.cleaned_data["new_password2"], data["new_password2"])
def test_html_autocomplete_attributes(self):
user = User.objects.get(username="testclient")
form = PasswordChangeForm(user)
self.assertEqual(
form.fields["old_password"].widget.attrs["autocomplete"], "current-password"
)
class UserChangeFormTest(TestDataMixin, TestCase):
def test_username_validity(self):
user = User.objects.get(username="testclient")
data = {"username": "not valid"}
form = UserChangeForm(data, instance=user)
self.assertFalse(form.is_valid())
validator = next(
v
for v in User._meta.get_field("username").validators
if v.code == "invalid"
)
self.assertEqual(form["username"].errors, [str(validator.message)])
def test_bug_14242(self):
        # A regression test, introduced by adding an optimization for the
        # UserChangeForm.
class MyUserForm(UserChangeForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields[
"groups"
].help_text = "These groups give users different permissions"
class Meta(UserChangeForm.Meta):
fields = ("groups",)
# Just check we can create it
MyUserForm({})
def test_unusable_password(self):
user = User.objects.get(username="empty_password")
user.set_unusable_password()
user.save()
form = UserChangeForm(instance=user)
self.assertIn(_("No password set."), form.as_table())
def test_bug_17944_empty_password(self):
user = User.objects.get(username="empty_password")
form = UserChangeForm(instance=user)
self.assertIn(_("No password set."), form.as_table())
def test_bug_17944_unmanageable_password(self):
user = User.objects.get(username="unmanageable_password")
form = UserChangeForm(instance=user)
self.assertIn(
_("Invalid password format or unknown hashing algorithm."), form.as_table()
)
def test_bug_17944_unknown_password_algorithm(self):
user = User.objects.get(username="unknown_password")
form = UserChangeForm(instance=user)
self.assertIn(
_("Invalid password format or unknown hashing algorithm."), form.as_table()
)
def test_bug_19133(self):
"The change form does not return the password value"
# Use the form to construct the POST data
user = User.objects.get(username="testclient")
form_for_data = UserChangeForm(instance=user)
post_data = form_for_data.initial
# The password field should be readonly, so anything
# posted here should be ignored; the form will be
# valid, and give back the 'initial' value for the
# password field.
post_data["password"] = "new password"
form = UserChangeForm(instance=user, data=post_data)
self.assertTrue(form.is_valid())
# original hashed password contains $
self.assertIn("$", form.cleaned_data["password"])
def test_bug_19349_bound_password_field(self):
user = User.objects.get(username="testclient")
form = UserChangeForm(data={}, instance=user)
# When rendering the bound password field,
# ReadOnlyPasswordHashWidget needs the initial
# value to render correctly
self.assertEqual(form.initial["password"], form["password"].value())
@override_settings(ROOT_URLCONF="auth_tests.urls_admin")
def test_link_to_password_reset_in_helptext_via_to_field(self):
user = User.objects.get(username="testclient")
form = UserChangeForm(data={}, instance=user)
password_help_text = form.fields["password"].help_text
matches = re.search('<a href="(.*?)">', password_help_text)
# URL to UserChangeForm in admin via to_field (instead of pk).
admin_user_change_url = reverse(
f"admin:{user._meta.app_label}_{user._meta.model_name}_change",
args=(user.username,),
)
joined_url = urllib.parse.urljoin(admin_user_change_url, matches.group(1))
pw_change_url = reverse(
f"admin:{user._meta.app_label}_{user._meta.model_name}_password_change",
args=(user.pk,),
)
self.assertEqual(joined_url, pw_change_url)
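        # Worked example of the resolution asserted above (the pk value and
        # the relative link are hypothetical):
        #   urljoin("/admin/auth/user/testclient/change/", "../../3/password/")
        #   -> "/admin/auth/user/3/password/"
        # The help text link is relative, so it must resolve to the password
        # change view even when the change page is addressed via to_field
        # (the username) instead of the pk.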
def test_custom_form(self):
class CustomUserChangeForm(UserChangeForm):
class Meta(UserChangeForm.Meta):
model = ExtensionUser
fields = (
"username",
"password",
"date_of_birth",
)
user = User.objects.get(username="testclient")
data = {
"username": "testclient",
"password": "testclient",
"date_of_birth": "1998-02-24",
}
form = CustomUserChangeForm(data, instance=user)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(form.cleaned_data["username"], "testclient")
self.assertEqual(form.cleaned_data["date_of_birth"], datetime.date(1998, 2, 24))
def test_password_excluded(self):
class UserChangeFormWithoutPassword(UserChangeForm):
password = None
class Meta:
model = User
exclude = ["password"]
form = UserChangeFormWithoutPassword()
self.assertNotIn("password", form.fields)
def test_username_field_autocapitalize_none(self):
form = UserChangeForm()
self.assertEqual(
form.fields["username"].widget.attrs.get("autocapitalize"), "none"
)
@override_settings(TEMPLATES=AUTH_TEMPLATES)
class PasswordResetFormTest(TestDataMixin, TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
# This cleanup is necessary because contrib.sites cache
# makes tests interfere with each other, see #11505
Site.objects.clear_cache()
def create_dummy_user(self):
"""
Create a user and return a tuple (user_object, username, email).
"""
username = "jsmith"
        email = "[email protected]"
user = User.objects.create_user(username, email, "test123")
return (user, username, email)
def test_invalid_email(self):
data = {"email": "not valid"}
form = PasswordResetForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["email"].errors, [_("Enter a valid email address.")])
def test_user_email_unicode_collision(self):
User.objects.create_user("mike123", "[email protected]", "test123")
User.objects.create_user("mike456", "mı[email protected]", "test123")
data = {"email": "mı[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].to, ["mı[email protected]"])
def test_user_email_domain_unicode_collision(self):
User.objects.create_user("mike123", "[email protected]", "test123")
User.objects.create_user("mike456", "mike@ıxample.org", "test123")
data = {"email": "mike@ıxample.org"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ["mike@ıxample.org"])
def test_user_email_unicode_collision_nonexistent(self):
User.objects.create_user("mike123", "[email protected]", "test123")
data = {"email": "mı[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_user_email_domain_unicode_collision_nonexistent(self):
User.objects.create_user("mike123", "[email protected]", "test123")
data = {"email": "mike@ıxample.org"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_nonexistent_email(self):
"""
Test nonexistent email address. This should not fail because it would
expose information about registered users.
"""
data = {"email": "[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(len(mail.outbox), 0)
def test_cleaned_data(self):
(user, username, email) = self.create_dummy_user()
data = {"email": email}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save(domain_override="example.com")
self.assertEqual(form.cleaned_data["email"], email)
self.assertEqual(len(mail.outbox), 1)
def test_custom_email_subject(self):
data = {"email": "[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
# Since we're not providing a request object, we must provide a
# domain_override to prevent the save operation from failing in the
# potential case where contrib.sites is not installed. Refs #16412.
form.save(domain_override="example.com")
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Custom password reset on example.com")
def test_custom_email_constructor(self):
data = {"email": "[email protected]"}
class CustomEmailPasswordResetForm(PasswordResetForm):
def send_mail(
self,
subject_template_name,
email_template_name,
context,
from_email,
to_email,
html_email_template_name=None,
):
EmailMultiAlternatives(
"Forgot your password?",
"Sorry to hear you forgot your password.",
None,
[to_email],
["[email protected]"],
headers={"Reply-To": "[email protected]"},
alternatives=[
("Really sorry to hear you forgot your password.", "text/html")
],
).send()
form = CustomEmailPasswordResetForm(data)
self.assertTrue(form.is_valid())
# Since we're not providing a request object, we must provide a
# domain_override to prevent the save operation from failing in the
# potential case where contrib.sites is not installed. Refs #16412.
form.save(domain_override="example.com")
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Forgot your password?")
        self.assertEqual(mail.outbox[0].bcc, ["[email protected]"])
self.assertEqual(mail.outbox[0].content_subtype, "plain")
def test_preserve_username_case(self):
"""
Preserve the case of the user name (before the @ in the email address)
when creating a user (#5605).
"""
user = User.objects.create_user("forms_test2", "[email protected]", "test")
self.assertEqual(user.email, "[email protected]")
user = User.objects.create_user("forms_test3", "tesT", "test")
self.assertEqual(user.email, "tesT")
def test_inactive_user(self):
"""
Inactive user cannot receive password reset email.
"""
(user, username, email) = self.create_dummy_user()
user.is_active = False
user.save()
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_unusable_password(self):
user = User.objects.create_user("testuser", "[email protected]", "test")
data = {"email": "[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
user.set_unusable_password()
user.save()
form = PasswordResetForm(data)
# The form itself is valid, but no email is sent
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_save_plaintext_email(self):
"""
Test the PasswordResetForm.save() method with no html_email_template_name
parameter passed in.
Test to ensure original behavior is unchanged after the parameter was added.
"""
(user, username, email) = self.create_dummy_user()
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertFalse(message.is_multipart())
self.assertEqual(message.get_content_type(), "text/plain")
self.assertEqual(message.get("subject"), "Custom password reset on example.com")
self.assertEqual(len(mail.outbox[0].alternatives), 0)
self.assertEqual(message.get_all("to"), [email])
self.assertTrue(
re.match(r"^http://example.com/reset/[\w+/-]", message.get_payload())
)
def test_save_html_email_template_name(self):
"""
Test the PasswordResetForm.save() method with html_email_template_name
parameter specified.
Test to ensure that a multipart email is sent with both text/plain
and text/html parts.
"""
(user, username, email) = self.create_dummy_user()
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save(
html_email_template_name="registration/html_password_reset_email.html"
)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(len(mail.outbox[0].alternatives), 1)
message = mail.outbox[0].message()
self.assertEqual(message.get("subject"), "Custom password reset on example.com")
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), "text/plain")
self.assertEqual(message.get_payload(1).get_content_type(), "text/html")
self.assertEqual(message.get_all("to"), [email])
self.assertTrue(
re.match(
r"^http://example.com/reset/[\w/-]+",
message.get_payload(0).get_payload(),
)
)
self.assertTrue(
re.match(
r'^<html><a href="http://example.com/reset/[\w/-]+/">Link</a></html>$',
message.get_payload(1).get_payload(),
)
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomEmailField")
def test_custom_email_field(self):
        email = "[email protected]"
CustomEmailField.objects.create_user("test name", "test password", email)
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(form.cleaned_data["email"], email)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [email])
def test_html_autocomplete_attributes(self):
form = PasswordResetForm()
self.assertEqual(form.fields["email"].widget.attrs["autocomplete"], "email")
class ReadOnlyPasswordHashTest(SimpleTestCase):
def test_bug_19349_render_with_none_value(self):
# Rendering the widget with value set to None
# mustn't raise an exception.
widget = ReadOnlyPasswordHashWidget()
html = widget.render(name="password", value=None, attrs={})
self.assertIn(_("No password set."), html)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.PBKDF2PasswordHasher"]
)
def test_render(self):
widget = ReadOnlyPasswordHashWidget()
value = (
"pbkdf2_sha256$100000$a6Pucb1qSFcD$WmCkn9Hqidj48NVe5x0FEM6A9YiOqQcl/83m2Z5u"
"dm0="
)
self.assertHTMLEqual(
widget.render("name", value, {"id": "id_password"}),
'<div id="id_password">'
" <strong>algorithm</strong>: <bdi>pbkdf2_sha256</bdi>"
" <strong>iterations</strong>: <bdi>100000</bdi>"
" <strong>salt</strong>: <bdi>a6Pucb******</bdi>"
" <strong>hash</strong>: "
" <bdi>WmCkn9**************************************</bdi>"
"</div>",
)
def test_readonly_field_has_changed(self):
field = ReadOnlyPasswordHashField()
self.assertIs(field.disabled, True)
self.assertFalse(field.has_changed("aaa", "bbb"))
def test_label(self):
"""
ReadOnlyPasswordHashWidget doesn't contain a for attribute in the
<label> because it doesn't have any labelable elements.
"""
class TestForm(forms.Form):
hash_field = ReadOnlyPasswordHashField()
bound_field = TestForm()["hash_field"]
self.assertIsNone(bound_field.field.widget.id_for_label("id"))
self.assertEqual(bound_field.label_tag(), "<label>Hash field:</label>")
class AdminPasswordChangeFormTest(TestDataMixin, TestCase):
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
user = User.objects.get(username="testclient")
data = {
"password1": "test123",
"password2": "test123",
}
form = AdminPasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
self.assertEqual(form.changed_data, ["password"])
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username="testclient")
data = {
"password1": " pass ",
"password2": " pass ",
}
form = AdminPasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["password1"], data["password1"])
self.assertEqual(form.cleaned_data["password2"], data["password2"])
self.assertEqual(form.changed_data, ["password"])
def test_non_matching_passwords(self):
user = User.objects.get(username="testclient")
data = {"password1": "password1", "password2": "password2"}
form = AdminPasswordChangeForm(user, data)
self.assertEqual(
form.errors["password2"], [form.error_messages["password_mismatch"]]
)
self.assertEqual(form.changed_data, ["password"])
def test_missing_passwords(self):
user = User.objects.get(username="testclient")
data = {"password1": "", "password2": ""}
form = AdminPasswordChangeForm(user, data)
required_error = [Field.default_error_messages["required"]]
self.assertEqual(form.errors["password1"], required_error)
self.assertEqual(form.errors["password2"], required_error)
self.assertEqual(form.changed_data, [])
def test_one_password(self):
user = User.objects.get(username="testclient")
form1 = AdminPasswordChangeForm(user, {"password1": "", "password2": "test"})
required_error = [Field.default_error_messages["required"]]
self.assertEqual(form1.errors["password1"], required_error)
self.assertNotIn("password2", form1.errors)
self.assertEqual(form1.changed_data, [])
form2 = AdminPasswordChangeForm(user, {"password1": "test", "password2": ""})
self.assertEqual(form2.errors["password2"], required_error)
self.assertNotIn("password1", form2.errors)
self.assertEqual(form2.changed_data, [])
def test_html_autocomplete_attributes(self):
user = User.objects.get(username="testclient")
form = AdminPasswordChangeForm(user)
tests = (
("password1", "new-password"),
("password2", "new-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
|
6692d862711bb6d9e67a17ce7dccdba9d6520ec2871870f7583c102c20b09b5f | import threading
import time
from unittest import mock
from multiple_database.routers import TestRouter
from django.core.exceptions import FieldError
from django.db import (
DatabaseError,
NotSupportedError,
connection,
connections,
router,
transaction,
)
from django.test import (
TransactionTestCase,
override_settings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext
from .models import (
City,
CityCountryProxy,
Country,
EUCity,
EUCountry,
Person,
PersonProfile,
)
class SelectForUpdateTests(TransactionTestCase):
available_apps = ["select_for_update"]
def setUp(self):
# This is executed in autocommit mode so that code in
# run_select_for_update can see this data.
self.country1 = Country.objects.create(name="Belgium")
self.country2 = Country.objects.create(name="France")
self.city1 = City.objects.create(name="Liberchies", country=self.country1)
self.city2 = City.objects.create(name="Samois-sur-Seine", country=self.country2)
self.person = Person.objects.create(
name="Reinhardt", born=self.city1, died=self.city2
)
self.person_profile = PersonProfile.objects.create(person=self.person)
        # We need another database connection in a transaction to test that
        # one connection issuing a SELECT ... FOR UPDATE will block.
self.new_connection = connection.copy()
def tearDown(self):
try:
self.end_blocking_transaction()
except (DatabaseError, AttributeError):
pass
self.new_connection.close()
def start_blocking_transaction(self):
self.new_connection.set_autocommit(False)
# Start a blocking transaction. At some point,
# end_blocking_transaction() should be called.
self.cursor = self.new_connection.cursor()
sql = "SELECT * FROM %(db_table)s %(for_update)s;" % {
"db_table": Person._meta.db_table,
"for_update": self.new_connection.ops.for_update_sql(),
}
self.cursor.execute(sql, ())
self.cursor.fetchone()
def end_blocking_transaction(self):
# Roll back the blocking transaction.
self.cursor.close()
self.new_connection.rollback()
self.new_connection.set_autocommit(True)
def has_for_update_sql(self, queries, **kwargs):
# Examine the SQL that was executed to determine whether it
# contains the 'SELECT..FOR UPDATE' stanza.
for_update_sql = connection.ops.for_update_sql(**kwargs)
return any(for_update_sql in query["sql"] for query in queries)
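        # For reference, the matched fragments look like the following
        # (PostgreSQL spellings, given here as an assumption; other backends
        # differ):
        #   FOR UPDATE
        #   FOR UPDATE NOWAIT
        #   FOR UPDATE SKIP LOCKED
        #   FOR NO KEY UPDATE
        #   FOR UPDATE OF "select_for_update_person"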
@skipUnlessDBFeature("has_select_for_update")
def test_for_update_sql_generated(self):
"""
The backend's FOR UPDATE variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update())
self.assertTrue(self.has_for_update_sql(ctx.captured_queries))
@skipUnlessDBFeature("has_select_for_update_nowait")
def test_for_update_sql_generated_nowait(self):
"""
The backend's FOR UPDATE NOWAIT variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update(nowait=True))
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, nowait=True))
@skipUnlessDBFeature("has_select_for_update_skip_locked")
def test_for_update_sql_generated_skip_locked(self):
"""
The backend's FOR UPDATE SKIP LOCKED variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update(skip_locked=True))
        self.assertTrue(
            self.has_for_update_sql(ctx.captured_queries, skip_locked=True)
        )
@skipUnlessDBFeature("has_select_for_no_key_update")
def test_update_sql_generated_no_key(self):
"""
The backend's FOR NO KEY UPDATE variant appears in generated SQL when
select_for_update() is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update(no_key=True))
self.assertIs(self.has_for_update_sql(ctx.captured_queries, no_key=True), True)
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_generated_of(self):
"""
The backend's FOR UPDATE OF variant appears in the generated SQL when
select_for_update() is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
Person.objects.select_related(
"born__country",
)
.select_for_update(
of=("born__country",),
)
.select_for_update(of=("self", "born__country"))
)
features = connections["default"].features
if features.select_for_update_of_column:
expected = [
'select_for_update_person"."id',
'select_for_update_country"."entity_ptr_id',
]
else:
expected = ["select_for_update_person", "select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_inheritance_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(EUCountry.objects.select_for_update(of=("self",)))
if connection.features.select_for_update_of_column:
expected = ['select_for_update_eucountry"."country_ptr_id']
else:
expected = ["select_for_update_eucountry"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_inheritance_ptr_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCountry.objects.select_for_update(
of=(
"self",
"country_ptr",
)
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_eucountry"."country_ptr_id',
'select_for_update_country"."entity_ptr_id',
]
else:
expected = ["select_for_update_eucountry", "select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_related_model_inheritance_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCity.objects.select_related("country").select_for_update(
of=("self", "country"),
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_eucity"."id',
'select_for_update_eucountry"."country_ptr_id',
]
else:
expected = ["select_for_update_eucity", "select_for_update_eucountry"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_inheritance_nested_ptr_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCity.objects.select_related("country").select_for_update(
of=(
"self",
"country__country_ptr",
),
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_eucity"."id',
'select_for_update_country"."entity_ptr_id',
]
else:
expected = ["select_for_update_eucity", "select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_multilevel_model_inheritance_ptr_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCountry.objects.select_for_update(
of=("country_ptr", "country_ptr__entity_ptr"),
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_country"."entity_ptr_id',
'select_for_update_entity"."id',
]
else:
expected = ["select_for_update_country", "select_for_update_entity"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_proxy_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
CityCountryProxy.objects.select_related("country").select_for_update(
of=("country",),
)
)
if connection.features.select_for_update_of_column:
expected = ['select_for_update_country"."entity_ptr_id']
else:
expected = ["select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_of_followed_by_values(self):
with transaction.atomic():
values = list(Person.objects.select_for_update(of=("self",)).values("pk"))
self.assertEqual(values, [{"pk": self.person.pk}])
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_of_followed_by_values_list(self):
with transaction.atomic():
values = list(
Person.objects.select_for_update(of=("self",)).values_list("pk")
)
self.assertEqual(values, [(self.person.pk,)])
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_of_self_when_self_is_not_selected(self):
"""
select_for_update(of=['self']) when the only columns selected are from
related tables.
"""
with transaction.atomic():
values = list(
Person.objects.select_related("born")
.select_for_update(of=("self",))
.values("born__name")
)
self.assertEqual(values, [{"born__name": self.city1.name}])
@skipUnlessDBFeature(
"has_select_for_update_of",
"supports_select_for_update_with_limit",
)
def test_for_update_of_with_exists(self):
with transaction.atomic():
qs = Person.objects.select_for_update(of=("self", "born"))
self.assertIs(qs.exists(), True)
@skipUnlessDBFeature("has_select_for_update_nowait", "supports_transactions")
def test_nowait_raises_error_on_block(self):
"""
If nowait is specified, we expect an error to be raised rather
than blocking.
"""
self.start_blocking_transaction()
status = []
thread = threading.Thread(
target=self.run_select_for_update,
args=(status,),
kwargs={"nowait": True},
)
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.assertIsInstance(status[-1], DatabaseError)
@skipUnlessDBFeature("has_select_for_update_skip_locked", "supports_transactions")
def test_skip_locked_skips_locked_rows(self):
"""
If skip_locked is specified, the locked row is skipped resulting in
Person.DoesNotExist.
"""
self.start_blocking_transaction()
status = []
thread = threading.Thread(
target=self.run_select_for_update,
args=(status,),
kwargs={"skip_locked": True},
)
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.assertIsInstance(status[-1], Person.DoesNotExist)
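        # Summary of the two lock-conflict behaviors exercised by this test
        # and the previous one:
        #   nowait=True      -> the backend errors out immediately, surfacing
        #                       as a DatabaseError in the worker thread
        #   skip_locked=True -> locked rows are silently skipped, so .get()
        #                       matches nothing and raises Person.DoesNotExist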
@skipIfDBFeature("has_select_for_update_nowait")
@skipUnlessDBFeature("has_select_for_update")
def test_unsupported_nowait_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE NOWAIT is run on
a database backend that supports FOR UPDATE but not NOWAIT.
"""
with self.assertRaisesMessage(
NotSupportedError, "NOWAIT is not supported on this database backend."
):
with transaction.atomic():
Person.objects.select_for_update(nowait=True).get()
@skipIfDBFeature("has_select_for_update_skip_locked")
@skipUnlessDBFeature("has_select_for_update")
def test_unsupported_skip_locked_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE SKIP LOCKED is run
on a database backend that supports FOR UPDATE but not SKIP LOCKED.
"""
with self.assertRaisesMessage(
NotSupportedError, "SKIP LOCKED is not supported on this database backend."
):
with transaction.atomic():
Person.objects.select_for_update(skip_locked=True).get()
@skipIfDBFeature("has_select_for_update_of")
@skipUnlessDBFeature("has_select_for_update")
def test_unsupported_of_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE OF... is run on
a database backend that supports FOR UPDATE but not OF.
"""
msg = "FOR UPDATE OF is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
Person.objects.select_for_update(of=("self",)).get()
@skipIfDBFeature("has_select_for_no_key_update")
@skipUnlessDBFeature("has_select_for_update")
    def test_unsupported_no_key_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR NO KEY UPDATE... is run
on a database backend that supports FOR UPDATE but not NO KEY.
"""
msg = "FOR NO KEY UPDATE is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
Person.objects.select_for_update(no_key=True).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_unrelated_of_argument_raises_error(self):
"""
FieldError is raised if a non-relation field is specified in of=(...).
"""
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): %s. "
"Only relational fields followed in the query are allowed. "
"Choices are: self, born, born__country, "
"born__country__entity_ptr."
)
invalid_of = [
("nonexistent",),
("name",),
("born__nonexistent",),
("born__name",),
("born__nonexistent", "born__name"),
]
for of in invalid_of:
with self.subTest(of=of):
with self.assertRaisesMessage(FieldError, msg % ", ".join(of)):
with transaction.atomic():
Person.objects.select_related(
"born__country"
).select_for_update(of=of).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_related_but_unselected_of_argument_raises_error(self):
"""
FieldError is raised if a relation field that is not followed in the
query is specified in of=(...).
"""
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): %s. "
"Only relational fields followed in the query are allowed. "
"Choices are: self, born, profile."
)
for name in ["born__country", "died", "died__country"]:
with self.subTest(name=name):
with self.assertRaisesMessage(FieldError, msg % name):
with transaction.atomic():
Person.objects.select_related("born", "profile").exclude(
profile=None
).select_for_update(of=(name,)).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_model_inheritance_of_argument_raises_error_ptr_in_choices(self):
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): "
"name. Only relational fields followed in the query are allowed. "
"Choices are: self, %s."
)
with self.assertRaisesMessage(
FieldError,
msg % "country, country__country_ptr, country__country_ptr__entity_ptr",
):
with transaction.atomic():
EUCity.objects.select_related(
"country",
).select_for_update(of=("name",)).get()
with self.assertRaisesMessage(
FieldError, msg % "country_ptr, country_ptr__entity_ptr"
):
with transaction.atomic():
EUCountry.objects.select_for_update(of=("name",)).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_model_proxy_of_argument_raises_error_proxy_field_in_choices(self):
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): "
"name. Only relational fields followed in the query are allowed. "
"Choices are: self, country, country__entity_ptr."
)
with self.assertRaisesMessage(FieldError, msg):
with transaction.atomic():
CityCountryProxy.objects.select_related(
"country",
).select_for_update(of=("name",)).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_reverse_one_to_one_of_arguments(self):
"""
Reverse OneToOneFields may be included in of=(...) as long as NULLs
are excluded because LEFT JOIN isn't allowed in SELECT FOR UPDATE.
"""
with transaction.atomic():
person = (
Person.objects.select_related(
"profile",
)
.exclude(profile=None)
.select_for_update(of=("profile",))
.get()
)
self.assertEqual(person.profile, self.person_profile)
@skipUnlessDBFeature("has_select_for_update")
def test_for_update_after_from(self):
features_class = connections["default"].features.__class__
attribute_to_patch = "%s.%s.for_update_after_from" % (
features_class.__module__,
features_class.__name__,
)
with mock.patch(attribute_to_patch, return_value=True):
with transaction.atomic():
self.assertIn(
"FOR UPDATE WHERE",
str(Person.objects.filter(name="foo").select_for_update().query),
)
@skipUnlessDBFeature("has_select_for_update", "supports_transactions")
def test_for_update_requires_transaction(self):
"""
A TransactionManagementError is raised
when a select_for_update query is executed outside of a transaction.
"""
msg = "select_for_update cannot be used outside of a transaction."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
list(Person.objects.select_for_update())
@skipUnlessDBFeature("has_select_for_update", "supports_transactions")
def test_for_update_requires_transaction_only_in_execution(self):
"""
No TransactionManagementError is raised
when select_for_update is invoked outside of a transaction -
only when the query is executed.
"""
people = Person.objects.select_for_update()
msg = "select_for_update cannot be used outside of a transaction."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
list(people)
@skipUnlessDBFeature("supports_select_for_update_with_limit")
def test_select_for_update_with_limit(self):
other = Person.objects.create(name="Grappeli", born=self.city1, died=self.city2)
with transaction.atomic():
qs = list(Person.objects.order_by("pk").select_for_update()[1:2])
self.assertEqual(qs[0], other)
@skipIfDBFeature("supports_select_for_update_with_limit")
def test_unsupported_select_for_update_with_limit(self):
msg = (
"LIMIT/OFFSET is not supported with select_for_update on this database "
"backend."
)
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
list(Person.objects.order_by("pk").select_for_update()[1:2])
def run_select_for_update(self, status, **kwargs):
"""
Utility method that runs a SELECT FOR UPDATE against all
Person instances. After the select_for_update, it attempts
to update the name of the only record, save, and commit.
This function expects to run in a separate thread.
"""
status.append("started")
try:
            # We need to enter transaction management again, as this is done
            # on a per-thread basis.
with transaction.atomic():
person = Person.objects.select_for_update(**kwargs).get()
person.name = "Fred"
person.save()
except (DatabaseError, Person.DoesNotExist) as e:
status.append(e)
finally:
# This method is run in a separate thread. It uses its own
# database connection. Close it without waiting for the GC.
connection.close()
@skipUnlessDBFeature("has_select_for_update")
@skipUnlessDBFeature("supports_transactions")
def test_block(self):
"""
A thread running a select_for_update that accesses rows being touched
by a similar operation on another connection blocks correctly.
"""
# First, let's start the transaction in our thread.
self.start_blocking_transaction()
# Now, try it again using the ORM's select_for_update
# facility. Do this in a separate thread.
status = []
thread = threading.Thread(target=self.run_select_for_update, args=(status,))
# The thread should immediately block, but we'll sleep
# for a bit to make sure.
thread.start()
sanity_count = 0
while len(status) != 1 and sanity_count < 10:
sanity_count += 1
time.sleep(1)
if sanity_count >= 10:
raise ValueError("Thread did not run and block")
# Check the person hasn't been updated. Since this isn't
# using FOR UPDATE, it won't block.
p = Person.objects.get(pk=self.person.pk)
self.assertEqual("Reinhardt", p.name)
# When we end our blocking transaction, our thread should
# be able to continue.
self.end_blocking_transaction()
thread.join(5.0)
# Check the thread has finished. Assuming it has, we should
# find that it has updated the person's name.
self.assertFalse(thread.is_alive())
# We must commit the transaction to ensure that MySQL gets a fresh read,
# since by default it runs in REPEATABLE READ mode
transaction.commit()
p = Person.objects.get(pk=self.person.pk)
self.assertEqual("Fred", p.name)
@skipUnlessDBFeature("has_select_for_update", "supports_transactions")
def test_raw_lock_not_available(self):
"""
        Running a raw query which can't obtain a FOR UPDATE lock raises
        the correct exception.
"""
self.start_blocking_transaction()
def raw(status):
try:
list(
Person.objects.raw(
"SELECT * FROM %s %s"
% (
Person._meta.db_table,
connection.ops.for_update_sql(nowait=True),
)
)
)
except DatabaseError as e:
status.append(e)
finally:
# This method is run in a separate thread. It uses its own
# database connection. Close it without waiting for the GC.
# Connection cannot be closed on Oracle because cursor is still
# open.
if connection.vendor != "oracle":
connection.close()
status = []
thread = threading.Thread(target=raw, kwargs={"status": status})
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.assertIsInstance(status[-1], DatabaseError)
@skipUnlessDBFeature("has_select_for_update")
@override_settings(DATABASE_ROUTERS=[TestRouter()])
def test_select_for_update_on_multidb(self):
query = Person.objects.select_for_update()
self.assertEqual(router.db_for_write(Person), query.db)
@skipUnlessDBFeature("has_select_for_update")
def test_select_for_update_with_get(self):
with transaction.atomic():
person = Person.objects.select_for_update().get(name="Reinhardt")
self.assertEqual(person.name, "Reinhardt")
def test_nowait_and_skip_locked(self):
with self.assertRaisesMessage(
ValueError, "The nowait option cannot be used with skip_locked."
):
Person.objects.select_for_update(nowait=True, skip_locked=True)
def test_ordered_select_for_update(self):
"""
Subqueries should respect ordering as an ORDER BY clause may be useful
to specify a row locking order to prevent deadlocks (#27193).
"""
with transaction.atomic():
qs = Person.objects.filter(
id__in=Person.objects.order_by("-id").select_for_update()
)
self.assertIn("ORDER BY", str(qs.query))
|
502b3f10f08c4039b22883cb24c80706ebd22ddbc42c1ccdea3f959c41f560ff | import datetime
import math
import re
from decimal import Decimal
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
Avg,
Case,
Count,
DateField,
DateTimeField,
DecimalField,
DurationField,
Exists,
F,
FloatField,
IntegerField,
Max,
Min,
OuterRef,
Q,
StdDev,
Subquery,
Sum,
TimeField,
Value,
Variance,
When,
)
from django.db.models.expressions import Func, RawSQL
from django.db.models.functions import (
Cast,
Coalesce,
Greatest,
Lower,
Now,
Pi,
TruncDate,
TruncHour,
)
from django.test import TestCase
from django.test.testcases import skipUnlessDBFeature
from django.test.utils import Approximate, CaptureQueriesContext
from django.utils import timezone
from .models import Author, Book, Publisher, Store
class NowUTC(Now):
template = "CURRENT_TIMESTAMP"
output_field = DateTimeField()
def as_sql(self, compiler, connection, **extra_context):
if connection.features.test_now_utc_template:
extra_context["template"] = connection.features.test_now_utc_template
return super().as_sql(compiler, connection, **extra_context)
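# NowUTC above is a minimal example of per-backend SQL overriding via Func
# templates: a backend may expose its own spelling through the
# test_now_utc_template feature flag, e.g. (hypothetical value):
#
#   class DatabaseFeatures(BaseDatabaseFeatures):
#       test_now_utc_template = "UTC_TIMESTAMP(6)"
#
# as_sql() then injects that spelling via extra_context["template"], so the
# base Func.as_sql() renders backend-specific SQL without per-backend
# subclasses.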
class AggregateTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="Brad Dayley", age=45)
cls.a4 = Author.objects.create(name="James Bennett", age=29)
cls.a5 = Author.objects.create(name="Jeffrey Forcier", age=37)
cls.a6 = Author.objects.create(name="Paul Bissex", age=29)
cls.a7 = Author.objects.create(name="Wesley J. Chun", age=25)
cls.a8 = Author.objects.create(name="Peter Norvig", age=57)
cls.a9 = Author.objects.create(name="Stuart Russell", age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(
name="Apress", num_awards=3, duration=datetime.timedelta(days=1)
)
cls.p2 = Publisher.objects.create(
name="Sams", num_awards=1, duration=datetime.timedelta(days=2)
)
cls.p3 = Publisher.objects.create(name="Prentice Hall", num_awards=7)
cls.p4 = Publisher.objects.create(name="Morgan Kaufmann", num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn="159059725",
name="The Definitive Guide to Django: Web Development Done Right",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn="067232959",
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
rating=3.0,
price=Decimal("23.09"),
contact=cls.a3,
publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn="159059996",
name="Practical Django Projects",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a4,
publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.b4 = Book.objects.create(
isbn="013235613",
name="Python Web Development with Django",
pages=350,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a5,
publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3),
)
cls.b5 = Book.objects.create(
isbn="013790395",
name="Artificial Intelligence: A Modern Approach",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a8,
publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15),
)
cls.b6 = Book.objects.create(
isbn="155860191",
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a8,
publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
s2 = Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30),
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def test_empty_aggregate(self):
self.assertEqual(Author.objects.aggregate(), {})
def test_aggregate_in_order_by(self):
msg = (
"Using an aggregate in order_by() without also including it in "
"annotate() is not allowed: Avg(F(book__rating)"
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.values("age").order_by(Avg("book__rating"))
def test_single_aggregate(self):
vals = Author.objects.aggregate(Avg("age"))
self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
def test_multiple_aggregates(self):
vals = Author.objects.aggregate(Sum("age"), Avg("age"))
self.assertEqual(
vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)}
)
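        # Worked numbers: the nine author ages created in setUpTestData are
        # 34, 35, 45, 29, 37, 29, 25, 57, 46, so Sum = 337 and
        # Avg = 337 / 9 ≈ 37.44, matching Approximate(37.4, places=1) here
        # and in test_single_aggregate.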
def test_filter_aggregate(self):
vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
self.assertEqual(vals, {"age__sum": 254})
def test_related_aggregate(self):
vals = Author.objects.aggregate(Avg("friends__age"))
self.assertEqual(vals, {"friends__age__avg": Approximate(34.07, places=2)})
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
self.assertEqual(vals, {"authors__age__avg": Approximate(38.2857, places=2)})
vals = Author.objects.filter(name__contains="a").aggregate(Avg("book__rating"))
self.assertEqual(vals, {"book__rating__avg": 4.0})
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
self.assertEqual(vals, {"publisher__num_awards__sum": 30})
vals = Publisher.objects.aggregate(Sum("book__price"))
self.assertEqual(vals, {"book__price__sum": Decimal("270.27")})
def test_aggregate_multi_join(self):
vals = Store.objects.aggregate(Max("books__authors__age"))
self.assertEqual(vals, {"books__authors__age__max": 57})
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
self.assertEqual(vals, {"book__publisher__num_awards__min": 1})
def test_aggregate_alias(self):
vals = Store.objects.filter(name="Amazon.com").aggregate(
amazon_mean=Avg("books__rating")
)
self.assertEqual(vals, {"amazon_mean": Approximate(4.08, places=2)})
def test_aggregate_transform(self):
vals = Store.objects.aggregate(min_month=Min("original_opening__month"))
self.assertEqual(vals, {"min_month": 3})
def test_aggregate_join_transform(self):
vals = Publisher.objects.aggregate(min_year=Min("book__pubdate__year"))
self.assertEqual(vals, {"min_year": 1991})
def test_annotate_basic(self):
self.assertQuerySetEqual(
Book.objects.annotate().order_by("pk"),
[
"The Definitive Guide to Django: Web Development Done Right",
"Sams Teach Yourself Django in 24 Hours",
"Practical Django Projects",
"Python Web Development with Django",
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
],
lambda b: b.name,
)
books = Book.objects.annotate(mean_age=Avg("authors__age"))
b = books.get(pk=self.b1.pk)
self.assertEqual(
b.name, "The Definitive Guide to Django: Web Development Done Right"
)
self.assertEqual(b.mean_age, 34.5)
def test_annotate_defer(self):
qs = (
Book.objects.annotate(page_sum=Sum("pages"))
.defer("name")
.filter(pk=self.b1.pk)
)
rows = [
(
self.b1.id,
"159059725",
447,
"The Definitive Guide to Django: Web Development Done Right",
)
]
self.assertQuerySetEqual(
qs.order_by("pk"), rows, lambda r: (r.id, r.isbn, r.page_sum, r.name)
)
def test_annotate_defer_select_related(self):
qs = (
Book.objects.select_related("contact")
.annotate(page_sum=Sum("pages"))
.defer("name")
.filter(pk=self.b1.pk)
)
rows = [
(
self.b1.id,
"159059725",
447,
"Adrian Holovaty",
"The Definitive Guide to Django: Web Development Done Right",
)
]
self.assertQuerySetEqual(
qs.order_by("pk"),
rows,
lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name),
)
def test_annotate_m2m(self):
books = (
Book.objects.filter(rating__lt=4.5)
.annotate(Avg("authors__age"))
.order_by("name")
)
self.assertQuerySetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 51.5),
("Practical Django Projects", 29.0),
("Python Web Development with Django", Approximate(30.3, places=1)),
("Sams Teach Yourself Django in 24 Hours", 45.0),
],
lambda b: (b.name, b.authors__age__avg),
)
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
self.assertQuerySetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 2),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
1,
),
("Practical Django Projects", 1),
("Python Web Development with Django", 3),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 2),
],
lambda b: (b.name, b.num_authors),
)
def test_backwards_m2m_annotate(self):
authors = (
Author.objects.filter(name__contains="a")
.annotate(Avg("book__rating"))
.order_by("name")
)
self.assertQuerySetEqual(
authors,
[
("Adrian Holovaty", 4.5),
("Brad Dayley", 3.0),
("Jacob Kaplan-Moss", 4.5),
("James Bennett", 4.0),
("Paul Bissex", 4.0),
("Stuart Russell", 4.0),
],
lambda a: (a.name, a.book__rating__avg),
)
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
self.assertQuerySetEqual(
authors,
[
("Adrian Holovaty", 1),
("Brad Dayley", 1),
("Jacob Kaplan-Moss", 1),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 1),
("Peter Norvig", 2),
("Stuart Russell", 1),
("Wesley J. Chun", 1),
],
lambda a: (a.name, a.num_books),
)
def test_reverse_fkey_annotate(self):
books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
self.assertQuerySetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 7),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
9,
),
("Practical Django Projects", 3),
("Python Web Development with Django", 7),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 3),
],
lambda b: (b.name, b.publisher__num_awards__sum),
)
publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
self.assertQuerySetEqual(
publishers,
[
("Apress", Decimal("59.69")),
("Jonno's House of Books", None),
("Morgan Kaufmann", Decimal("75.00")),
("Prentice Hall", Decimal("112.49")),
("Sams", Decimal("23.09")),
],
lambda p: (p.name, p.book__price__sum),
)
def test_annotate_values(self):
books = list(
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values()
)
self.assertEqual(
books,
[
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values("pk", "isbn", "mean_age")
)
self.assertEqual(
list(books),
[
{
"pk": self.b1.pk,
"isbn": "159059725",
"mean_age": 34.5,
}
],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values("name")
)
self.assertEqual(
list(books),
[{"name": "The Definitive Guide to Django: Web Development Done Right"}],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.values()
.annotate(mean_age=Avg("authors__age"))
)
self.assertEqual(
list(books),
[
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
],
)
books = (
Book.objects.values("rating")
.annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age"))
.order_by("rating")
)
self.assertEqual(
list(books),
[
{
"rating": 3.0,
"n_authors": 1,
"mean_age": 45.0,
},
{
"rating": 4.0,
"n_authors": 6,
"mean_age": Approximate(37.16, places=1),
},
{
"rating": 4.5,
"n_authors": 2,
"mean_age": 34.5,
},
{
"rating": 5.0,
"n_authors": 1,
"mean_age": 57.0,
},
],
)
authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
self.assertQuerySetEqual(
authors,
[
("Adrian Holovaty", 32.0),
("Brad Dayley", None),
("Jacob Kaplan-Moss", 29.5),
("James Bennett", 34.0),
("Jeffrey Forcier", 27.0),
("Paul Bissex", 31.0),
("Peter Norvig", 46.0),
("Stuart Russell", 57.0),
("Wesley J. Chun", Approximate(33.66, places=1)),
],
lambda a: (a.name, a.friends__age__avg),
)
def test_count(self):
vals = Book.objects.aggregate(Count("rating"))
self.assertEqual(vals, {"rating__count": 6})
def test_count_star(self):
with self.assertNumQueries(1) as ctx:
Book.objects.aggregate(n=Count("*"))
sql = ctx.captured_queries[0]["sql"]
self.assertIn("SELECT COUNT(*) ", sql)
def test_count_distinct_expression(self):
aggs = Book.objects.aggregate(
distinct_ratings=Count(
Case(When(pages__gt=300, then="rating")), distinct=True
),
)
self.assertEqual(aggs["distinct_ratings"], 4)
def test_distinct_on_aggregate(self):
for aggregate, expected_result in (
(Avg, 4.125),
(Count, 4),
(Sum, 16.5),
):
with self.subTest(aggregate=aggregate.__name__):
books = Book.objects.aggregate(
ratings=aggregate("rating", distinct=True)
)
self.assertEqual(books["ratings"], expected_result)
def test_non_grouped_annotation_not_in_group_by(self):
"""
An annotation not included in values() before an aggregate should be
excluded from the group by clause.
"""
qs = (
Book.objects.annotate(xprice=F("price"))
.filter(rating=4.0)
.values("rating")
.annotate(count=Count("publisher_id", distinct=True))
.values("count", "rating")
.order_by("count")
)
self.assertEqual(list(qs), [{"rating": 4.0, "count": 2}])
def test_grouped_annotation_in_group_by(self):
"""
An annotation included in values() before an aggregate should be
included in the group by clause.
"""
qs = (
Book.objects.annotate(xprice=F("price"))
.filter(rating=4.0)
.values("rating", "xprice")
.annotate(count=Count("publisher_id", distinct=True))
.values("count", "rating")
.order_by("count")
)
self.assertEqual(
list(qs),
[
{"rating": 4.0, "count": 1},
{"rating": 4.0, "count": 2},
],
)
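        # Rough shape of the difference from the previous test (SQL spelling
        # varies by backend):
        #   values("rating")           -> GROUP BY rating          -> one row
        #   values("rating", "xprice") -> GROUP BY rating, xprice  -> two rows
        # The rating-4.0 books split into price 29.69 (two publishers, so
        # count=2) and price 82.80 (one publisher, count=1).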
def test_fkey_aggregate(self):
explicit = list(Author.objects.annotate(Count("book__id")))
implicit = list(Author.objects.annotate(Count("book")))
self.assertCountEqual(explicit, implicit)
def test_annotate_ordering(self):
books = (
Book.objects.values("rating")
.annotate(oldest=Max("authors__age"))
.order_by("oldest", "rating")
)
self.assertEqual(
list(books),
[
{"rating": 4.5, "oldest": 35},
{"rating": 3.0, "oldest": 45},
{"rating": 4.0, "oldest": 57},
{"rating": 5.0, "oldest": 57},
],
)
books = (
Book.objects.values("rating")
.annotate(oldest=Max("authors__age"))
.order_by("-oldest", "-rating")
)
self.assertEqual(
list(books),
[
{"rating": 5.0, "oldest": 57},
{"rating": 4.0, "oldest": 57},
{"rating": 3.0, "oldest": 45},
{"rating": 4.5, "oldest": 35},
],
)
def test_aggregate_annotation(self):
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(
Avg("num_authors")
)
self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_avg_duration_field(self):
# Explicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg("duration", output_field=DurationField())),
{"duration__avg": datetime.timedelta(days=1, hours=12)},
)
# Implicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg("duration")),
{"duration__avg": datetime.timedelta(days=1, hours=12)},
)
def test_sum_duration_field(self):
self.assertEqual(
Publisher.objects.aggregate(Sum("duration", output_field=DurationField())),
{"duration__sum": datetime.timedelta(days=3)},
)
def test_sum_distinct_aggregate(self):
"""
Sum on a distinct() QuerySet should aggregate only the distinct items.
"""
authors = Author.objects.filter(book__in=[self.b5, self.b6])
self.assertEqual(authors.count(), 3)
distinct_authors = authors.distinct()
self.assertEqual(distinct_authors.count(), 2)
# Selected author ages are 57 and 46
age_sum = distinct_authors.aggregate(Sum("age"))
self.assertEqual(age_sum["age__sum"], 103)
def test_filtering(self):
p = Publisher.objects.create(name="Expensive Publisher", num_awards=0)
Book.objects.create(
name="ExpensiveBook1",
pages=1,
isbn="111",
rating=3.5,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 1),
)
Book.objects.create(
name="ExpensiveBook2",
pages=1,
isbn="222",
rating=4.0,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 2),
)
Book.objects.create(
name="ExpensiveBook3",
pages=1,
isbn="333",
rating=4.5,
price=Decimal("35"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 3),
)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
["Apress", "Prentice Hall", "Expensive Publisher"],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by(
"pk"
)
self.assertQuerySetEqual(
publishers,
[
"Apress",
"Apress",
"Sams",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1, book__price__lt=Decimal("40.0"))
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
["Apress", "Prentice Hall", "Expensive Publisher"],
lambda p: p.name,
)
publishers = (
Publisher.objects.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerySetEqual(publishers, ["Apress"], lambda p: p.name)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__range=[1, 3])
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
[
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__range=[1, 2])
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
["Apress", "Sams", "Prentice Hall", "Morgan Kaufmann"],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__in=[1, 3])
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
["Sams", "Morgan Kaufmann", "Expensive Publisher"],
lambda p: p.name,
)
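        # A Count() annotation is never NULL, so an __isnull=True filter
        # matches no rows.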
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(
num_books__isnull=True
)
self.assertEqual(len(publishers), 0)
def test_annotation(self):
vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id"))
self.assertEqual(vals, {"friends__id__count": 2})
books = (
Book.objects.annotate(num_authors=Count("authors__name"))
.filter(num_authors__exact=2)
.order_by("pk")
)
self.assertQuerySetEqual(
books,
[
"The Definitive Guide to Django: Web Development Done Right",
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name,
)
authors = (
Author.objects.annotate(num_friends=Count("friends__id", distinct=True))
.filter(num_friends=0)
.order_by("pk")
)
self.assertQuerySetEqual(authors, ["Brad Dayley"], lambda a: a.name)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerySetEqual(
publishers, ["Apress", "Prentice Hall"], lambda p: p.name
)
publishers = (
Publisher.objects.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
)
self.assertQuerySetEqual(publishers, ["Apress"], lambda p: p.name)
books = Book.objects.annotate(num_authors=Count("authors__id")).filter(
authors__name__contains="Norvig", num_authors__gt=1
)
self.assertQuerySetEqual(
books, ["Artificial Intelligence: A Modern Approach"], lambda b: b.name
)
def test_more_aggregation(self):
a = Author.objects.get(name__contains="Norvig")
b = Book.objects.get(name__contains="Done Right")
b.authors.add(a)
b.save()
vals = (
Book.objects.annotate(num_authors=Count("authors__id"))
.filter(authors__name__contains="Norvig", num_authors__gt=1)
.aggregate(Avg("rating"))
)
self.assertEqual(vals, {"rating__avg": 4.25})
def test_even_more_aggregate(self):
publishers = (
Publisher.objects.annotate(
earliest_book=Min("book__pubdate"),
)
.exclude(earliest_book=None)
.order_by("earliest_book")
.values(
"earliest_book",
"num_awards",
"id",
"name",
)
)
self.assertEqual(
list(publishers),
[
{
"earliest_book": datetime.date(1991, 10, 15),
"num_awards": 9,
"id": self.p4.id,
"name": "Morgan Kaufmann",
},
{
"earliest_book": datetime.date(1995, 1, 15),
"num_awards": 7,
"id": self.p3.id,
"name": "Prentice Hall",
},
{
"earliest_book": datetime.date(2007, 12, 6),
"num_awards": 3,
"id": self.p1.id,
"name": "Apress",
},
{
"earliest_book": datetime.date(2008, 3, 3),
"num_awards": 1,
"id": self.p2.id,
"name": "Sams",
},
],
)
vals = Store.objects.aggregate(
Max("friday_night_closing"), Min("original_opening")
)
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
)
def test_annotate_values_list(self):
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("pk", "isbn", "mean_age")
)
self.assertEqual(list(books), [(self.b1.id, "159059725", 34.5)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("isbn")
)
self.assertEqual(list(books), [("159059725",)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("mean_age")
)
self.assertEqual(list(books), [(34.5,)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("mean_age", flat=True)
)
self.assertEqual(list(books), [34.5])
books = (
Book.objects.values_list("price")
.annotate(count=Count("price"))
.order_by("-count", "price")
)
self.assertEqual(
list(books),
[
(Decimal("29.69"), 2),
(Decimal("23.09"), 1),
(Decimal("30"), 1),
(Decimal("75"), 1),
(Decimal("82.8"), 1),
],
)
def test_dates_with_aggregation(self):
"""
.dates() returns a distinct set of dates when applied to a
QuerySet with aggregation.
Refs #18056. Previously, .dates() would return distinct (date_kind,
aggregation) sets, in this case (year, num_authors), so 2008 would be
returned twice because there are books from 2008 with a different
number of authors.
"""
dates = Book.objects.annotate(num_authors=Count("authors")).dates(
"pubdate", "year"
)
self.assertSequenceEqual(
dates,
[
datetime.date(1991, 1, 1),
datetime.date(1995, 1, 1),
datetime.date(2007, 1, 1),
datetime.date(2008, 1, 1),
],
)
def test_values_aggregation(self):
# Refs #20782
max_rating = Book.objects.values("rating").aggregate(max_rating=Max("rating"))
self.assertEqual(max_rating["max_rating"], 5)
max_books_per_rating = (
Book.objects.values("rating")
.annotate(books_per_rating=Count("id"))
.aggregate(Max("books_per_rating"))
)
self.assertEqual(max_books_per_rating, {"books_per_rating__max": 3})
def test_ticket17424(self):
"""
Doing exclude() on a foreign model after annotate() doesn't crash.
"""
all_books = list(Book.objects.values_list("pk", flat=True).order_by("pk"))
annotated_books = Book.objects.order_by("pk").annotate(one=Count("id"))
# The value doesn't matter, we just need any negative
# constraint on a related model that's a noop.
excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__")
# Try to generate query tree
str(excluded_books.query)
self.assertQuerySetEqual(excluded_books, all_books, lambda x: x.pk)
# Check internal state
self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type)
self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type)
def test_ticket12886(self):
"""
Aggregation over sliced queryset works correctly.
"""
qs = Book.objects.order_by("-rating")[0:3]
vals = qs.aggregate(average_top3_rating=Avg("rating"))["average_top3_rating"]
self.assertAlmostEqual(vals, 4.5, places=2)
def test_ticket11881(self):
"""
Subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE or
select_related() stuff.
"""
qs = (
Book.objects.select_for_update()
.order_by("pk")
.select_related("publisher")
.annotate(max_pk=Max("pk"))
)
with CaptureQueriesContext(connection) as captured_queries:
qs.aggregate(avg_pk=Avg("max_pk"))
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]["sql"].lower()
self.assertNotIn("for update", qstr)
forced_ordering = connection.ops.force_no_ordering()
if forced_ordering:
# If the backend needs to force an ordering we make sure it's
# the only "ORDER BY" clause present in the query.
self.assertEqual(
re.findall(r"order by (\w+)", qstr),
[", ".join(f[1][0] for f in forced_ordering).lower()],
)
else:
self.assertNotIn("order by", qstr)
self.assertEqual(qstr.count(" join "), 0)
def test_decimal_max_digits_has_no_effect(self):
Book.objects.all().delete()
a1 = Author.objects.first()
p1 = Publisher.objects.first()
thedate = timezone.now()
for i in range(10):
Book.objects.create(
isbn="abcde{}".format(i),
name="none",
pages=10,
rating=4.0,
price=9999.98,
contact=a1,
publisher=p1,
pubdate=thedate,
)
book = Book.objects.aggregate(price_sum=Sum("price"))
self.assertEqual(book["price_sum"], Decimal("99999.80"))
def test_nonaggregate_aggregation_throws(self):
with self.assertRaisesMessage(TypeError, "fail is not an aggregate expression"):
Book.objects.aggregate(fail=F("price"))
def test_nonfield_annotation(self):
book = Book.objects.annotate(val=Max(Value(2))).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(
val=Max(Value(2), output_field=IntegerField())
).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
def test_annotation_expressions(self):
authors = Author.objects.annotate(
combined_ages=Sum(F("age") + F("friends__age"))
).order_by("name")
authors2 = Author.objects.annotate(
combined_ages=Sum("age") + Sum("friends__age")
).order_by("name")
for qs in (authors, authors2):
self.assertQuerySetEqual(
qs,
[
("Adrian Holovaty", 132),
("Brad Dayley", None),
("Jacob Kaplan-Moss", 129),
("James Bennett", 63),
("Jeffrey Forcier", 128),
("Paul Bissex", 120),
("Peter Norvig", 103),
("Stuart Russell", 103),
("Wesley J. Chun", 176),
],
lambda a: (a.name, a.combined_ages),
)
def test_aggregation_expressions(self):
a1 = Author.objects.aggregate(av_age=Sum("age") / Count("*"))
a2 = Author.objects.aggregate(av_age=Sum("age") / Count("age"))
a3 = Author.objects.aggregate(av_age=Avg("age"))
self.assertEqual(a1, {"av_age": 37})
self.assertEqual(a2, {"av_age": 37})
self.assertEqual(a3, {"av_age": Approximate(37.4, places=1)})
def test_avg_decimal_field(self):
v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg("price")))[
"avg_price"
]
self.assertIsInstance(v, Decimal)
self.assertEqual(v, Approximate(Decimal("47.39"), places=2))
def test_order_of_precedence(self):
p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg("price") + 2) * 3)
self.assertEqual(p1, {"avg_price": Approximate(Decimal("148.18"), places=2)})
p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg("price") + 2 * 3)
self.assertEqual(p2, {"avg_price": Approximate(Decimal("53.39"), places=2)})
def test_combine_different_types(self):
msg = (
"Cannot infer type of '+' expression involving these types: FloatField, "
"DecimalField. You must set output_field."
)
qs = Book.objects.annotate(sums=Sum("rating") + Sum("pages") + Sum("price"))
with self.assertRaisesMessage(FieldError, msg):
qs.first()
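        # Evaluating the queryset a second time raises the same error.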
with self.assertRaisesMessage(FieldError, msg):
qs.first()
b1 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=IntegerField())
).get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
b2 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=FloatField())
).get(pk=self.b4.pk)
self.assertEqual(b2.sums, 383.69)
b3 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=DecimalField())
).get(pk=self.b4.pk)
self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2))
def test_complex_aggregations_require_kwarg(self):
with self.assertRaisesMessage(
TypeError, "Complex annotations require an alias"
):
Author.objects.annotate(Sum(F("age") + F("friends__age")))
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Author.objects.aggregate(Sum("age") / Count("age"))
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Author.objects.aggregate(Sum(1))
def test_aggregate_over_complex_annotation(self):
qs = Author.objects.annotate(combined_ages=Sum(F("age") + F("friends__age")))
age = qs.aggregate(max_combined_age=Max("combined_ages"))
self.assertEqual(age["max_combined_age"], 176)
age = qs.aggregate(max_combined_age_doubled=Max("combined_ages") * 2)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages")
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages"),
sum_combined_age=Sum("combined_ages"),
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
self.assertEqual(age["sum_combined_age"], 954)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages"),
sum_combined_age_doubled=Sum("combined_ages") + Sum("combined_ages"),
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
self.assertEqual(age["sum_combined_age_doubled"], 954 * 2)
def test_values_annotation_with_expression(self):
# ensure the F() is promoted to the group by clause
qs = Author.objects.values("name").annotate(another_age=Sum("age") + F("age"))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a["another_age"], 68)
qs = qs.annotate(friend_count=Count("friends"))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a["friend_count"], 2)
qs = (
qs.annotate(combined_age=Sum("age") + F("friends__age"))
.filter(name="Adrian Holovaty")
.order_by("-combined_age")
)
self.assertEqual(
list(qs),
[
{
"name": "Adrian Holovaty",
"another_age": 68,
"friend_count": 1,
"combined_age": 69,
},
{
"name": "Adrian Holovaty",
"another_age": 68,
"friend_count": 1,
"combined_age": 63,
},
],
)
vals = qs.values("name", "combined_age")
self.assertEqual(
list(vals),
[
{"name": "Adrian Holovaty", "combined_age": 69},
{"name": "Adrian Holovaty", "combined_age": 63},
],
)
def test_annotate_values_aggregate(self):
alias_age = (
Author.objects.annotate(age_alias=F("age"))
.values(
"age_alias",
)
.aggregate(sum_age=Sum("age_alias"))
)
age = Author.objects.values("age").aggregate(sum_age=Sum("age"))
self.assertEqual(alias_age["sum_age"], age["sum_age"])
def test_annotate_over_annotate(self):
author = (
Author.objects.annotate(age_alias=F("age"))
.annotate(sum_age=Sum("age_alias"))
.get(name="Adrian Holovaty")
)
other_author = Author.objects.annotate(sum_age=Sum("age")).get(
name="Adrian Holovaty"
)
self.assertEqual(author.sum_age, other_author.sum_age)
def test_aggregate_over_aggregate(self):
msg = "Cannot resolve keyword 'age_agg' into field."
with self.assertRaisesMessage(FieldError, msg):
Author.objects.aggregate(
age_agg=Sum(F("age")),
avg_age=Avg(F("age_agg")),
)
def test_annotated_aggregate_over_annotated_aggregate(self):
with self.assertRaisesMessage(
FieldError, "Cannot compute Sum('id__max'): 'id__max' is an aggregate"
):
Book.objects.annotate(Max("id")).annotate(Sum("id__max"))
class MyMax(Max):
def as_sql(self, compiler, connection):
self.set_source_expressions(self.get_source_expressions()[0:1])
return super().as_sql(compiler, connection)
with self.assertRaisesMessage(
FieldError, "Cannot compute Max('id__max'): 'id__max' is an aggregate"
):
Book.objects.annotate(Max("id")).annotate(my_max=MyMax("id__max", "price"))
def test_multi_arg_aggregate(self):
class MyMax(Max):
output_field = DecimalField()
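            # Compile only the first source expression; the second argument
            # is accepted but ignored in the generated SQL.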
def as_sql(self, compiler, connection):
copy = self.copy()
copy.set_source_expressions(copy.get_source_expressions()[0:1])
return super(MyMax, copy).as_sql(compiler, connection)
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Book.objects.aggregate(MyMax("pages", "price"))
with self.assertRaisesMessage(
TypeError, "Complex annotations require an alias"
):
Book.objects.annotate(MyMax("pages", "price"))
Book.objects.aggregate(max_field=MyMax("pages", "price"))
def test_add_implementation(self):
class MySum(Sum):
pass
# test completely changing how the output is rendered
def lower_case_function_override(self, compiler, connection):
sql, params = compiler.compile(self.source_expressions[0])
substitutions = {
"function": self.function.lower(),
"expressions": sql,
"distinct": "",
}
substitutions.update(self.extra)
return self.template % substitutions, params
setattr(MySum, "as_" + connection.vendor, lower_case_function_override)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("sum("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test changing the dict and delegating
def lower_case_function_super(self, compiler, connection):
self.extra["function"] = self.function.lower()
return super(MySum, self).as_sql(compiler, connection)
setattr(MySum, "as_" + connection.vendor, lower_case_function_super)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("sum("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test overriding all parts of the template
def be_evil(self, compiler, connection):
substitutions = {"function": "MAX", "expressions": "2", "distinct": ""}
substitutions.update(self.extra)
return self.template % substitutions, ()
setattr(MySum, "as_" + connection.vendor, be_evil)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("MAX("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 2)
def test_complex_values_aggregation(self):
max_rating = Book.objects.values("rating").aggregate(
double_max_rating=Max("rating") + Max("rating")
)
self.assertEqual(max_rating["double_max_rating"], 5 * 2)
max_books_per_rating = (
Book.objects.values("rating")
.annotate(books_per_rating=Count("id") + 5)
.aggregate(Max("books_per_rating"))
)
self.assertEqual(max_books_per_rating, {"books_per_rating__max": 3 + 5})
def test_expression_on_aggregation(self):
qs = (
Publisher.objects.annotate(
price_or_median=Greatest(
Avg("book__rating", output_field=DecimalField()), Avg("book__price")
)
)
.filter(price_or_median__gte=F("num_awards"))
.order_by("num_awards")
)
self.assertQuerySetEqual(qs, [1, 3, 7, 9], lambda v: v.num_awards)
qs2 = (
Publisher.objects.annotate(
rating_or_num_awards=Greatest(
Avg("book__rating"), F("num_awards"), output_field=FloatField()
)
)
.filter(rating_or_num_awards__gt=F("num_awards"))
.order_by("num_awards")
)
self.assertQuerySetEqual(qs2, [1, 3], lambda v: v.num_awards)
def test_arguments_must_be_expressions(self):
msg = "QuerySet.aggregate() received non-expression(s): %s."
with self.assertRaisesMessage(TypeError, msg % FloatField()):
Book.objects.aggregate(FloatField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.aggregate(is_book=True)
with self.assertRaisesMessage(
TypeError, msg % ", ".join([str(FloatField()), "True"])
):
Book.objects.aggregate(FloatField(), Avg("price"), is_book=True)
def test_aggregation_subquery_annotation(self):
"""Subquery annotations are excluded from the GROUP BY if they are
not explicitly grouped against."""
latest_book_pubdate_qs = (
Book.objects.filter(publisher=OuterRef("pk"))
.order_by("-pubdate")
.values("pubdate")[:1]
)
publisher_qs = Publisher.objects.annotate(
latest_book_pubdate=Subquery(latest_book_pubdate_qs),
).annotate(count=Count("book"))
with self.assertNumQueries(1) as ctx:
list(publisher_qs)
self.assertEqual(ctx[0]["sql"].count("SELECT"), 2)
# The GROUP BY should not be by alias either.
self.assertEqual(ctx[0]["sql"].lower().count("latest_book_pubdate"), 1)
def test_aggregation_subquery_annotation_exists(self):
latest_book_pubdate_qs = (
Book.objects.filter(publisher=OuterRef("pk"))
.order_by("-pubdate")
.values("pubdate")[:1]
)
publisher_qs = Publisher.objects.annotate(
latest_book_pubdate=Subquery(latest_book_pubdate_qs),
count=Count("book"),
)
self.assertTrue(publisher_qs.exists())
def test_aggregation_filter_exists(self):
publishers_having_more_than_one_book_qs = (
Book.objects.values("publisher")
.annotate(cnt=Count("isbn"))
.filter(cnt__gt=1)
)
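        # exists() strips the selected columns, but the grouped column must
        # be retained so the HAVING filter keeps working.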
query = publishers_having_more_than_one_book_qs.query.exists()
_, _, group_by = query.get_compiler(connection=connection).pre_sql_setup()
self.assertEqual(len(group_by), 1)
def test_aggregation_exists_annotation(self):
published_books = Book.objects.filter(publisher=OuterRef("pk"))
publisher_qs = Publisher.objects.annotate(
published_book=Exists(published_books),
count=Count("book"),
).values_list("name", flat=True)
self.assertCountEqual(
list(publisher_qs),
[
"Apress",
"Morgan Kaufmann",
"Jonno's House of Books",
"Prentice Hall",
"Sams",
],
)
def test_aggregation_subquery_annotation_values(self):
"""
Subquery annotations and external aliases are excluded from the GROUP
BY if they are not selected.
"""
books_qs = (
Book.objects.annotate(
first_author_the_same_age=Subquery(
Author.objects.filter(
age=OuterRef("contact__friends__age"),
)
.order_by("age")
.values("id")[:1],
)
)
.filter(
publisher=self.p1,
first_author_the_same_age__isnull=False,
)
.annotate(
min_age=Min("contact__friends__age"),
)
.values("name", "min_age")
.order_by("name")
)
self.assertEqual(
list(books_qs),
[
{"name": "Practical Django Projects", "min_age": 34},
{
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"min_age": 29,
},
],
)
def test_aggregation_subquery_annotation_values_collision(self):
books_rating_qs = Book.objects.filter(
publisher=OuterRef("pk"),
price=Decimal("29.69"),
).values("rating")
publisher_qs = (
Publisher.objects.filter(
book__contact__age__gt=20,
name=self.p1.name,
)
.annotate(
rating=Subquery(books_rating_qs),
contacts_count=Count("book__contact"),
)
.values("rating")
.annotate(total_count=Count("rating"))
)
self.assertEqual(
list(publisher_qs),
[
{"rating": 4.0, "total_count": 2},
],
)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_subquery_annotation_multivalued(self):
"""
Subquery annotations must be included in the GROUP BY if they use
potentially multivalued relations (contain the LOOKUP_SEP).
"""
subquery_qs = Author.objects.filter(
pk=OuterRef("pk"),
book__name=OuterRef("book__name"),
).values("pk")
author_qs = Author.objects.annotate(
subquery_id=Subquery(subquery_qs),
).annotate(count=Count("book"))
self.assertEqual(author_qs.count(), Author.objects.count())
def test_aggregation_order_by_not_selected_annotation_values(self):
result_asc = [
self.b4.pk,
self.b3.pk,
self.b1.pk,
self.b2.pk,
self.b5.pk,
self.b6.pk,
]
result_desc = result_asc[::-1]
tests = [
("min_related_age", result_asc),
("-min_related_age", result_desc),
(F("min_related_age"), result_asc),
(F("min_related_age").asc(), result_asc),
(F("min_related_age").desc(), result_desc),
]
for ordering, expected_result in tests:
with self.subTest(ordering=ordering):
books_qs = (
Book.objects.annotate(
min_age=Min("authors__age"),
)
.annotate(
min_related_age=Coalesce("min_age", "contact__age"),
)
.order_by(ordering)
.values_list("pk", flat=True)
)
self.assertEqual(list(books_qs), expected_result)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_group_by_subquery_annotation(self):
"""
Subquery annotations are included in the GROUP BY if they are
grouped against.
"""
long_books_count_qs = (
Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=400,
)
.values("publisher")
.annotate(count=Count("pk"))
.values("count")
)
groups = [
Subquery(long_books_count_qs),
long_books_count_qs,
long_books_count_qs.query,
]
for group in groups:
with self.subTest(group=group.__class__.__name__):
long_books_count_breakdown = Publisher.objects.values_list(
group,
).annotate(total=Count("*"))
self.assertEqual(dict(long_books_count_breakdown), {None: 1, 1: 4})
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_group_by_exists_annotation(self):
"""
Exists annotations are included in the GROUP BY if they are
grouped against.
"""
long_books_qs = Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=800,
)
has_long_books_breakdown = Publisher.objects.values_list(
Exists(long_books_qs),
).annotate(total=Count("*"))
self.assertEqual(dict(has_long_books_breakdown), {True: 2, False: 3})
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_subquery_annotation_related_field(self):
publisher = Publisher.objects.create(name=self.a9.name, num_awards=2)
book = Book.objects.create(
isbn="159059999",
name="Test book.",
pages=819,
rating=2.5,
price=Decimal("14.44"),
contact=self.a9,
publisher=publisher,
pubdate=datetime.date(2019, 12, 6),
)
book.authors.add(self.a5, self.a6, self.a7)
books_qs = (
Book.objects.annotate(
contact_publisher=Subquery(
Publisher.objects.filter(
pk=OuterRef("publisher"),
name=OuterRef("contact__name"),
).values("name")[:1],
)
)
.filter(
contact_publisher__isnull=False,
)
.annotate(count=Count("authors"))
)
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(books_qs, [book])
        # Three SELECTs are expected: the outer query, the annotation
        # subquery, and the WHERE clause subquery. The GROUP BY reuses the
        # selected alias when the backend allows it.
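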
if connection.features.allows_group_by_refs:
self.assertEqual(ctx[0]["sql"].count("SELECT"), 3)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_nested_subquery_outerref(self):
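        # The nested OuterRef resolves two levels up, to the Book queryset
        # that the outer Subquery is annotated on.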
publisher_with_same_name = Publisher.objects.filter(
id__in=Subquery(
Publisher.objects.filter(
name=OuterRef(OuterRef("publisher__name")),
).values("id"),
),
).values(publisher_count=Count("id"))[:1]
books_breakdown = Book.objects.annotate(
publisher_count=Subquery(publisher_with_same_name),
authors_count=Count("authors"),
).values_list("publisher_count", flat=True)
self.assertSequenceEqual(books_breakdown, [1] * 6)
def test_aggregation_exists_multivalued_outeref(self):
self.assertCountEqual(
Publisher.objects.annotate(
books_exists=Exists(
Book.objects.filter(publisher=OuterRef("book__publisher"))
),
books_count=Count("book"),
),
Publisher.objects.all(),
)
def test_filter_in_subquery_or_aggregation(self):
"""
        Filtering against an aggregate requires the use of the HAVING clause.
        If such a filter is OR'ed with a non-aggregate one, the latter must
        also be moved to the HAVING clause and have its grouping columns
        added to the GROUP BY.
        When this is done with a subquery, the specialized logic that groups
        by outer reference columns should be used instead of the subquery
        itself, as the latter might return multiple rows.
"""
authors = Author.objects.annotate(
Count("book"),
).filter(Q(book__count__gt=0) | Q(pk__in=Book.objects.values("authors")))
self.assertCountEqual(authors, Author.objects.all())
def test_aggregation_random_ordering(self):
"""Random() is not included in the GROUP BY when used for ordering."""
authors = Author.objects.annotate(contact_count=Count("book")).order_by("?")
self.assertQuerySetEqual(
authors,
[
("Adrian Holovaty", 1),
("Jacob Kaplan-Moss", 1),
("Brad Dayley", 1),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 1),
("Wesley J. Chun", 1),
("Stuart Russell", 1),
("Peter Norvig", 2),
],
lambda a: (a.name, a.contact_count),
ordered=False,
)
def test_empty_result_optimization(self):
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Sum("num_awards"),
books_count=Count("book"),
),
{
"sum_awards": None,
"books_count": 0,
},
)
# Expression without empty_result_set_value forces queries to be
# executed even if they would return an empty result set.
raw_books_count = Func("book", function="COUNT")
raw_books_count.contains_aggregate = True
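        # Setting contains_aggregate lets aggregate() accept the raw Func
        # even though it isn't an Aggregate subclass.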
with self.assertNumQueries(1):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Sum("num_awards"),
books_count=raw_books_count,
),
{
"sum_awards": None,
"books_count": 0,
},
)
def test_coalesced_empty_result_set(self):
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Sum("num_awards"), 0),
)["sum_awards"],
0,
)
# Multiple expressions.
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Sum("num_awards"), None, 0),
)["sum_awards"],
0,
)
# Nested coalesce.
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Coalesce(Sum("num_awards"), None), 0),
)["sum_awards"],
0,
)
# Expression coalesce.
with self.assertNumQueries(1):
self.assertIsInstance(
Store.objects.none().aggregate(
latest_opening=Coalesce(
Max("original_opening"),
RawSQL("CURRENT_TIMESTAMP", []),
),
)["latest_opening"],
datetime.datetime,
)
def test_aggregation_default_unsupported_by_count(self):
msg = "Count does not allow default."
with self.assertRaisesMessage(TypeError, msg):
Count("age", default=0)
def test_aggregation_default_unset(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age"),
)
self.assertIsNone(result["value"])
def test_aggregation_default_zero(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=0),
)
self.assertEqual(result["value"], 0)
def test_aggregation_default_integer(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=21),
)
self.assertEqual(result["value"], 21)
def test_aggregation_default_expression(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=Value(5) * Value(7)),
)
self.assertEqual(result["value"], 35)
def test_aggregation_default_group_by(self):
qs = (
Publisher.objects.values("name")
.annotate(
books=Count("book"),
pages=Sum("book__pages", default=0),
)
.filter(books=0)
)
self.assertSequenceEqual(
qs,
[{"name": "Jonno's House of Books", "books": 0, "pages": 0}],
)
def test_aggregation_default_compound_expression(self):
# Scale rating to a percentage; default to 50% if no books published.
formula = Avg("book__rating", default=2.5) * 20.0
queryset = Publisher.objects.annotate(rating=formula).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "rating"),
[
{"name": "Apress", "rating": 85.0},
{"name": "Jonno's House of Books", "rating": 50.0},
{"name": "Morgan Kaufmann", "rating": 100.0},
{"name": "Prentice Hall", "rating": 80.0},
{"name": "Sams", "rating": 60.0},
],
)
def test_aggregation_default_using_time_from_python(self):
expr = Min(
"store__friday_night_closing",
filter=~Q(store__name="Amazon.com"),
default=datetime.time(17),
)
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL & MariaDB.
expr.default = Cast(expr.default, TimeField())
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{"isbn": "013235613", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "013790395",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "067232959", "oldest_store_opening": datetime.time(17)},
{"isbn": "155860191", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "159059725",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "159059996", "oldest_store_opening": datetime.time(21, 30)},
],
)
def test_aggregation_default_using_time_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min(
"store__friday_night_closing",
filter=~Q(store__name="Amazon.com"),
default=TruncHour(NowUTC(), output_field=TimeField()),
)
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{"isbn": "013235613", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "013790395",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "067232959", "oldest_store_opening": datetime.time(now.hour)},
{"isbn": "155860191", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "159059725",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "159059996", "oldest_store_opening": datetime.time(21, 30)},
],
)
def test_aggregation_default_using_date_from_python(self):
expr = Min("book__pubdate", default=datetime.date(1970, 1, 1))
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL & MariaDB.
expr.default = Cast(expr.default, DateField())
queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "earliest_pubdate"),
[
{"name": "Apress", "earliest_pubdate": datetime.date(2007, 12, 6)},
{
"name": "Jonno's House of Books",
"earliest_pubdate": datetime.date(1970, 1, 1),
},
{
"name": "Morgan Kaufmann",
"earliest_pubdate": datetime.date(1991, 10, 15),
},
{
"name": "Prentice Hall",
"earliest_pubdate": datetime.date(1995, 1, 15),
},
{"name": "Sams", "earliest_pubdate": datetime.date(2008, 3, 3)},
],
)
def test_aggregation_default_using_date_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min("book__pubdate", default=TruncDate(NowUTC()))
queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "earliest_pubdate"),
[
{"name": "Apress", "earliest_pubdate": datetime.date(2007, 12, 6)},
{"name": "Jonno's House of Books", "earliest_pubdate": now.date()},
{
"name": "Morgan Kaufmann",
"earliest_pubdate": datetime.date(1991, 10, 15),
},
{
"name": "Prentice Hall",
"earliest_pubdate": datetime.date(1995, 1, 15),
},
{"name": "Sams", "earliest_pubdate": datetime.date(2008, 3, 3)},
],
)
def test_aggregation_default_using_datetime_from_python(self):
expr = Min(
"store__original_opening",
filter=~Q(store__name="Amazon.com"),
default=datetime.datetime(1970, 1, 1),
)
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL & MariaDB.
expr.default = Cast(expr.default, DateTimeField())
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{
"isbn": "013235613",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "013790395",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "067232959",
"oldest_store_opening": datetime.datetime(1970, 1, 1),
},
{
"isbn": "155860191",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "159059725",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "159059996",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
],
)
def test_aggregation_default_using_datetime_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min(
"store__original_opening",
filter=~Q(store__name="Amazon.com"),
default=TruncHour(NowUTC(), output_field=DateTimeField()),
)
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{
"isbn": "013235613",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "013790395",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "067232959",
"oldest_store_opening": now.replace(
minute=0, second=0, microsecond=0, tzinfo=None
),
},
{
"isbn": "155860191",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "159059725",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "159059996",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
],
)
def test_aggregation_default_using_duration_from_python(self):
result = Publisher.objects.filter(num_awards__gt=3).aggregate(
value=Sum("duration", default=datetime.timedelta(0)),
)
self.assertEqual(result["value"], datetime.timedelta(0))
def test_aggregation_default_using_duration_from_database(self):
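        # Now() - Now() is evaluated by the database and yields a zero
        # duration, exercising a database-computed default.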
result = Publisher.objects.filter(num_awards__gt=3).aggregate(
value=Sum("duration", default=Now() - Now()),
)
self.assertEqual(result["value"], datetime.timedelta(0))
def test_aggregation_default_using_decimal_from_python(self):
result = Book.objects.filter(rating__lt=3.0).aggregate(
value=Sum("price", default=Decimal("0.00")),
)
self.assertEqual(result["value"], Decimal("0.00"))
def test_aggregation_default_using_decimal_from_database(self):
result = Book.objects.filter(rating__lt=3.0).aggregate(
value=Sum("price", default=Pi()),
)
self.assertAlmostEqual(result["value"], Decimal.from_float(math.pi), places=6)
def test_aggregation_default_passed_another_aggregate(self):
result = Book.objects.aggregate(
value=Sum("price", filter=Q(rating__lt=3.0), default=Avg("pages") / 10.0),
)
self.assertAlmostEqual(result["value"], Decimal("61.72"), places=2)
def test_aggregation_default_after_annotation(self):
result = Publisher.objects.annotate(
double_num_awards=F("num_awards") * 2,
).aggregate(value=Sum("double_num_awards", default=0))
self.assertEqual(result["value"], 40)
def test_aggregation_default_not_in_aggregate(self):
result = Publisher.objects.annotate(
avg_rating=Avg("book__rating", default=2.5),
).aggregate(Sum("num_awards"))
self.assertEqual(result["num_awards__sum"], 20)
def test_exists_none_with_aggregate(self):
qs = Book.objects.annotate(
count=Count("id"),
exists=Exists(Author.objects.none()),
)
self.assertEqual(len(qs), 6)
def test_alias_sql_injection(self):
crafted_alias = """injected_name" from "aggregation_author"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Author.objects.aggregate(**{crafted_alias: Avg("age")})
def test_exists_extra_where_with_aggregate(self):
qs = Book.objects.annotate(
count=Count("id"),
exists=Exists(Author.objects.extra(where=["1=0"])),
)
self.assertEqual(len(qs), 6)
def test_aggregation_over_annotation_shared_alias(self):
self.assertEqual(
Publisher.objects.annotate(agg=Count("book__authors")).aggregate(
agg=Count("agg"),
),
{"agg": 5},
)
class AggregateAnnotationPruningTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(age=1)
cls.a2 = Author.objects.create(age=2)
cls.p1 = Publisher.objects.create(num_awards=1)
cls.p2 = Publisher.objects.create(num_awards=0)
cls.b1 = Book.objects.create(
name="b1",
publisher=cls.p1,
pages=100,
rating=4.5,
price=10,
contact=cls.a1,
pubdate=datetime.date.today(),
)
cls.b1.authors.add(cls.a1)
cls.b2 = Book.objects.create(
name="b2",
publisher=cls.p2,
pages=1000,
rating=3.2,
price=50,
contact=cls.a2,
pubdate=datetime.date.today(),
)
cls.b2.authors.add(cls.a1, cls.a2)
def test_unused_aliased_aggregate_pruned(self):
with CaptureQueriesContext(connection) as ctx:
cnt = Book.objects.alias(
authors_count=Count("authors"),
).count()
self.assertEqual(cnt, 2)
sql = ctx.captured_queries[0]["sql"].lower()
self.assertEqual(sql.count("select"), 2, "Subquery wrapping required")
self.assertNotIn("authors_count", sql)
def test_non_aggregate_annotation_pruned(self):
with CaptureQueriesContext(connection) as ctx:
Book.objects.annotate(
name_lower=Lower("name"),
).count()
sql = ctx.captured_queries[0]["sql"].lower()
self.assertEqual(sql.count("select"), 1, "No subquery wrapping required")
self.assertNotIn("name_lower", sql)
def test_unreferenced_aggregate_annotation_pruned(self):
with CaptureQueriesContext(connection) as ctx:
cnt = Book.objects.annotate(
authors_count=Count("authors"),
).count()
self.assertEqual(cnt, 2)
sql = ctx.captured_queries[0]["sql"].lower()
self.assertEqual(sql.count("select"), 2, "Subquery wrapping required")
self.assertNotIn("authors_count", sql)
def test_referenced_aggregate_annotation_kept(self):
with CaptureQueriesContext(connection) as ctx:
Book.objects.annotate(
authors_count=Count("authors"),
).aggregate(Avg("authors_count"))
sql = ctx.captured_queries[0]["sql"].lower()
self.assertEqual(sql.count("select"), 2, "Subquery wrapping required")
self.assertEqual(sql.count("authors_count"), 2)
|
d5b7fb6fc133488acf5f5fb585d86a0f286769ccf6d880cbaf67b4400164f1fb | import datetime
from decimal import Decimal
from django.db.models import (
Avg,
Case,
Count,
Exists,
F,
Max,
OuterRef,
Q,
StdDev,
Subquery,
Sum,
Variance,
When,
)
from django.test import TestCase
from django.test.utils import Approximate
from .models import Author, Book, Publisher
class FilteredAggregateTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="test", age=40)
cls.a2 = Author.objects.create(name="test2", age=60)
cls.a3 = Author.objects.create(name="test3", age=100)
cls.p1 = Publisher.objects.create(
name="Apress", num_awards=3, duration=datetime.timedelta(days=1)
)
cls.b1 = Book.objects.create(
isbn="159059725",
name="The Definitive Guide to Django: Web Development Done Right",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn="067232959",
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
rating=3.0,
price=Decimal("23.09"),
contact=cls.a2,
publisher=cls.p1,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn="159059996",
name="Practical Django Projects",
pages=600,
rating=4.5,
price=Decimal("29.69"),
contact=cls.a3,
publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.a1.friends.add(cls.a2)
cls.a1.friends.add(cls.a3)
cls.b1.authors.add(cls.a1)
cls.b1.authors.add(cls.a3)
cls.b2.authors.add(cls.a2)
cls.b3.authors.add(cls.a3)
def test_filtered_aggregates(self):
agg = Sum("age", filter=Q(name__startswith="test"))
self.assertEqual(Author.objects.aggregate(age=agg)["age"], 200)
def test_filtered_numerical_aggregates(self):
for aggregate, expected_result in (
(Avg, Approximate(66.7, 1)),
(StdDev, Approximate(24.9, 1)),
(Variance, Approximate(622.2, 1)),
):
with self.subTest(aggregate=aggregate.__name__):
agg = aggregate("age", filter=Q(name__startswith="test"))
self.assertEqual(
Author.objects.aggregate(age=agg)["age"], expected_result
)
def test_double_filtered_aggregates(self):
agg = Sum("age", filter=Q(Q(name="test2") & ~Q(name="test")))
self.assertEqual(Author.objects.aggregate(age=agg)["age"], 60)
def test_excluded_aggregates(self):
agg = Sum("age", filter=~Q(name="test2"))
self.assertEqual(Author.objects.aggregate(age=agg)["age"], 140)
def test_related_aggregates_m2m(self):
agg = Sum("friends__age", filter=~Q(friends__name="test"))
self.assertEqual(
Author.objects.filter(name="test").aggregate(age=agg)["age"], 160
)
def test_related_aggregates_m2m_and_fk(self):
q = Q(friends__book__publisher__name="Apress") & ~Q(friends__name="test3")
agg = Sum("friends__book__pages", filter=q)
self.assertEqual(
Author.objects.filter(name="test").aggregate(pages=agg)["pages"], 528
)
def test_plain_annotate(self):
agg = Sum("book__pages", filter=Q(book__rating__gt=3))
qs = Author.objects.annotate(pages=agg).order_by("pk")
self.assertSequenceEqual([a.pages for a in qs], [447, None, 1047])
def test_filtered_aggregate_on_annotate(self):
pages_annotate = Sum("book__pages", filter=Q(book__rating__gt=3))
age_agg = Sum("age", filter=Q(total_pages__gte=400))
aggregated = Author.objects.annotate(total_pages=pages_annotate).aggregate(
summed_age=age_agg
)
self.assertEqual(aggregated, {"summed_age": 140})
def test_case_aggregate(self):
agg = Sum(
Case(When(friends__age=40, then=F("friends__age"))),
filter=Q(friends__name__startswith="test"),
)
self.assertEqual(Author.objects.aggregate(age=agg)["age"], 80)
def test_sum_star_exception(self):
msg = "Star cannot be used with filter. Please specify a field."
with self.assertRaisesMessage(ValueError, msg):
Count("*", filter=Q(age=40))
def test_filtered_reused_subquery(self):
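        # Reusing the queryset as a pk__in subquery must preserve the
        # filtered aggregate in both the outer query and the subquery.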
qs = Author.objects.annotate(
older_friends_count=Count("friends", filter=Q(friends__age__gt=F("age"))),
).filter(
older_friends_count__gte=2,
)
self.assertEqual(qs.get(pk__in=qs.values("pk")), self.a1)
def test_filtered_aggregate_ref_annotation(self):
aggs = Author.objects.annotate(double_age=F("age") * 2).aggregate(
cnt=Count("pk", filter=Q(double_age__gt=100)),
)
self.assertEqual(aggs["cnt"], 2)
def test_filtered_aggregate_ref_subquery_annotation(self):
aggs = Author.objects.annotate(
earliest_book_year=Subquery(
Book.objects.filter(
contact__pk=OuterRef("pk"),
)
.order_by("pubdate")
.values("pubdate__year")[:1]
),
).aggregate(
cnt=Count("pk", filter=Q(earliest_book_year=2008)),
)
self.assertEqual(aggs["cnt"], 2)
def test_filtered_aggregate_ref_multiple_subquery_annotation(self):
aggregate = (
Book.objects.values("publisher")
.annotate(
has_authors=Exists(
Book.authors.through.objects.filter(book=OuterRef("pk")),
),
authors_have_other_books=Exists(
Book.objects.filter(
authors__in=Author.objects.filter(
book_contact_set=OuterRef(OuterRef("pk")),
)
).exclude(pk=OuterRef("pk")),
),
)
.aggregate(
max_rating=Max(
"rating",
filter=Q(has_authors=True, authors_have_other_books=False),
)
)
)
self.assertEqual(aggregate, {"max_rating": 4.5})
def test_filtered_aggregate_on_exists(self):
aggregate = Book.objects.values("publisher").aggregate(
max_rating=Max(
"rating",
filter=Exists(
Book.authors.through.objects.filter(book=OuterRef("pk")),
),
),
)
self.assertEqual(aggregate, {"max_rating": 4.5})
def test_filtered_aggregate_empty_condition(self):
book = Book.objects.annotate(
authors_count=Count(
"authors",
filter=Q(authors__in=[]),
),
).get(pk=self.b1.pk)
self.assertEqual(book.authors_count, 0)
aggregate = Book.objects.aggregate(
max_rating=Max("rating", filter=Q(rating__in=[]))
)
self.assertEqual(aggregate, {"max_rating": None})
def test_filtered_aggregate_full_condition(self):
book = Book.objects.annotate(
authors_count=Count(
"authors",
filter=~Q(authors__in=[]),
),
).get(pk=self.b1.pk)
self.assertEqual(book.authors_count, 2)
aggregate = Book.objects.aggregate(
max_rating=Max("rating", filter=~Q(rating__in=[]))
)
self.assertEqual(aggregate, {"max_rating": 4.5})
|
ad70c2f603fdd6ab96c8ba0775e35803ed41347f1f60d3236b3dd920a995d41f | import datetime
import os
import re
import unittest
from unittest import mock
from urllib.parse import parse_qsl, urljoin, urlparse
try:
import zoneinfo
except ImportError:
from backports import zoneinfo
try:
import pytz
except ImportError:
pytz = None
from django.contrib import admin
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.db import connection
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import (
TestCase,
modify_settings,
override_settings,
skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, translation
from django.utils.cache import get_max_age
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor,
AdminOrderedAdminMethod,
AdminOrderedCallable,
AdminOrderedField,
AdminOrderedModelMethod,
Album,
Answer,
Answer2,
Article,
BarAccount,
Book,
Bookmark,
Box,
Category,
Chapter,
ChapterXtra1,
ChapterXtra2,
Character,
Child,
Choice,
City,
Collector,
Color,
ComplexSortedPerson,
CoverLetter,
CustomArticle,
CyclicOne,
CyclicTwo,
DooHickey,
Employee,
EmptyModel,
Fabric,
FancyDoodad,
FieldOverridePost,
FilteredManager,
FooAccount,
FoodDelivery,
FunkyTag,
Gallery,
Grommet,
Inquisition,
Language,
Link,
MainPrepopulated,
Media,
ModelWithStringPrimaryKey,
OtherStory,
Paper,
Parent,
ParentWithDependentChildren,
ParentWithUUIDPK,
Person,
Persona,
Picture,
Pizza,
Plot,
PlotDetails,
PluggableSearchPerson,
Podcast,
Post,
PrePopulatedPost,
Promo,
Question,
ReadablePizza,
ReadOnlyPizza,
ReadOnlyRelatedField,
Recommendation,
Recommender,
RelatedPrepopulated,
RelatedWithUUIDPKModel,
Report,
Restaurant,
RowLevelChangePermissionModel,
SecretHideout,
Section,
ShortMessage,
Simple,
Song,
State,
Story,
SuperSecretHideout,
SuperVillain,
Telegram,
TitleTranslation,
Topping,
Traveler,
UnchangeableObject,
UndeletableObject,
UnorderedObject,
UserProxy,
Villain,
Vodcast,
Whatsit,
Widget,
Worker,
WorkHour,
)
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
MULTIPART_ENCTYPE = 'enctype="multipart/form-data"'
def make_aware_datetimes(dt, iana_key):
"""Makes one aware datetime for each supported time zone provider."""
yield dt.replace(tzinfo=zoneinfo.ZoneInfo(iana_key))
if pytz is not None:
yield pytz.timezone(iana_key).localize(dt, is_dst=None)
class AdminFieldExtractionMixin:
"""
Helper methods for extracting data from AdminForm.
"""
def get_admin_form_fields(self, response):
"""
Return a list of AdminFields for the AdminForm in the response.
"""
fields = []
for fieldset in response.context["adminform"]:
for field_line in fieldset:
fields.extend(field_line)
return fields
def get_admin_readonly_fields(self, response):
"""
Return the readonly fields for the response's AdminForm.
"""
return [f for f in self.get_admin_form_fields(response) if f.is_readonly]
def get_admin_readonly_field(self, response, field_name):
"""
Return the readonly field for the given field_name.
"""
admin_readonly_fields = self.get_admin_readonly_fields(response)
for field in admin_readonly_fields:
if field.field["name"] == field_name:
return field
@override_settings(ROOT_URLCONF="admin_views.urls", USE_I18N=True, LANGUAGE_CODE="en")
class AdminViewBasicTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 1",
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 2",
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.color1 = Color.objects.create(value="Red", warm=True)
cls.color2 = Color.objects.create(value="Orange", warm=True)
cls.color3 = Color.objects.create(value="Blue", warm=False)
cls.color4 = Color.objects.create(value="Green", warm=False)
cls.fab1 = Fabric.objects.create(surface="x")
cls.fab2 = Fabric.objects.create(surface="y")
cls.fab3 = Fabric.objects.create(surface="plain")
cls.b1 = Book.objects.create(name="Book 1")
cls.b2 = Book.objects.create(name="Book 2")
cls.pro1 = Promo.objects.create(name="Promo 1", book=cls.b1)
cls.pro1 = Promo.objects.create(name="Promo 2", book=cls.b2)
cls.chap1 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b1
)
cls.chap2 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b1
)
cls.chap3 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b2
)
cls.chap4 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b2
)
cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra="ChapterXtra1 1")
cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra="ChapterXtra1 2")
Actor.objects.create(name="Palin", age=27)
# Post data for edit inline
cls.inline_post_data = {
"name": "Test section",
# inline data
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": cls.a1.pk,
            # there is no title in the database; give one here or the formset
            # will fail.
"article_set-0-title": "Norske bostaver æøå skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": cls.s1.pk,
"article_set-1-id": cls.a2.pk,
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": cls.a3.pk,
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
def setUp(self):
self.client.force_login(self.superuser)
def assertContentBefore(self, response, text1, text2, failing_msg=None):
"""
Testing utility asserting that text1 appears before text2 in response
content.
"""
self.assertEqual(response.status_code, 200)
self.assertLess(
response.content.index(text1.encode()),
response.content.index(text2.encode()),
(failing_msg or "")
+ "\nResponse:\n"
+ response.content.decode(response.charset),
)
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
If you leave off the trailing slash, app should redirect and add it.
"""
add_url = reverse("admin:admin_views_article_add")
response = self.client.get(add_url[:-1])
self.assertRedirects(response, add_url, status_code=301)
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get(
reverse("admin:admin_views_section_add"), {"name": "My Section"}
)
self.assertContains(
response,
'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response",
)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET_string_PK(self):
"""
GET on the change_view (when passing a string as the PK argument for a
model with an integer PK field) redirects to the index page with a
message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(quote("abc/<b>"),)),
follow=True,
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["section with ID “abc/<b>” doesn’t exist. Perhaps it was deleted?"],
)
def test_basic_edit_GET_old_url_redirect(self):
"""
The change URL changed in Django 1.9, but the old one still redirects.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)).replace(
"change/", ""
)
)
self.assertRedirects(
response, reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
def test_basic_inheritance_GET_string_PK(self):
"""
GET on the change_view (for inherited models) redirects to the index
page with a message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_supervillain_change", args=("abc",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["super villain with ID “abc” doesn’t exist. Perhaps it was deleted?"],
)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_section_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""HTTP response from a popup is properly escaped."""
post_data = {
IS_POPUP_VAR: "1",
"title": "title with a new\nline",
"content": "some content",
"date_0": "2010-09-10",
"date_1": "14:55:39",
}
response = self.client.post(reverse("admin:admin_views_article_add"), post_data)
self.assertContains(response, "title with a new\\nline")
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
url = reverse("admin:admin_views_section_change", args=(self.s1.pk,))
response = self.client.post(url, self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as_delete_inline(self):
"""
Should be able to "Save as new" while also deleting an inline.
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-2-DELETE": "1",
"article_set-3-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302)
# started with 3 articles, one was deleted.
self.assertEqual(Section.objects.latest("id").article_set.count(), 2)
def test_change_list_column_field_classes(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
# callables display the callable name.
self.assertContains(response, "column-callable_year")
self.assertContains(response, "field-callable_year")
        # Lambdas display as "lambda" + the index at which they appear in
        # list_display.
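        # For illustration (a hypothetical list_display, not the actual
        # ArticleAdmin): a lambda at index 8, e.g.
        #   list_display = (..., lambda obj: obj.date.year, ...)
        # gets the generated column name "lambda8".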
self.assertContains(response, "column-lambda8")
self.assertContains(response, "field-lambda8")
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 2}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_property(self):
"""
Sort on a list_display field that is a property (column 10 is
a property in Article model).
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 10}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on property are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on property are out of order.",
)
def test_change_list_sorting_callable_query_expression(self):
"""Query expressions may be used for admin_order_field."""
tests = [
("order_by_expression", 9),
("order_by_f_expression", 12),
("order_by_orderby_expression", 13),
]
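        # Each pair is (admin_order_field under test, its changelist column
        # index with 0 being the implicit action checkbox), passed as the
        # "o" sort parameter. A sketch of the kind of ModelAdmin method
        # involved (illustrative body, not the actual ArticleAdmin code):
        #   @admin.display(ordering=F("date"))
        #   def order_by_f_expression(self, obj):
        #       return obj.date.year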
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_callable_query_expression_reverse(self):
tests = [
("order_by_expression", -9),
("order_by_f_expression", -12),
("order_by_orderby_expression", -13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-3"}
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on Model method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on Model method are out of order.",
)
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "4"}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
td = '<td class="field-model_property_year">%s</td>'
td_2000, td_2008, td_2009 = td % 2000, td % 2008, td % 2009
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "6"}
)
self.assertContentBefore(
response,
td_2009,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2000,
"Results of sorting on ModelAdmin method are out of order.",
)
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-6"}
)
self.assertContentBefore(
response,
td_2000,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2009,
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
# Sort by name, gender
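        # The "o" parameter is a dot-separated list of changelist column
        # indexes (0 is the implicit action checkbox); a "-" prefix sorts
        # that column in descending order.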
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "1.2"}
)
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "-2.1"}
)
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
        changed, even if it is defined in `ModelAdmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
response = self.client.get(reverse("admin:admin_views_person_changelist"), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso="ur", name="Urdu")
l2 = Language.objects.create(iso="ar", name="Arabic")
link1 = reverse("admin:admin_views_language_change", args=(quote(l1.pk),))
link2 = reverse("admin:admin_views_language_change", args=(quote(l2.pk),))
response = self.client.get(reverse("admin:admin_views_language_changelist"), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(
reverse("admin:admin_views_language_changelist"), {"o": "-1"}
)
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_podcast_change", args=(p2.pk,))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# The changelist displays the correct columns if two columns correspond
# to the same ordering field.
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(quote(p1.pk),))
link2 = reverse("admin:admin_views_podcast_change", args=(quote(p2.pk),))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse("admin:admin_views_complexsortedperson_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_complexsortedperson_change", args=(p2.pk,))
response = self.client.get(
reverse("admin:admin_views_complexsortedperson_changelist"), {}
)
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, "Name")
self.assertContains(response, "Colored name")
# Check order
self.assertContentBefore(response, "Name", "Colored name")
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
The admin shows default sort indicators for all kinds of 'ordering'
        fields: field names, methods on the model admin and on the model
        itself, and other callables. See #17252.
"""
models = [
(AdminOrderedField, "adminorderedfield"),
(AdminOrderedModelMethod, "adminorderedmodelmethod"),
(AdminOrderedAdminMethod, "adminorderedadminmethod"),
(AdminOrderedCallable, "adminorderedcallable"),
]
for model, url in models:
model.objects.create(stuff="The Last Item", order=3)
model.objects.create(stuff="The First Item", order=1)
model.objects.create(stuff="The Middle Item", order=2)
response = self.client.get(
reverse("admin:admin_views_%s_changelist" % url), {}
)
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(
response.context["cl"].get_ordering_field_columns(), {2: "asc"}
)
# Check order of records.
self.assertContentBefore(response, "The First Item", "The Middle Item")
self.assertContentBefore(response, "The Middle Item", "The Last Item")
def test_has_related_field_in_list_display_fk(self):
"""Joins shouldn't be performed for <FK>_id fields in list display."""
state = State.objects.create(name="Karnataka")
City.objects.create(state=state, name="Bangalore")
response = self.client.get(reverse("admin:admin_views_city_changelist"), {})
response.context["cl"].list_display = ["id", "name", "state"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["id", "name", "state_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_has_related_field_in_list_display_o2o(self):
"""Joins shouldn't be performed for <O2O>_id fields in list display."""
media = Media.objects.create(name="Foo")
Vodcast.objects.create(media=media)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"), {})
response.context["cl"].list_display = ["media"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["media_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_limited_filter(self):
"""
Admin changelist filters do not contain objects excluded via
limit_choices_to.
"""
response = self.client.get(reverse("admin:admin_views_thing_changelist"))
self.assertContains(
response,
'<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view",
)
self.assertNotContains(
response,
'<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to",
)
def test_relation_spanning_filters(self):
changelist_url = reverse("admin:admin_views_chapterxtra1_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
"chap__id__exact": {
"values": [c.id for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.id == value,
},
"chap__title": {
"values": [c.title for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.title == value,
},
"chap__book__id__exact": {
"values": [b.id for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.id == value,
},
"chap__book__name": {
"values": [b.name for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.name == value,
},
"chap__book__promo__id__exact": {
"values": [p.id for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
id=value
).exists(),
},
"chap__book__promo__name": {
"values": [p.name for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
name=value
).exists(),
},
# A forward relation (book) after a reverse relation (promo).
"guest_author__promo__book__id__exact": {
"values": [p.id for p in Book.objects.all()],
"test": lambda obj, value: obj.guest_author.promo_set.filter(
book=value
).exists(),
},
}
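        # For each lookup: "values" are the filter choices expected in the
        # sidebar and "test" validates every object in the filtered queryset.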
for filter_path, params in filters.items():
for value in params["values"]:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s"' % query_string)
# ensure link works
filtered_response = self.client.get(
"%s?%s" % (changelist_url, query_string)
)
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context["cl"].queryset.all():
self.assertTrue(params["test"](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse("admin:admin_views_thing_changelist")
response = self.client.get(changelist_url, {"notarealfield": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {"notarealfield__whatever": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
response = self.client.get(
changelist_url, {"color__id__exact": "StringNotInteger!"}
)
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {"pub_date__gte": "foo"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(
title="I Could Go Anywhere",
content="Versatile",
date=datetime.datetime.now(),
)
changelist_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, "4 articles")
response = self.client.get(changelist_url, {"section__isnull": "false"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "0"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "true"})
self.assertContains(response, "1 article")
response = self.client.get(changelist_url, {"section__isnull": "1"})
self.assertContains(response, "1 article")
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(
response,
'<form id="logout-form" method="post" action="%s">'
% reverse("admin:logout"),
)
self.assertContains(
response, '<a href="%s">' % reverse("admin:password_change")
)
def test_named_group_field_choices_change_list(self):
"""
        The admin changelist shows the correct human-readable values in the
        relevant column for rows whose model field defines a named group in
        its choices option.
"""
link1 = reverse("admin:admin_views_fabric_change", args=(self.fab1.pk,))
link2 = reverse("admin:admin_views_fabric_change", args=(self.fab2.pk,))
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist table isn't showing the right human-readable values "
"set by a model field 'choices' option named group."
)
self.assertContains(
response,
'<a href="%s">Horizontal</a>' % link1,
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="%s">Vertical</a>' % link2,
msg_prefix=fail_msg,
html=True,
)
def test_named_group_field_choices_filter(self):
"""
        The filter UI displays correctly when at least one named group has
        been used in the choices option of a model field.
"""
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist filter isn't showing options contained inside a model "
"field 'choices' option named group."
)
self.assertContains(response, '<div id="changelist-filter">')
self.assertContains(
response,
'<a href="?surface__exact=x">Horizontal</a>',
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="?surface__exact=y">Vertical</a>',
msg_prefix=fail_msg,
html=True,
)
def test_change_list_null_boolean_display(self):
Post.objects.create(public=None)
response = self.client.get(reverse("admin:admin_views_post_changelist"))
self.assertContains(response, "icon-unknown.svg")
def test_display_decorator_with_boolean_and_empty_value(self):
msg = (
"The boolean and empty_value arguments to the @display decorator "
"are mutually exclusive."
)
with self.assertRaisesMessage(ValueError, msg):
class BookAdmin(admin.ModelAdmin):
@admin.display(boolean=True, empty_value="(Missing)")
def is_published(self, obj):
return obj.publish_date is not None
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English but the selected language
is English. See #13388 and #3594 for more details.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("en-us"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "Choisir une heure")
def test_i18n_language_non_english_fallback(self):
"""
        The fallback language still works properly when the selected
        language cannot be found.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertContains(response, "Choisir une heure")
def test_jsi18n_with_context(self):
response = self.client.get(reverse("admin-extra-context:jsi18n"))
self.assertEqual(response.status_code, 200)
def test_jsi18n_format_fallback(self):
"""
The JavaScript i18n view doesn't return localized date/time formats
when the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="ru"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "%d.%m.%Y %H:%M:%S")
self.assertContains(response, "%Y-%m-%d %H:%M:%S")
def test_disallowed_filtering(self):
with self.assertLogs("django.security.DisallowedModelAdminLookup", "ERROR"):
response = self.client.get(
"%s?owner__email__startswith=fuzzy"
% reverse("admin:admin_views_album_changelist")
)
self.assertEqual(response.status_code, 400)
# Filters are allowed if explicitly included in list_filter
response = self.client.get(
"%s?color__value__startswith=red"
% reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
response = self.client.get(
"%s?color__value=red" % reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
# Filters should be allowed if they involve a local field without the
# need to allow them in list_filter or date_hierarchy.
response = self.client.get(
"%s?age__gt=30" % reverse("admin:admin_views_person_changelist")
)
self.assertEqual(response.status_code, 200)
e1 = Employee.objects.create(
name="Anonymous", gender=1, age=22, alive=True, code="123"
)
e2 = Employee.objects.create(
name="Visitor", gender=2, age=19, alive=True, code="124"
)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
response = self.client.get(reverse("admin:admin_views_workhour_changelist"))
self.assertContains(response, "employee__person_ptr__exact")
response = self.client.get(
"%s?employee__person_ptr__exact=%d"
% (reverse("admin:admin_views_workhour_changelist"), e1.pk)
)
self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
url = reverse("admin:admin_views_section_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "missing_field"})
self.assertEqual(response.status_code, 400)
        # Specifying a field that is not referred to by any other model
        # registered to this admin site should raise an exception.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(
reverse("admin:admin_views_section_changelist"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
# Primary key should always be allowed, even if the referenced model
# isn't registered.
response = self.client.get(
reverse("admin:admin_views_notreferenced_changelist"), {TO_FIELD_VAR: "id"}
)
self.assertEqual(response.status_code, 200)
        # Specifying a field referenced by another model through an m2m
        # should be allowed.
response = self.client.get(
reverse("admin:admin_views_recipe_changelist"), {TO_FIELD_VAR: "rname"}
)
self.assertEqual(response.status_code, 200)
# Specifying a field referenced through a reverse m2m relationship
# should be allowed.
response = self.client.get(
reverse("admin:admin_views_ingredient_changelist"), {TO_FIELD_VAR: "iname"}
)
self.assertEqual(response.status_code, 200)
        # Specifying a field that is not referred to by any other model
        # directly registered to this admin site, but is registered through
        # inheritance, should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyparent_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
        # Specifying a field that is only referred to by an inline of a
        # registered model should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyinline_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
        # #25622 - Specifying a field of a model only referred to by a
        # generic relation should raise DisallowedModelAdminToField.
url = reverse("admin:admin_views_referencedbygenrel_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "object_id"})
self.assertEqual(response.status_code, 400)
# We also want to prevent the add, change, and delete views from
# leaking a disallowed field value.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(
reverse("admin:admin_views_section_add"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
section = Section.objects.create()
url = reverse("admin:admin_views_section_change", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
url = reverse("admin:admin_views_section_delete", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
def test_allowed_filtering_15103(self):
"""
        Regression test for ticket 15103: filtering on fields defined in a
        ForeignKey's 'limit_choices_to' should be allowed; otherwise
        raw_id_fields can break.
"""
# Filters should be allowed if they are defined on a ForeignKey
# pointing to this model.
url = "%s?leader__name=Palin&leader__age=27" % reverse(
"admin:admin_views_inquisition_changelist"
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get(
"%s?%s" % (reverse("admin:admin_views_actor_changelist"), IS_POPUP_VAR)
)
self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
def test_hide_change_password(self):
"""
Tests if the "change password" link in the admin is hidden if the User
does not have a usable password set.
(against 9bea85795705d015cdadc82c68b99196a8554f5c)
"""
user = User.objects.get(username="super")
user.set_unusable_password()
user.save()
self.client.force_login(user)
response = self.client.get(reverse("admin:index"))
self.assertNotContains(
response,
reverse("admin:password_change"),
msg_prefix=(
'The "change password" link should not be displayed if a user does not '
"have a usable password."
),
)
def test_change_view_with_show_delete_extra_context(self):
"""
The 'show_delete' context variable in the admin's change view controls
the display of the delete button.
"""
instance = UndeletableObject.objects.create(name="foo")
response = self.client.get(
reverse("admin:admin_views_undeletableobject_change", args=(instance.pk,))
)
self.assertNotContains(response, "deletelink")
def test_change_view_logs_m2m_field_changes(self):
"""Changes to ManyToManyFields are included in the object's history."""
pizza = ReadablePizza.objects.create(name="Cheese")
cheese = Topping.objects.create(name="cheese")
post_data = {"name": pizza.name, "toppings": [cheese.pk]}
response = self.client.post(
reverse("admin:admin_views_readablepizza_change", args=(pizza.pk,)),
post_data,
)
self.assertRedirects(
response, reverse("admin:admin_views_readablepizza_changelist")
)
pizza_ctype = ContentType.objects.get_for_model(
ReadablePizza, for_concrete_model=False
)
log = LogEntry.objects.filter(
content_type=pizza_ctype, object_id=pizza.pk
).first()
self.assertEqual(log.get_change_message(), "Changed Toppings.")
def test_allows_attributeerror_to_bubble_up(self):
"""
AttributeErrors are allowed to bubble when raised inside a change list
view. Requires a model to be created so there's something to display.
Refs: #16655, #18593, and #18747
"""
Simple.objects.create()
with self.assertRaises(AttributeError):
self.client.get(reverse("admin:admin_views_simple_changelist"))
def test_changelist_with_no_change_url(self):
"""
ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
for change_view is removed from get_urls (#20934).
"""
o = UnchangeableObject.objects.create()
response = self.client.get(
reverse("admin:admin_views_unchangeableobject_changelist")
)
# Check the format of the shown object -- shouldn't contain a change link
self.assertContains(
response, '<th class="field-__str__">%s</th>' % o, html=True
)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = "/test_admin/admin/admin_views/"
confirm_good_url = reverse(
"admin:app_list", kwargs={"app_label": "admin_views"}
)
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", kwargs={"app_label": "this_should_fail"})
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", args=("admin_views2",))
def test_resolve_admin_views(self):
index_match = resolve("/test_admin/admin4/")
list_match = resolve("/test_admin/admin4/auth/user/")
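        # AdminSite attaches the site and ModelAdmin instances to the wrapped
        # view callables, so both are reachable via resolve(...).func.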
self.assertIs(index_match.func.admin_site, customadmin.simple_site)
self.assertIsInstance(
list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin
)
def test_adminsite_display_site_url(self):
"""
        #13749 - The admin should display a 'View site' link to the
        front-end site.
"""
url = reverse("admin:index")
response = self.client.get(url)
self.assertEqual(response.context["site_url"], "/my-site-url/")
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
def test_date_hierarchy_empty_queryset(self):
self.assertIs(Question.objects.exists(), False)
response = self.client.get(reverse("admin:admin_views_answer2_changelist"))
self.assertEqual(response.status_code, 200)
@override_settings(TIME_ZONE="America/Sao_Paulo", USE_TZ=True)
def test_date_hierarchy_timezone_dst(self):
        # Midnight at the start of this day doesn't exist in this timezone
        # due to DST (the clocks jump forward at 00:00).
for date in make_aware_datetimes(
datetime.datetime(2016, 10, 16, 15), "America/Sao_Paulo"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=16")
self.assertContains(response, "question__expires__month=10")
self.assertContains(response, "question__expires__year=2016")
@override_settings(TIME_ZONE="America/Los_Angeles", USE_TZ=True)
def test_date_hierarchy_local_date_differ_from_utc(self):
# This datetime is 2017-01-01 in UTC.
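        # The date hierarchy links should reflect the local date (2016-12-31),
        # not the UTC one, hence the assertions below.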
for date in make_aware_datetimes(
datetime.datetime(2016, 12, 31, 16), "America/Los_Angeles"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=31")
self.assertContains(response, "question__expires__month=12")
self.assertContains(response, "question__expires__year=2016")
def test_sortable_by_columns_subset(self):
expected_sortable_fields = ("date", "callable_year")
expected_not_sortable_fields = (
"content",
"model_year",
"modeladmin_year",
"model_year_reversed",
"section",
)
response = self.client.get(reverse("admin6:admin_views_article_changelist"))
for field_name in expected_sortable_fields:
self.assertContains(
response, '<th scope="col" class="sortable column-%s">' % field_name
)
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
def test_get_sortable_by_columns_subset(self):
response = self.client.get(reverse("admin6:admin_views_actor_changelist"))
self.assertContains(response, '<th scope="col" class="sortable column-age">')
self.assertContains(response, '<th scope="col" class="column-name">')
def test_sortable_by_no_column(self):
expected_not_sortable_fields = ("title", "book")
response = self.client.get(reverse("admin6:admin_views_chapter_changelist"))
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_get_sortable_by_no_column(self):
response = self.client.get(reverse("admin6:admin_views_color_changelist"))
self.assertContains(response, '<th scope="col" class="column-value">')
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_app_index_context(self):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(
response,
"<title>Admin_Views administration | Django site admin</title>",
)
self.assertEqual(response.context["title"], "Admin_Views administration")
self.assertEqual(response.context["app_label"], "admin_views")
# Models are sorted alphabetically by default.
models = [model["name"] for model in response.context["app_list"][0]["models"]]
self.assertSequenceEqual(models, sorted(models))
def test_app_index_context_reordered(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin2:app_list", args=("admin_views",)))
self.assertContains(
response,
"<title>Admin_Views administration | Django site admin</title>",
)
# Models are in reverse order.
models = [model["name"] for model in response.context["app_list"][0]["models"]]
self.assertSequenceEqual(models, sorted(models, reverse=True))
def test_change_view_subtitle_per_object(self):
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_view_subtitle_per_object(self):
viewuser = User.objects.create_user(
username="viewuser",
password="secret",
is_staff=True,
)
viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", Article._meta)),
)
self.client.force_login(viewuser)
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_formset_kwargs_can_be_overridden(self):
response = self.client.get(reverse("admin:admin_views_city_add"))
self.assertContains(response, "overridden_name")
def test_render_views_no_subtitle(self):
tests = [
reverse("admin:index"),
reverse("admin:password_change"),
reverse("admin:app_list", args=("admin_views",)),
reverse("admin:admin_views_article_delete", args=(self.a1.pk,)),
reverse("admin:admin_views_article_history", args=(self.a1.pk,)),
]
for url in tests:
with self.subTest(url=url):
with self.assertNoLogs("django.template", "DEBUG"):
self.client.get(url)
# Login must be after logout.
with self.assertNoLogs("django.template", "DEBUG"):
self.client.post(reverse("admin:logout"))
self.client.get(reverse("admin:login"))
def test_render_delete_selected_confirmation_no_subtitle(self):
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": self.a1.pk,
}
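        # "action" names the admin action, "index" identifies which action
        # form on the page submitted it, and "_selected_action" carries the
        # selected pks.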
with self.assertNoLogs("django.template", "DEBUG"):
self.client.post(reverse("admin:admin_views_article_changelist"), post_data)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation."
"NumericPasswordValidator"
)
},
]
)
def test_password_change_helptext(self):
response = self.client.get(reverse("admin:password_change"))
self.assertContains(
response, '<div class="help" id="id_new_password1_helptext">'
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation." "NumericPasswordValidator"
)
},
],
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
# Put this app's and the shared tests templates dirs in DIRS to
# take precedence over the admin's templates dir.
"DIRS": [
os.path.join(os.path.dirname(__file__), "templates"),
os.path.join(os.path.dirname(os.path.dirname(__file__)), "templates"),
],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminCustomTemplateTests(AdminViewBasicTestCase):
def test_custom_model_admin_templates(self):
# Test custom change list template with custom extra context
response = self.client.get(
reverse("admin:admin_views_customarticle_changelist")
)
self.assertContains(response, "var hello = 'Hello!';")
self.assertTemplateUsed(response, "custom_admin/change_list.html")
# Test custom add form template
response = self.client.get(reverse("admin:admin_views_customarticle_add"))
self.assertTemplateUsed(response, "custom_admin/add_form.html")
# Add an article so we can test delete, change, and history views
post = self.client.post(
reverse("admin:admin_views_customarticle_add"),
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
},
)
self.assertRedirects(
post, reverse("admin:admin_views_customarticle_changelist")
)
self.assertEqual(CustomArticle.objects.count(), 1)
article_pk = CustomArticle.objects.all()[0].pk
# Test custom delete, change, and object history templates
# Test custom change form template
response = self.client.get(
reverse("admin:admin_views_customarticle_change", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/change_form.html")
response = self.client.get(
reverse("admin:admin_views_customarticle_delete", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/delete_confirmation.html")
response = self.client.post(
reverse("admin:admin_views_customarticle_changelist"),
data={
"index": 0,
"action": ["delete_selected"],
"_selected_action": ["1"],
},
)
self.assertTemplateUsed(
response, "custom_admin/delete_selected_confirmation.html"
)
response = self.client.get(
reverse("admin:admin_views_customarticle_history", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/object_history.html")
# A custom popup response template may be specified by
# ModelAdmin.popup_response_template.
response = self.client.post(
reverse("admin:admin_views_customarticle_add") + "?%s=1" % IS_POPUP_VAR,
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
IS_POPUP_VAR: "1",
},
)
self.assertEqual(response.template_name, "custom_admin/popup_response.html")
def test_extended_bodyclass_template_change_form(self):
"""
The admin/change_form.html template uses block.super in the
bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_change_password_template(self):
user = User.objects.get(username="super")
response = self.client.get(
reverse("admin:auth_user_password_change", args=(user.id,))
)
# The auth/user/change_password.html template uses super in the
# bodyclass block.
self.assertContains(response, "bodyclass_consistency_check ")
# When a site has multiple passwords in the browser's password manager,
        # a browser pop-up asks which user the new password is for. To prevent
# this, the username is added to the change password form.
self.assertContains(
response, '<input type="text" name="username" value="super" class="hidden">'
)
        # Help text for passwords has an id.
self.assertContains(
response,
'<div class="help" id="id_password1_helptext"><ul><li>'
"Your password can’t be too similar to your other personal information."
"</li><li>Your password can’t be entirely numeric.</li></ul></div>",
)
self.assertContains(
response,
'<div class="help" id="id_password2_helptext">'
"Enter the same password as before, for verification.</div>",
)
def test_extended_bodyclass_template_index(self):
"""
The admin/index.html template uses block.super in the bodyclass block.
"""
response = self.client.get(reverse("admin:index"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_change_list(self):
"""
The admin/change_list.html' template uses block.super
in the bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_login(self):
"""
The admin/login.html template uses block.super in the
bodyclass block.
"""
self.client.logout()
response = self.client.get(reverse("admin:login"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_confirmation(self):
"""
The admin/delete_confirmation.html template uses
block.super in the bodyclass block.
"""
group = Group.objects.create(name="foogroup")
response = self.client.get(reverse("admin:auth_group_delete", args=(group.id,)))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_selected_confirmation(self):
"""
The admin/delete_selected_confirmation.html template uses
block.super in bodyclass block.
"""
group = Group.objects.create(name="foogroup")
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": group.id,
}
response = self.client.post(reverse("admin:auth_group_changelist"), post_data)
self.assertEqual(response.context["site_header"], "Django administration")
self.assertContains(response, "bodyclass_consistency_check ")
def test_filter_with_custom_template(self):
"""
A custom template can be used to render an admin filter.
"""
response = self.client.get(reverse("admin:admin_views_color2_changelist"))
self.assertTemplateUsed(response, "custom_filter_template.html")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewFormUrlTest(TestCase):
current_app = "admin3"
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_change_form_URL_has_correct_value(self):
"""
change_view has form_url in response.context
"""
response = self.client.get(
reverse(
"admin:admin_views_section_change",
args=(self.s1.pk,),
current_app=self.current_app,
)
)
self.assertIn(
"form_url", response.context, msg="form_url not present in response.context"
)
self.assertEqual(response.context["form_url"], "pony")
def test_initial_data_can_be_overridden(self):
"""
The behavior for setting initial form data can be overridden in the
ModelAdmin class. Usually, the initial value is set via the GET params.
"""
response = self.client.get(
reverse("admin:admin_views_restaurant_add", current_app=self.current_app),
{"name": "test_value"},
)
        # This would be the usual behavior.
self.assertNotContains(response, 'value="test_value"')
        # This is the overridden behavior.
self.assertContains(response, 'value="overridden_value"')
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminJavaScriptTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_js_minified_only_if_debug_is_false(self):
"""
The minified versions of the JS files are only used when DEBUG is False.
"""
with override_settings(DEBUG=False):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertNotContains(response, "vendor/jquery/jquery.js")
self.assertContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
with override_settings(DEBUG=True):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "vendor/jquery/jquery.js")
self.assertNotContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
@override_settings(ROOT_URLCONF="admin_views.urls")
class SaveAsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_as_duplication(self):
"""'save as' creates a new person"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)), post_data
)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
new_person = Person.objects.latest("id")
self.assertRedirects(
response, reverse("admin:admin_views_person_change", args=(new_person.pk,))
)
def test_save_as_continue_false(self):
"""
Saving a new object using "Save as new" redirects to the changelist
instead of the change view when ModelAdmin.save_as_continue=False.
"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
url = reverse(
"admin:admin_views_person_change",
args=(self.per1.pk,),
current_app=site2.name,
)
response = self.client.post(url, post_data)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
self.assertRedirects(
response,
reverse("admin:admin_views_person_changelist", current_app=site2.name),
)
def test_save_as_new_with_validation_errors(self):
"""
When you click "Save as new" and have a validation error,
you only see the "Save as new" button and not the other save buttons,
and that only the "Save as" button is visible.
"""
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)),
{
"_saveasnew": "",
"gender": "invalid",
"_addanother": "fail",
},
)
self.assertContains(response, "Please correct the errors below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_validation_errors_with_inlines(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "Child",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "_invalid",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_inlines_with_validation_errors(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "_invalid",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "Father",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
@override_settings(ROOT_URLCONF="admin_views.urls")
class CustomModelAdminTest(AdminViewBasicTestCase):
def test_custom_admin_site_login_form(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
login = self.client.post(
reverse("admin2:login"),
{
REDIRECT_FIELD_NAME: reverse("admin2:index"),
"username": "customform",
"password": "secret",
},
follow=True,
)
self.assertIsInstance(login, TemplateResponse)
self.assertContains(login, "custom form error")
self.assertContains(login, "path/to/media.css")
def test_custom_admin_site_login_template(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/login.html")
self.assertContains(response, "Hello from a custom login template")
def test_custom_admin_site_logout_template(self):
response = self.client.post(reverse("admin2:logout"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/logout.html")
self.assertContains(response, "Hello from a custom logout template")
def test_custom_admin_site_index_view_and_template(self):
response = self.client.get(reverse("admin2:index"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/index.html")
self.assertContains(response, "Hello from a custom index template *bar*")
def test_custom_admin_site_app_index_view_and_template(self):
response = self.client.get(reverse("admin2:app_list", args=("admin_views",)))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/app_index.html")
self.assertContains(response, "Hello from a custom app_index template")
def test_custom_admin_site_password_change_template(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(
response, "Hello from a custom password change form template"
)
def test_custom_admin_site_password_change_with_extra_context(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(response, "eggs")
def test_custom_admin_site_password_change_done_template(self):
response = self.client.get(reverse("admin2:password_change_done"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_done.html")
self.assertContains(
response, "Hello from a custom password change done template"
)
def test_custom_admin_site_view(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin2:my_view"))
self.assertEqual(response.content, b"Django is a magical pony!")
def test_pwd_change_custom_template(self):
self.client.force_login(self.superuser)
su = User.objects.get(username="super")
response = self.client.get(
reverse("admin4:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 200)
def get_perm(Model, codename):
"""Return the permission object, for the Model"""
ct = ContentType.objects.get_for_model(Model, for_concrete_model=False)
return Permission.objects.get(content_type=ct, codename=codename)
@override_settings(
ROOT_URLCONF="admin_views.urls",
# Test with the admin's documented list of required context processors.
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.nostaffuser = User.objects.create_user(
username="nostaff", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
another_section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
        # Set up permissions for the users who can add, change, and delete.
opts = Article._meta
# User who can view Articles
cls.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", opts))
)
# User who can add Articles
cls.adduser.user_permissions.add(
get_perm(Article, get_permission_codename("add", opts))
)
# User who can change Articles
cls.changeuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
cls.nostaffuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
# User who can delete Articles
cls.deleteuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(Section, get_permission_codename("delete", Section._meta))
)
# login POST dicts
cls.index_url = reverse("admin:index")
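# REDIRECT_FIELD_NAME is "next" (django.contrib.auth), so each dict below
# posts both the credentials and the post-login redirect target.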
cls.super_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "super",
"password": "secret",
}
cls.super_email_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "secret",
}
cls.super_email_bad_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "notsecret",
}
cls.adduser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "adduser",
"password": "secret",
}
cls.changeuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "changeuser",
"password": "secret",
}
cls.deleteuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "deleteuser",
"password": "secret",
}
cls.nostaff_login = {
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "nostaff",
"password": "secret",
}
cls.joepublic_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "joepublic",
"password": "secret",
}
cls.viewuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "viewuser",
"password": "secret",
}
cls.no_username_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"password": "secret",
}
def test_login(self):
"""
Make sure only staff members can log in.
Successful posts to the login page will redirect to the original URL.
Unsuccessful attempts will continue to render the login page with
a 200 status code.
"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
# Super User
response = self.client.get(self.index_url)
self.assertRedirects(response, login_url)
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Test if user enters email address
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# only correct passwords get a username hint
login = self.client.post(login_url, self.super_email_bad_login)
self.assertContains(login, ERROR_MESSAGE)
new_user = User(username="jondoe", password="secret", email="[email protected]")
new_user.save()
# Make sure that a user with multiple matching email addresses doesn't get a 500.
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# View User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.viewuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Add User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.adduser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Change User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.changeuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Delete User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.deleteuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Regular users should not be able to log in.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Requests without username should not return 500 errors.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.no_username_login)
self.assertEqual(login.status_code, 200)
self.assertFormError(
login.context["form"], "username", ["This field is required."]
)
def test_login_redirect_for_direct_get(self):
"""
Login redirect should be to the admin index page when going directly to
/admin/login/.
"""
response = self.client.get(reverse("admin:login"))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse("admin:index"))
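# The has_permission_admin site is assumed to override
# AdminSite.has_permission() so that active non-staff users may log in;
# the assertions below exercise exactly that.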
def test_login_has_permission(self):
# Regular users should not be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.joepublic_login
)
self.assertContains(login, "permission denied")
# User with permissions should be able to login.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.nostaff_login
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.post(reverse("has_permission_admin:logout"))
# Staff should be able to login.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"),
{
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "deleteuser",
"password": "secret",
},
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.post(reverse("has_permission_admin:logout"))
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = "the-answer=42"
redirect_url = "%s?%s" % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
"%s?%s" % (reverse("admin:login"), urlencode(new_next)), post_data
)
self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
"""Regression test for #19327"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with non-admin user fails
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with admin user while already logged in
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
def test_login_page_notice_for_non_staff_users(self):
"""
A logged-in non-staff user trying to access the admin index should be
presented with the login page and a hint indicating that the current
user doesn't have access to it.
"""
hint_template = "You are authenticated as {}"
# Anonymous user should not be shown the hint
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertNotContains(response, hint_template.format(""), status_code=200)
# Non-staff user should be shown the hint
self.client.force_login(self.nostaffuser)
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertContains(
response, hint_template.format(self.nostaffuser.username), status_code=200
)
def test_add_view(self):
"""Test add view restricts access and actually adds items."""
add_dict = {
"title": "Døm ikke",
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
# Change User should not have access to add articles
self.client.force_login(self.changeuser)
# make sure the view removes test cookie
self.assertIs(self.client.session.test_cookie_worked(), False)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.post(reverse("admin:logout"))
# View User should not have access to add articles
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
# Now give the user permission to add but not change.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.context["title"], "Add article")
self.assertContains(response, "<title>Add article | Django site admin</title>")
self.assertContains(
response, '<input type="submit" value="Save and view" name="_continue">'
)
post = self.client.post(
reverse("admin:admin_views_article_add"), add_dict, follow=False
)
self.assertEqual(post.status_code, 302)
self.assertEqual(Article.objects.count(), 4)
article = Article.objects.latest("pk")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(article.pk,))
)
self.assertContains(
response,
'<li class="success">The article “Døm ikke” was added successfully.</li>',
)
article.delete()
self.client.post(reverse("admin:logout"))
# Add user may log in and POST to the add view, then is redirected to the admin root
self.client.force_login(self.adduser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
change_list_link = '› <a href="%s">Articles</a>' % reverse(
"admin:admin_views_article_changelist"
)
self.assertNotContains(
addpage,
change_list_link,
msg_prefix=(
"User restricted to add permission is given link to change list view "
"in breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 4)
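# The Article ModelAdmin used here is assumed to send notification
# emails on save, hence the mail.outbox assertions below.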
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, "Greetings from a created object")
self.client.post(reverse("admin:logout"))
# The addition was logged correctly
addition_log = LogEntry.objects.all()[0]
new_article = Article.objects.last()
article_ct = ContentType.objects.get_for_model(Article)
self.assertEqual(addition_log.user_id, self.adduser.pk)
self.assertEqual(addition_log.content_type_id, article_ct.pk)
self.assertEqual(addition_log.object_id, str(new_article.pk))
self.assertEqual(addition_log.object_repr, "Døm ikke")
self.assertEqual(addition_log.action_flag, ADDITION)
self.assertEqual(addition_log.get_change_message(), "Added.")
# Super can add too, but is redirected to the change list view
self.client.force_login(self.superuser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
self.assertContains(
addpage,
change_list_link,
msg_prefix=(
"Unrestricted user is not given link to change list view in "
"breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, reverse("admin:admin_views_article_changelist"))
self.assertEqual(Article.objects.count(), 5)
self.client.post(reverse("admin:logout"))
# Refs #8509 -- if a normal user is already logged in, it is possible
# to switch to the superuser without error.
self.client.force_login(self.joepublicuser)
# Make sure that if the user's session expires, data still persists.
self.client.force_login(self.superuser)
# make sure the view removes test cookie
self.assertIs(self.client.session.test_cookie_worked(), False)
@mock.patch("django.contrib.admin.options.InlineModelAdmin.has_change_permission")
def test_add_view_with_view_only_inlines(self, has_change_permission):
"""User with add permission to a section but view-only for inlines."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("add", Section._meta))
)
self.client.force_login(self.viewuser)
# Valid POST creates a new section.
data = {
"name": "New obj",
"article_set-TOTAL_FORMS": 0,
"article_set-INITIAL_FORMS": 0,
}
response = self.client.post(reverse("admin:admin_views_section_add"), data)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(Section.objects.latest("id").name, data["name"])
# InlineModelAdmin.has_change_permission()'s obj argument is always
# None during object add.
self.assertEqual(
[obj for (request, obj), _ in has_change_permission.call_args_list],
[None, None],
)
def test_change_view(self):
"""Change view should restrict access and allow users to edit items."""
change_dict = {
"title": "Ikke fordømt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
article_changelist_url = reverse("admin:admin_views_article_changelist")
# add user should not be able to view the list of articles or change any of them
self.client.force_login(self.adduser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.status_code, 403)
response = self.client.get(article_change_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.client.post(reverse("admin:logout"))
# view user can view articles but not make changes.
self.client.force_login(self.viewuser)
response = self.client.get(article_changelist_url)
self.assertContains(
response,
"<title>Select article to view | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to view</h1>")
self.assertEqual(response.context["title"], "Select article to view")
response = self.client.get(article_change_url)
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<label>Extra form field:</label>")
self.assertContains(
response,
'<a href="/test_admin/admin/admin_views/article/" class="closelink">Close'
"</a>",
)
self.assertEqual(response.context["title"], "View article")
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>Middle content</p>"
)
self.client.post(reverse("admin:logout"))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.context["title"], "Select article to change")
self.assertContains(
response,
"<title>Select article to change | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to change</h1>")
response = self.client.get(article_change_url)
self.assertEqual(response.context["title"], "Change article")
self.assertContains(
response,
"<title>Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
post = self.client.post(article_change_url, change_dict)
self.assertRedirects(post, article_changelist_url)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>edited article</p>"
)
# One error in the form should produce the singular error message;
# multiple errors should produce the plural.
change_dict["title"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the error below.",
msg_prefix=(
"Singular error message not found in response to post with one error"
),
)
change_dict["content"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the errors below.",
msg_prefix=(
"Plural error message not found in response to post with multiple "
"errors"
),
)
self.client.post(reverse("admin:logout"))
# Test redirection when using row-level change permissions. Refs #11513.
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
r3 = RowLevelChangePermissionModel.objects.create(id=3, name="odd id mult 3")
r6 = RowLevelChangePermissionModel.objects.create(id=6, name="even id mult 3")
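# Inferred from the assertions below: the test ModelAdmin for
# RowLevelChangePermissionModel grants change permission only for even
# ids and view permission only for ids divisible by 3.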
change_url_1 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r1.pk,)
)
change_url_2 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r2.pk,)
)
change_url_3 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r3.pk,)
)
change_url_6 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r6.pk,)
)
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1)
self.assertEqual(response.status_code, 403)
response = self.client.post(change_url_1, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertEqual(response.status_code, 403)
response = self.client.get(change_url_2)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_2, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertRedirects(response, self.index_url)
response = self.client.get(change_url_3)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_3, {"name": "changed"})
self.assertEqual(response.status_code, 403)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=3).name,
"odd id mult 3",
)
response = self.client.get(change_url_6)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_6, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=6).name, "changed"
)
self.assertRedirects(response, self.index_url)
self.client.post(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_1, {"name": "changed"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertContains(response, "login-form")
response = self.client.get(change_url_2, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_2, {"name": "changed again"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertContains(response, "login-form")
self.client.post(reverse("admin:logout"))
def test_change_view_without_object_change_permission(self):
"""
The object should be read-only if the user has permission to view it
and change objects of that type but not to change the current object.
"""
change_url = reverse("admin9:admin_views_article_change", args=(self.a1.pk,))
self.client.force_login(self.viewuser)
response = self.client.get(change_url)
self.assertEqual(response.context["title"], "View article")
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(
response,
'<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close'
"</a>",
)
def test_change_view_save_as_new(self):
"""
'Save as new' should raise PermissionDenied for users without the 'add'
permission.
"""
change_dict_save_as_new = {
"_saveasnew": "Save as new",
"title": "Ikke fordømt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
# Add user can perform "Save as new".
article_count = Article.objects.count()
self.client.force_login(self.adduser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), article_count + 1)
self.client.logout()
# Change user cannot perform "Save as new" (no 'add' permission).
article_count = Article.objects.count()
self.client.force_login(self.changeuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), article_count)
# User with both add and change permissions should be redirected to the
# change page for the newly created object.
article_count = Article.objects.count()
self.client.force_login(self.superuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(Article.objects.count(), article_count + 1)
new_article = Article.objects.latest("id")
self.assertRedirects(
post, reverse("admin:admin_views_article_change", args=(new_article.pk,))
)
def test_change_view_with_view_only_inlines(self):
"""
User with change permission to a section but view-only for inlines.
"""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
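# Three existing articles yield three inline forms; with view-only
# inlines, no extra blank forms are rendered.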
# Valid POST changes the name.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 3,
"article_set-INITIAL_FORMS": 3,
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
def test_change_view_with_view_only_last_inline(self):
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("view", Section._meta))
)
self.client.force_login(self.viewuser)
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
# The last inline is not marked as empty.
self.assertContains(response, 'id="article_set-2"')
def test_change_view_with_view_and_add_inlines(self):
"""User has view and add permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 6)
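# With add permission on Article, the formset renders the three existing
# articles plus three extra blank add forms, six in total.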
# Valid POST creates a new article.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-3-id": [""],
"article_set-3-title": ["A title"],
"article_set-3-content": ["Added content"],
"article_set-3-date_0": ["2008-3-18"],
"article_set-3-date_1": ["11:54:58"],
"article_set-3-section": [str(self.s1.pk)],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
self.assertEqual(Article.objects.count(), 4)
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 6)
def test_change_view_with_view_and_delete_inlines(self):
"""User has view and delete permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
# Inline POST details are ignored without delete permission.
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 3)
# Deletion successful when delete permission is added.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", Article._meta))
)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 2)
def test_delete_view(self):
"""Delete view should restrict access and actually delete items."""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_article_delete", args=(self.a1.pk,))
# add user should not be able to delete articles
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# view user should not be able to delete articles
self.client.force_login(self.viewuser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# Delete user can delete
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 3</li>")
# test response contains link to related Article
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 1</li>")
post = self.client.post(delete_url, delete_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Greetings from a deleted object")
article_ct = ContentType.objects.get_for_model(Article)
logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
self.assertEqual(logged.object_id, str(self.a1.pk))
def test_delete_view_with_no_default_permissions(self):
"""
The delete view allows users to delete collected objects without a
'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty).
"""
pizza = ReadOnlyPizza.objects.create(name="Double Cheese")
delete_url = reverse("admin:admin_views_readonlypizza_delete", args=(pizza.pk,))
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/readonlypizza/%s/" % pizza.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Read only pizzas: 1</li>")
post = self.client.post(delete_url, {"post": "yes"})
self.assertRedirects(
post, reverse("admin:admin_views_readonlypizza_changelist")
)
self.assertEqual(ReadOnlyPizza.objects.count(), 0)
def test_delete_view_nonexistent_obj(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_article_delete", args=("nonexistent",))
response = self.client.get(url, follow=True)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID “nonexistent” doesn’t exist. Perhaps it was deleted?"],
)
def test_history_view(self):
"""History view should restrict access."""
# add user should not be able to view the list of articles or change any of them
self.client.force_login(self.adduser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 403)
self.client.post(reverse("admin:logout"))
# view user can view all items
self.client.force_login(self.viewuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
self.client.post(reverse("admin:logout"))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
# Test redirection when using row-level change permissions. Refs #11513.
rl1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
rl2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.client.post(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
self.client.post(reverse("admin:logout"))
def test_history_view_bad_url(self):
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=("foo",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID “foo” doesn’t exist. Perhaps it was deleted?"],
)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
self.client.force_login(self.adduser)
# The user can't add sections yet, so they shouldn't see the "add section" link.
url = reverse("admin:admin_views_article_add")
add_link_text = "add_id_section"
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("add", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_conditionally_show_change_section_link(self):
"""
The foreign key widget should only show the "change related" button if
the user has permission to change that related item.
"""
def get_change_related(response):
return (
response.context["adminform"]
.form.fields["section"]
.widget.can_change_related
)
self.client.force_login(self.adduser)
# The user can't change sections yet, so they shouldn't see the
# "change section" link.
url = reverse("admin:admin_views_article_add")
change_link_text = "change_id_section"
response = self.client.get(url)
self.assertFalse(get_change_related(response))
self.assertNotContains(response, change_link_text)
# Allow the user to change sections too. Now they can see the
# "change section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("change", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_change_related(response))
self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
"""
The foreign key widget should only show the "delete related" button if
the user has permission to delete that related item.
"""
def get_delete_related(response):
return (
response.context["adminform"]
.form.fields["sub_section"]
.widget.can_delete_related
)
self.client.force_login(self.adduser)
# The user can't delete sections yet, so they shouldn't see the
# "delete section" link.
url = reverse("admin:admin_views_article_add")
delete_link_text = "delete_id_sub_section"
response = self.client.get(url)
self.assertFalse(get_delete_related(response))
self.assertNotContains(response, delete_link_text)
# Allow the user to delete sections too. Now they can see the
# "delete section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("delete", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_delete_related(response))
self.assertContains(response, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_active = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_staff = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_app_list_permissions(self):
"""
If a user has no module perms, the app list returns a 404.
"""
opts = Article._meta
change_user = User.objects.get(username="changeuser")
permission = get_perm(Article, get_permission_codename("change", opts))
self.client.force_login(self.changeuser)
# the user has no module permissions
change_user.user_permissions.remove(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 404)
# the user now has module permissions
change_user.user_permissions.add(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk="foo")
shortcut_url = reverse("admin:view_on_site", args=(model_ctype.pk, obj.pk))
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
# Logged in? Redirect.
self.client.force_login(self.superuser)
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
# The domain may depend on whether the contrib.sites tests also run.
self.assertRegex(response.url, "http://(testserver|example.com)/dummy/foo/")
def test_has_module_permission(self):
"""
has_module_permission() returns True for all users who have any
permission for that module (view, add, change, or delete), so that
the module is displayed on the admin index page.
"""
self.client.force_login(self.superuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
def test_overriding_has_module_permission(self):
"""
If has_module_permission() always returns False, the module shouldn't
be displayed on the admin index page for any users.
"""
articles = Article._meta.verbose_name_plural.title()
sections = Section._meta.verbose_name_plural.title()
index_url = reverse("admin7:index")
self.client.force_login(self.superuser)
response = self.client.get(index_url)
self.assertContains(response, sections)
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(index_url)
self.assertNotContains(response, articles)
# The app list displays Sections but not Articles, as the latter's
# ModelAdmin.has_module_permission() returns False.
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin7:app_list", args=("admin_views",)))
self.assertContains(response, sections)
self.assertNotContains(response, articles)
def test_post_save_message_no_forbidden_links_visible(self):
"""
Post-save message shouldn't contain a link to the change form if the
user doesn't have the change permission.
"""
self.client.force_login(self.adduser)
# Emulate Article creation for user with add-only permission.
post_data = {
"title": "Fun & games",
"content": "Some content",
"date_0": "2015-10-31",
"date_1": "16:35:00",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_article_add"), post_data, follow=True
)
self.assertContains(
response,
'<li class="success">The article “Fun & games” was added successfully.'
"</li>",
html=True,
)
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewProxyModelPermissionsTests(TestCase):
"""Tests for proxy models permissions in the admin."""
@classmethod
def setUpTestData(cls):
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
# Set up permissions.
opts = UserProxy._meta
cls.viewuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("view", opts))
)
cls.adduser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("add", opts))
)
cls.changeuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("change", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("delete", opts))
)
# UserProxy instances.
cls.user_proxy = UserProxy.objects.create(
username="user_proxy", password="secret"
)
def test_add(self):
self.client.force_login(self.adduser)
url = reverse("admin:admin_views_userproxy_add")
data = {
"username": "can_add",
"password": "secret",
"date_joined_0": "2019-01-15",
"date_joined_1": "16:59:10",
}
response = self.client.post(url, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(UserProxy.objects.filter(username="can_add").exists())
def test_view(self):
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_userproxy_changelist"))
self.assertContains(response, "<h1>Select user proxy to view</h1>")
response = self.client.get(
reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
)
self.assertContains(response, "<h1>View user proxy</h1>")
self.assertContains(response, '<div class="readonly">user_proxy</div>')
def test_change(self):
self.client.force_login(self.changeuser)
data = {
"password": self.user_proxy.password,
"username": self.user_proxy.username,
"date_joined_0": self.user_proxy.date_joined.strftime("%Y-%m-%d"),
"date_joined_1": self.user_proxy.date_joined.strftime("%H:%M:%S"),
"first_name": "first_name",
}
url = reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
response = self.client.post(url, data)
self.assertRedirects(
response, reverse("admin:admin_views_userproxy_changelist")
)
self.assertEqual(
UserProxy.objects.get(pk=self.user_proxy.pk).first_name, "first_name"
)
def test_delete(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_userproxy_delete", args=(self.user_proxy.pk,))
response = self.client.post(url, {"post": "yes"}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists())
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewsNoUrlTest(TestCase):
"""Regression test for #17333"""
@classmethod
def setUpTestData(cls):
# User who can change Reports
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.changeuser.user_permissions.add(
get_perm(Report, get_permission_codename("change", Report._meta))
)
def test_no_standard_modeladmin_urls(self):
"""Admin index views don't break when user's ModelAdmin removes standard urls"""
self.client.force_login(self.changeuser)
r = self.client.get(reverse("admin:index"))
# we shouldn't get a 500 error caused by a NoReverseMatch
self.assertEqual(r.status_code, 200)
self.client.post(reverse("admin:logout"))
@skipUnlessDBFeature("can_defer_constraint_checks")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewDeletedObjectsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.v2 = Villain.objects.create(name="Sue")
cls.sv1 = SuperVillain.objects.create(name="Bob")
cls.pl1 = Plot.objects.create(
name="World Domination", team_leader=cls.v1, contact=cls.v2
)
cls.pl2 = Plot.objects.create(
name="World Peace", team_leader=cls.v2, contact=cls.v2
)
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
cls.pd1 = PlotDetails.objects.create(details="almost finished", plot=cls.pl1)
cls.sh1 = SecretHideout.objects.create(
location="underground bunker", villain=cls.v1
)
cls.sh2 = SecretHideout.objects.create(
location="floating castle", villain=cls.sv1
)
cls.ssh1 = SuperSecretHideout.objects.create(
location="super floating castle!", supervillain=cls.sv1
)
cls.cy1 = CyclicOne.objects.create(pk=1, name="I am recursive", two_id=1)
cls.cy2 = CyclicTwo.objects.create(pk=1, name="I am recursive too", one_id=1)
def setUp(self):
self.client.force_login(self.superuser)
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(
r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
r'<li>Plot details: <a href="%s">almost finished</a>'
% (
reverse("admin:admin_views_plot_change", args=(self.pl1.pk,)),
reverse("admin:admin_views_plotdetails_change", args=(self.pd1.pk,)),
)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertRegex(response.content.decode(), pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
reverse("admin:admin_views_cyclicone_change", args=(self.cy1.pk,)),
)
two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
reverse("admin:admin_views_cyclictwo_change", args=(self.cy2.pk,)),
)
response = self.client.get(
reverse("admin:admin_views_cyclicone_delete", args=(self.cy1.pk,))
)
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
self.client.logout()
delete_user = User.objects.get(username="deleteuser")
delete_user.user_permissions.add(
get_perm(Plot, get_permission_codename("delete", Plot._meta))
)
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(self.pl1.pk,))
)
self.assertContains(
response,
"your account doesn't have permission to delete the following types of "
"objects",
)
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(
reverse("admin:admin_views_question_delete", args=(q.pk,))
)
self.assertContains(
response, "would require deleting the following protected related objects"
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a1.pk,)),
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a2.pk,)),
)
def test_post_delete_protected(self):
"""
A POST request to delete protected objects should display the page
which says the deletion is prohibited.
"""
q = Question.objects.create(question="Why?")
Answer.objects.create(question=q, answer="Because.")
response = self.client.post(
reverse("admin:admin_views_question_delete", args=(q.pk,)), {"post": "yes"}
)
self.assertEqual(Question.objects.count(), 1)
self.assertContains(
response, "would require deleting the following protected related objects"
)
def test_restricted(self):
album = Album.objects.create(title="Amaryllis")
song = Song.objects.create(album=album, name="Unity")
response = self.client.get(
reverse("admin:admin_views_album_delete", args=(album.pk,))
)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
self.assertContains(
response,
'<li>Song: <a href="%s">Unity</a></li>'
% reverse("admin:admin_views_song_change", args=(song.pk,)),
)
def test_post_delete_restricted(self):
album = Album.objects.create(title="Amaryllis")
Song.objects.create(album=album, name="Unity")
response = self.client.post(
reverse("admin:admin_views_album_delete", args=(album.pk,)),
{"post": "yes"},
)
self.assertEqual(Album.objects.count(), 1)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
def test_not_registered(self):
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
"admin:admin_views_plot_change", args=(self.pl1.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
"admin:admin_views_plot_change", args=(self.pl2.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_villain_change", args=(self.sv1.pk,)),
'<li>Super villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_supervillain_change", args=(self.sv1.pk,)),
"<li>Secret hideout: floating castle",
"<li>Super secret hideout: super floating castle!",
]
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(
reverse("admin:admin_views_supervillain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = self.pl3
tag = FunkyTag.objects.create(content_object=plot, name="hott")
should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
"admin:admin_views_funkytag_change", args=(tag.id,)
)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(plot.pk,))
)
self.assertContains(response, should_contain)
def test_generic_relations_with_related_query_name(self):
"""
If a deleted object has GenericForeignKey with
GenericRelation(related_query_name='...') pointing to it, those objects
should be listed for deletion.
"""
bookmark = Bookmark.objects.create(name="djangoproject")
tag = FunkyTag.objects.create(content_object=bookmark, name="django")
tag_url = reverse("admin:admin_views_funkytag_change", args=(tag.id,))
should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
response = self.client.get(
reverse("admin:admin_views_bookmark_delete", args=(bookmark.pk,))
)
self.assertContains(response, should_contain)
def test_delete_view_uses_get_deleted_objects(self):
"""The delete view uses ModelAdmin.get_deleted_objects()."""
book = Book.objects.create(name="Test Book")
response = self.client.get(
reverse("admin2:admin_views_book_delete", args=(book.pk,))
)
# BookAdmin.get_deleted_objects() returns custom text.
self.assertContains(response, "a deletable object")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestGenericRelations(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
def setUp(self):
self.client.force_login(self.superuser)
def test_generic_content_object_in_list_display(self):
FunkyTag.objects.create(content_object=self.pl3, name="hott")
response = self.client.get(reverse("admin:admin_views_funkytag_changelist"))
self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewStringPrimaryKeyTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
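# A primary key packed with characters that require URL quoting and HTML
# escaping, to exercise the admin's handling of hostile string pks.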
cls.pk = (
"abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
)
cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
content_type_pk = ContentType.objects.get_for_model(
ModelWithStringPrimaryKey
).pk
user_pk = cls.superuser.pk
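# log_action(user_id, content_type_id, object_id, object_repr,
# action_flag, change_message=...); action_flag=2 is admin.models.CHANGE.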
LogEntry.objects.log_action(
user_pk,
content_type_pk,
cls.pk,
cls.pk,
2,
change_message="Changed something",
)
def setUp(self):
self.client.force_login(self.superuser)
def test_get_history_view(self):
"""
Retrieving the history for an object using the urlencoded form of the
primary key should work.
Refs #12349, #18550.
"""
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_history", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
self.assertContains(response, "Changed something")
def test_get_change_view(self):
"Retrieving the object using urlencoded form of primary key should work"
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
def test_changelist_to_changeform_link(self):
"""
The link to the changeform of the object in the changelist should use
reverse() and be quoted.
"""
response = self.client.get(
reverse("admin:admin_views_modelwithstringprimarykey_changelist")
)
# This URL now comes through reverse(), thus URL quoting and iri_to_uri() encoding.
pk_final_url = escape(iri_to_uri(quote(self.pk)))
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", pk_final_url)
should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (
change_url,
escape(self.pk),
)
self.assertContains(response, should_contain)
def test_recentactions_link(self):
"""
The link from the recent actions list referring to the changeform of
the object should be quoted.
"""
response = self.client.get(reverse("admin:index"))
link = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(quote(self.pk),)
)
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
def test_deleteconfirmation_link(self):
""" "
The link from the delete confirmation page referring back to the
changeform of the object should be quoted.
"""
url = reverse(
"admin:admin_views_modelwithstringprimarykey_delete", args=(quote(self.pk),)
)
response = self.client.get(url)
        # This URL now comes through reverse(), so it's URL-quoted and
        # iri_to_uri()-encoded.
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", escape(iri_to_uri(quote(self.pk))))
should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_url_conflicts_with_add(self):
"A model with a primary key that ends with add or is `add` should be visible"
        add_model = ModelWithStringPrimaryKey.objects.create(
            pk="i have something to add"
        )
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
add_url = reverse("admin:admin_views_modelwithstringprimarykey_add")
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model2.pk),),
)
self.assertNotEqual(add_url, change_url)
def test_url_conflicts_with_delete(self):
"A model with a primary key that ends with delete should be visible"
delete_model = ModelWithStringPrimaryKey(pk="delete")
delete_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(delete_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_history(self):
"A model with a primary key that ends with history should be visible"
history_model = ModelWithStringPrimaryKey(pk="history")
history_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(history_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_shortcut_view_with_escaping(self):
"'View on site should' work properly with char fields"
model = ModelWithStringPrimaryKey(pk="abc_123")
model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(model.pk),),
)
)
should_contain = '/%s/" class="viewsitelink">' % model.pk
self.assertContains(response, should_contain)
def test_change_view_history_link(self):
"""Object history button link should work and contain the pk value quoted."""
url = reverse(
"admin:%s_modelwithstringprimarykey_change"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected_link = reverse(
"admin:%s_modelwithstringprimarykey_history"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
self.assertContains(
response, '<a href="%s" class="historylink"' % escape(expected_link)
)
def test_redirect_on_add_view_continue_button(self):
"""As soon as an object is added using "Save and continue editing"
button, the user should be redirected to the object's change_view.
In case primary key is a string containing some special characters
like slash or underscore, these characters must be escaped (see #22266)
"""
response = self.client.post(
reverse("admin:admin_views_modelwithstringprimarykey_add"),
{
"string_pk": "123/history",
"_continue": "1", # Save and continue editing
},
)
self.assertEqual(response.status_code, 302) # temporary redirect
self.assertIn("/123_2Fhistory/", response.headers["location"]) # PK is quoted
@override_settings(ROOT_URLCONF="admin_views.urls")
class SecureViewTests(TestCase):
"""
Test behavior of a view protected by the staff_member_required decorator.
"""
def test_secure_view_shows_login_if_not_logged_in(self):
secure_url = reverse("secure_view")
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), secure_url)
)
response = self.client.get(secure_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)
def test_staff_member_required_decorator_works_with_argument(self):
"""
        The staff_member_required decorator works with an argument
        (redirect_field_name).
"""
secure_url = "/test_admin/admin/secure-view2/"
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?myfield=%s" % (reverse("admin:login"), secure_url)
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewUnicodeTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.b1 = Book.objects.create(name="Lærdommer")
cls.p1 = Promo.objects.create(name="<Promo for Lærdommer>", book=cls.b1)
cls.chap1 = Chapter.objects.create(
title="Norske bostaver æøå skaper problemer",
content="<p>Svært frustrerende med UnicodeDecodeErro</p>",
book=cls.b1,
)
cls.chap2 = Chapter.objects.create(
title="Kjærlighet",
content="<p>La kjærligheten til de lidende seire.</p>",
book=cls.b1,
)
cls.chap3 = Chapter.objects.create(
title="Kjærlighet", content="<p>Noe innhold</p>", book=cls.b1
)
cls.chap4 = ChapterXtra1.objects.create(
chap=cls.chap1, xtra="<Xtra(1) Norske bostaver æøå skaper problemer>"
)
cls.chap5 = ChapterXtra1.objects.create(
chap=cls.chap2, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap6 = ChapterXtra1.objects.create(
chap=cls.chap3, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap7 = ChapterXtra2.objects.create(
chap=cls.chap1, xtra="<Xtra(2) Norske bostaver æøå skaper problemer>"
)
cls.chap8 = ChapterXtra2.objects.create(
chap=cls.chap2, xtra="<Xtra(2) Kjærlighet>"
)
cls.chap9 = ChapterXtra2.objects.create(
chap=cls.chap3, xtra="<Xtra(2) Kjærlighet>"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_unicode_edit(self):
"""
A test to ensure that POST on edit_view handles non-ASCII characters.
"""
post_data = {
"name": "Test lærdommer",
# inline data
"chapter_set-TOTAL_FORMS": "6",
"chapter_set-INITIAL_FORMS": "3",
"chapter_set-MAX_NUM_FORMS": "0",
"chapter_set-0-id": self.chap1.pk,
"chapter_set-0-title": "Norske bostaver æøå skaper problemer",
"chapter_set-0-content": (
"<p>Svært frustrerende med UnicodeDecodeError</p>"
),
"chapter_set-1-id": self.chap2.id,
"chapter_set-1-title": "Kjærlighet.",
"chapter_set-1-content": (
"<p>La kjærligheten til de lidende seire.</p>"
),
"chapter_set-2-id": self.chap3.id,
"chapter_set-2-title": "Need a title.",
"chapter_set-2-content": "<p>Newest content</p>",
"chapter_set-3-id": "",
"chapter_set-3-title": "",
"chapter_set-3-content": "",
"chapter_set-4-id": "",
"chapter_set-4-title": "",
"chapter_set-4-content": "",
"chapter_set-5-id": "",
"chapter_set-5-title": "",
"chapter_set-5-content": "",
}
response = self.client.post(
reverse("admin:admin_views_book_change", args=(self.b1.pk,)), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_unicode_delete(self):
"""
The delete_view handles non-ASCII characters
"""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_book_delete", args=(self.b1.pk,))
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 200)
response = self.client.post(delete_url, delete_dict)
self.assertRedirects(response, reverse("admin:admin_views_book_changelist"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewListEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(
name="This Week in Django", release_date=datetime.date.today()
)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso="en", name="English", english_name="English")
response = self.client.get(reverse("admin:admin_views_language_changelist"))
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get(reverse("admin:admin_views_person_changelist"))
        # 2 inputs per object (the field and the hidden id field) = 6
        # 4 management hidden fields = 4
        # 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
        # main form submit button = 1
        # search field and search submit button = 2
        # CSRF field = 2
        # field to track 'select all' across paginated views = 1
        # nav sidebar quick filter field = 1
        # 6 + 4 + 4 + 1 + 2 + 2 + 1 + 1 = 21 inputs
        self.assertContains(response, "<input", count=21)
        # 1 select per object (3) + 1 action dropdown = 4 selects
        self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data, follow=True
)
self.assertEqual(len(response.context["messages"]), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": str(self.per3.pk),
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?gender__exact=1", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?q=john", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
"""
Non-field errors are displayed for each of the forms in the
changelist's formset.
"""
fd1 = FoodDelivery.objects.create(
reference="123", driver="bill", restaurant="thai"
)
fd2 = FoodDelivery.objects.create(
reference="456", driver="bill", restaurant="india"
)
fd3 = FoodDelivery.objects.create(
reference="789", driver="bill", restaurant="pizza"
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
1,
html=True,
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
2,
html=True,
)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
non_form_errors = response.context["cl"].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(
str(non_form_errors),
str(ErrorList(["Grace is not a Zombie"], error_class="nonform")),
)
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_category_changelist"), data
)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Pagination works for list_editable items.
"""
UnorderedObject.objects.create(id=1, name="Unordered object #1")
UnorderedObject.objects.create(id=2, name="Unordered object #2")
UnorderedObject.objects.create(id=3, name="Unordered object #3")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist")
)
self.assertContains(response, "Unordered object #3")
self.assertContains(response, "Unordered object #2")
self.assertNotContains(response, "Unordered object #1")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist") + "?p=2"
)
self.assertNotContains(response, "Unordered object #3")
self.assertNotContains(response, "Unordered object #2")
self.assertContains(response, "Unordered object #1")
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ["3"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
"_selected_action": ["1"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse("admin:admin_views_person_changelist"))
self.assertNotEqual(response.context["cl"].list_editable, ())
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?%s" % IS_POPUP_VAR
)
self.assertEqual(response.context["cl"].list_editable, ())
def test_pk_hidden_fields(self):
"""
        Hidden pk fields aren't displayed in the table body; the
        corresponding human-readable values are displayed instead. The hidden
        pk fields are rendered, but separately (outside the table) and only
        once.
"""
story1 = Story.objects.create(
title="The adventures of Guido", content="Once upon a time in Djangoland..."
)
story2 = Story.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
response = self.client.get(reverse("admin:admin_views_story_changelist"))
        # Only one hidden field, in a separate place from the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
"""Similarly as test_pk_hidden_fields, but when the hidden pk fields are
referenced in list_display_links.
Refs #12475.
"""
story1 = OtherStory.objects.create(
title="The adventures of Guido",
content="Once upon a time in Djangoland...",
)
story2 = OtherStory.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
link1 = reverse("admin:admin_views_otherstory_change", args=(story1.pk,))
link2 = reverse("admin:admin_views_otherstory_change", args=(story2.pk,))
response = self.client.get(reverse("admin:admin_views_otherstory_changelist"))
        # Only one hidden field, in a separate place from the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id),
1,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id),
1,
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSearchTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="John Doe", gender=1)
Person.objects.create(name='John O"Hara', gender=1)
Person.objects.create(name="John O'Hara", gender=1)
cls.t1 = Recommender.objects.create()
cls.t2 = Recommendation.objects.create(the_recommender=cls.t1)
cls.t3 = Recommender.objects.create()
cls.t4 = Recommendation.objects.create(the_recommender=cls.t3)
cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text="Bar")
cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text="Foo")
cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text="Few")
cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text="Bas")
def setUp(self):
self.client.force_login(self.superuser)
def test_search_on_sibling_models(self):
"A search that mentions sibling models"
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned 1 object
self.assertContains(response, "\n1 recommendation\n")
def test_with_fk_to_field(self):
"""
The to_field GET parameter is preserved when a search is performed.
Refs #10918.
"""
response = self.client.get(
reverse("admin:auth_user_changelist") + "?q=joe&%s=id" % TO_FIELD_VAR
)
self.assertContains(response, "\n1 user\n")
self.assertContains(
response,
'<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR,
html=True,
)
def test_exact_matches(self):
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned one object
self.assertContains(response, "\n1 recommendation\n")
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=ba"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 recommendations\n")
def test_beginning_matches(self):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
# confirm the search returned one object
self.assertContains(response, "\n1 person\n")
self.assertContains(response, "Guido")
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=uido"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 persons\n")
self.assertNotContains(response, "Guido")
def test_pluggable_search(self):
PluggableSearchPerson.objects.create(name="Bob", age=10)
PluggableSearchPerson.objects.create(name="Amy", age=20)
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=Bob"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Bob")
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=20"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Amy")
def test_reset_link(self):
"""
Test presence of reset link in search bar ("1 result (_x total_)").
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
# + 1 for total count
with self.assertNumQueries(5):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">6 total</a>)</span>""",
html=True,
)
def test_no_total_count(self):
"""
#8408 -- "Show all" should be displayed instead of the total count if
ModelAdmin.show_full_result_count is False.
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
with self.assertNumQueries(4):
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
html=True,
)
self.assertTrue(response.context["cl"].show_admin_actions)
def test_search_with_spaces(self):
url = reverse("admin:admin_views_person_changelist") + "?q=%s"
tests = [
('"John Doe"', 1),
("'John Doe'", 1),
("John Doe", 0),
('"John Doe" John', 1),
("'John Doe' John", 1),
("John Doe John", 0),
('"John Do"', 1),
("'John Do'", 1),
("'John O'Hara'", 0),
("'John O\\'Hara'", 1),
('"John O"Hara"', 0),
('"John O\\"Hara"', 1),
]
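        # Terms wrapped in single or double quotes are matched as exact
        # phrases, as the hit counts above show; a quote of the same kind
        # inside a phrase must be backslash-escaped.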
for search, hits in tests:
with self.subTest(search=search):
response = self.client.get(url % search)
self.assertContains(response, "\n%s person" % hits)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInheritedInlinesTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_inline(self):
"""
Inline models which inherit from a common parent are correctly handled.
"""
foo_user = "foo username"
bar_user = "bar username"
name_re = re.compile(b'name="(.*?)"')
# test the add case
response = self.client.get(reverse("admin:admin_views_persona_add"))
names = name_re.findall(response.content)
names.remove(b"csrfmiddlewaretoken")
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
        # post data for the add case
post_data = {
"name": "Test Name",
# inline data
"accounts-TOTAL_FORMS": "1",
"accounts-INITIAL_FORMS": "0",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": foo_user,
"accounts-2-TOTAL_FORMS": "1",
"accounts-2-INITIAL_FORMS": "0",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": bar_user,
}
response = self.client.post(reverse("admin:admin_views_persona_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
persona_id = Persona.objects.all()[0].id
foo_id = FooAccount.objects.all()[0].id
bar_id = BarAccount.objects.all()[0].id
# test the edit case
response = self.client.get(
reverse("admin:admin_views_persona_change", args=(persona_id,))
)
names = name_re.findall(response.content)
names.remove(b"csrfmiddlewaretoken")
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
post_data = {
"name": "Test Name",
"accounts-TOTAL_FORMS": "2",
"accounts-INITIAL_FORMS": "1",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": "%s-1" % foo_user,
"accounts-0-account_ptr": str(foo_id),
"accounts-0-persona": str(persona_id),
"accounts-2-TOTAL_FORMS": "2",
"accounts-2-INITIAL_FORMS": "1",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": "%s-1" % bar_user,
"accounts-2-0-account_ptr": str(bar_id),
"accounts-2-0-persona": str(persona_id),
}
response = self.client.post(
reverse("admin:admin_views_persona_change", args=(persona_id,)), post_data
)
self.assertEqual(response.status_code, 302)
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestCustomChangeList(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_custom_changelist(self):
"""
Validate that a custom ChangeList class can be used (#9749)
"""
# Insert some data
post_data = {"name": "First Gadget"}
response = self.client.post(reverse("admin:admin_views_gadget_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
        # Hit the page once to flush the messages out of the message queue
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
# Data is still not visible on the page
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
self.assertNotContains(response, "First Gadget")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestInlineNotEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_GET_parent_add(self):
"""
InlineModelAdmin broken?
"""
response = self.client.get(reverse("admin:admin_views_parent_add"))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
        cls.pks = [EmptyModel.objects.create().id for _ in range(3)]
def setUp(self):
self.client.force_login(self.superuser)
self.super_login = {
REDIRECT_FIELD_NAME: reverse("admin:index"),
"username": "super",
"password": "secret",
}
def test_changelist_view(self):
response = self.client.get(reverse("admin:admin_views_emptymodel_changelist"))
for i in self.pks:
if i > 1:
self.assertContains(response, "Primary key = %s" % i)
else:
self.assertNotContains(response, "Primary key = %s" % i)
def test_changelist_view_count_queries(self):
# create 2 Person objects
Person.objects.create(name="person1", gender=1)
Person.objects.create(name="person2", gender=2)
changelist_url = reverse("admin:admin_views_person_changelist")
# 5 queries are expected: 1 for the session, 1 for the user,
# 2 for the counts and 1 for the objects on the page
with self.assertNumQueries(5):
resp = self.client.get(changelist_url)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"q": "not_in_name"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 0 selected")
self.assertEqual(resp.context["selection_note_all"], "All 0 selected")
with self.assertNumQueries(5):
extra = {"q": "person"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"gender__exact": "1"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 1 selected")
self.assertEqual(resp.context["selection_note_all"], "1 selected")
def test_change_view(self):
for i in self.pks:
url = reverse("admin:admin_views_emptymodel_change", args=(i,))
response = self.client.get(url, follow=True)
if i > 1:
self.assertEqual(response.status_code, 200)
else:
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["empty model with ID “1” doesn’t exist. Perhaps it was deleted?"],
)
def test_add_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
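        # A minimal sketch of the assumed CoverLetter ModelAdmin (the real
        # one lives in this app's admin.py; the deferred field name below is
        # illustrative):
        #
        #     class CoverLetterAdmin(admin.ModelAdmin):
        #         def get_queryset(self, request):
        #             # Changelist rows become deferred instances.
        #             return super().get_queryset(request).defer("date_written")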
self.assertEqual(CoverLetter.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"author": "Candidate, Best",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_coverletter_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = CoverLetter.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
"Candidate, Best</a>” was added successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(ShortMessage.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"content": "What's this SMS thing?",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_shortmessage_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name
sm = ShortMessage.objects.all()[0]
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
"%s</a>” was added successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_add_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(Telegram.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "Urgent telegram",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = Telegram.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
"Urgent telegram</a>” was added successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(Paper.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name
p = Paper.objects.all()[0]
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
"%s</a>” was added successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_edit_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
cl = CoverLetter.objects.create(author="John Doe")
self.assertEqual(CoverLetter.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"author": "John Doe II",
"_save": "Save",
}
url = reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name. Instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
"John Doe II</a>” was changed successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(cl.pk,)),
html=True,
)
# model has no __str__ method
sm = ShortMessage.objects.create(content="This is expensive")
self.assertEqual(ShortMessage.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"content": "Too expensive",
"_save": "Save",
}
url = reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
"%s</a>” was changed successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_edit_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
t = Telegram.objects.create(title="First Telegram")
self.assertEqual(Telegram.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_telegram_change", args=(t.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "Telegram without typo",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_change", args=(t.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name. The instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
"Telegram without typo</a>” was changed successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(t.pk,)),
html=True,
)
# model has no __str__ method
p = Paper.objects.create(title="My Paper Title")
self.assertEqual(Paper.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_paper_change", args=(p.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_change", args=(p.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
"%s</a>” was changed successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_history_view_custom_qs(self):
"""
Custom querysets are considered for the admin history view.
"""
self.client.post(reverse("admin:login"), self.super_login)
FilteredManager.objects.create(pk=1)
FilteredManager.objects.create(pk=2)
response = self.client.get(
reverse("admin:admin_views_filteredmanager_changelist")
)
self.assertContains(response, "PK=1")
self.assertContains(response, "PK=2")
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(1,))
).status_code,
200,
)
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(2,))
).status_code,
200,
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineFileUploadTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
file1 = tempfile.NamedTemporaryFile(suffix=".file1")
file1.write(b"a" * (2**21))
filename = file1.name
file1.close()
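        # NamedTemporaryFile is deleted on close; only its path is needed
        # here, since it's stored as the FileField value without being
        # reopened.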
cls.gallery = Gallery.objects.create(name="Test Gallery")
cls.picture = Picture.objects.create(
name="Test Picture",
image=filename,
gallery=cls.gallery,
)
def setUp(self):
self.client.force_login(self.superuser)
def test_form_has_multipart_enctype(self):
response = self.client.get(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,))
)
self.assertIs(response.context["has_file_field"], True)
self.assertContains(response, MULTIPART_ENCTYPE)
def test_inline_file_upload_edit_validation_error_post(self):
"""
Inline file uploads correctly display prior data (#10002).
"""
post_data = {
"name": "Test Gallery",
"pictures-TOTAL_FORMS": "2",
"pictures-INITIAL_FORMS": "1",
"pictures-MAX_NUM_FORMS": "0",
"pictures-0-id": str(self.picture.id),
"pictures-0-gallery": str(self.gallery.id),
"pictures-0-name": "Test Picture",
"pictures-0-image": "",
"pictures-1-id": "",
"pictures-1-gallery": str(self.gallery.id),
"pictures-1-name": "Test Picture 2",
"pictures-1-image": "",
}
response = self.client.post(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,)),
post_data,
)
self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.collector = Collector.objects.create(pk=1, name="John Fowles")
def setUp(self):
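        # POST data covering every inline formset on the Collector change
        # form: each prefix carries its management form fields (TOTAL_FORMS,
        # INITIAL_FORMS, MAX_NUM_FORMS) plus one group of field entries per
        # form.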
self.post_data = {
"name": "Test Name",
"widget_set-TOTAL_FORMS": "3",
"widget_set-INITIAL_FORMS": "0",
"widget_set-MAX_NUM_FORMS": "0",
"widget_set-0-id": "",
"widget_set-0-owner": "1",
"widget_set-0-name": "",
"widget_set-1-id": "",
"widget_set-1-owner": "1",
"widget_set-1-name": "",
"widget_set-2-id": "",
"widget_set-2-owner": "1",
"widget_set-2-name": "",
"doohickey_set-TOTAL_FORMS": "3",
"doohickey_set-INITIAL_FORMS": "0",
"doohickey_set-MAX_NUM_FORMS": "0",
"doohickey_set-0-owner": "1",
"doohickey_set-0-code": "",
"doohickey_set-0-name": "",
"doohickey_set-1-owner": "1",
"doohickey_set-1-code": "",
"doohickey_set-1-name": "",
"doohickey_set-2-owner": "1",
"doohickey_set-2-code": "",
"doohickey_set-2-name": "",
"grommet_set-TOTAL_FORMS": "3",
"grommet_set-INITIAL_FORMS": "0",
"grommet_set-MAX_NUM_FORMS": "0",
"grommet_set-0-code": "",
"grommet_set-0-owner": "1",
"grommet_set-0-name": "",
"grommet_set-1-code": "",
"grommet_set-1-owner": "1",
"grommet_set-1-name": "",
"grommet_set-2-code": "",
"grommet_set-2-owner": "1",
"grommet_set-2-name": "",
"whatsit_set-TOTAL_FORMS": "3",
"whatsit_set-INITIAL_FORMS": "0",
"whatsit_set-MAX_NUM_FORMS": "0",
"whatsit_set-0-owner": "1",
"whatsit_set-0-index": "",
"whatsit_set-0-name": "",
"whatsit_set-1-owner": "1",
"whatsit_set-1-index": "",
"whatsit_set-1-name": "",
"whatsit_set-2-owner": "1",
"whatsit_set-2-index": "",
"whatsit_set-2-name": "",
"fancydoodad_set-TOTAL_FORMS": "3",
"fancydoodad_set-INITIAL_FORMS": "0",
"fancydoodad_set-MAX_NUM_FORMS": "0",
"fancydoodad_set-0-doodad_ptr": "",
"fancydoodad_set-0-owner": "1",
"fancydoodad_set-0-name": "",
"fancydoodad_set-0-expensive": "on",
"fancydoodad_set-1-doodad_ptr": "",
"fancydoodad_set-1-owner": "1",
"fancydoodad_set-1-name": "",
"fancydoodad_set-1-expensive": "on",
"fancydoodad_set-2-doodad_ptr": "",
"fancydoodad_set-2-owner": "1",
"fancydoodad_set-2-name": "",
"fancydoodad_set-2-expensive": "on",
"category_set-TOTAL_FORMS": "3",
"category_set-INITIAL_FORMS": "0",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "",
"category_set-0-id": "",
"category_set-0-collector": "1",
"category_set-1-order": "",
"category_set-1-id": "",
"category_set-1-collector": "1",
"category_set-2-order": "",
"category_set-2-id": "",
"category_set-2-collector": "1",
}
self.client.force_login(self.superuser)
def test_simple_inline(self):
"A simple model can be saved as inlines"
# First add a new inline
self.post_data["widget_set-0-name"] = "Widget 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
widget_id = Widget.objects.all()[0].id
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="widget_set-0-id"')
# No file or image fields, no enctype on the forms
self.assertIs(response.context["has_file_field"], False)
self.assertNotContains(response, MULTIPART_ENCTYPE)
# Now resave that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
# Now modify that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")
def test_explicit_autofield_inline(self):
"""
A model with an explicit autofield primary key can be saved as inlines.
"""
# First add a new inline
self.post_data["grommet_set-0-name"] = "Grommet 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="grommet_set-0-code"')
# Now resave that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Now modify that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")
def test_char_pk_inline(self):
"A model with a character PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="doohickey_set-0-code"')
# Now resave that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Now modify that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")
def test_integer_pk_inline(self):
"A model with an integer PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="whatsit_set-0-index"')
# Now resave that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Now modify that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")
def test_inherited_inline(self):
"An inherited model can be saved as inlines. Regression for #11042"
# First add a new inline
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
doodad_pk = FancyDoodad.objects.all()[0].pk
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
# Now resave that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
# Now modify that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")
def test_ordered_inline(self):
"""
        An inline with an editable ordering field is updated correctly.
"""
# Create some objects with an initial ordering
Category.objects.create(id=1, order=1, collector=self.collector)
Category.objects.create(id=2, order=2, collector=self.collector)
Category.objects.create(id=3, order=0, collector=self.collector)
Category.objects.create(id=4, order=0, collector=self.collector)
# NB: The order values must be changed so that the items are reordered.
self.post_data.update(
{
"name": "Frederick Clegg",
"category_set-TOTAL_FORMS": "7",
"category_set-INITIAL_FORMS": "4",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "14",
"category_set-0-id": "1",
"category_set-0-collector": "1",
"category_set-1-order": "13",
"category_set-1-id": "2",
"category_set-1-collector": "1",
"category_set-2-order": "1",
"category_set-2-id": "3",
"category_set-2-collector": "1",
"category_set-3-order": "0",
"category_set-3-id": "4",
"category_set-3-collector": "1",
"category_set-4-order": "",
"category_set-4-id": "",
"category_set-4-collector": "1",
"category_set-5-order": "",
"category_set-5-id": "",
"category_set-5-collector": "1",
"category_set-6-order": "",
"category_set-6-id": "",
"category_set-6-collector": "1",
}
)
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(self.collector.category_set.count(), 4)
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(ROOT_URLCONF="admin_views.urls")
class NeverCacheTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
def setUp(self):
self.client.force_login(self.superuser)
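    # get_max_age() parses the max-age directive out of the Cache-Control
    # header: never_cache-decorated views set it to 0, and views that send no
    # cache headers yield None.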
def test_admin_index(self):
"Check the never-cache status of the main index"
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_app_index(self):
"Check the never-cache status of an application index"
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(get_max_age(response), 0)
def test_model_index(self):
"Check the never-cache status of a model index"
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
self.assertEqual(get_max_age(response), 0)
def test_model_add(self):
"Check the never-cache status of a model add page"
response = self.client.get(reverse("admin:admin_views_fabric_add"))
self.assertEqual(get_max_age(response), 0)
def test_model_view(self):
"Check the never-cache status of a model edit page"
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_history(self):
"Check the never-cache status of a model history page"
response = self.client.get(
reverse("admin:admin_views_section_history", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_delete(self):
"Check the never-cache status of a model delete page"
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_login(self):
"Check the never-cache status of login views"
self.client.logout()
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_logout(self):
"Check the never-cache status of logout view"
response = self.client.post(reverse("admin:logout"))
self.assertEqual(get_max_age(response), 0)
def test_password_change(self):
"Check the never-cache status of the password change view"
self.client.logout()
response = self.client.get(reverse("admin:password_change"))
self.assertIsNone(get_max_age(response))
def test_password_change_done(self):
"Check the never-cache status of the password change done view"
response = self.client.get(reverse("admin:password_change_done"))
self.assertIsNone(get_max_age(response))
def test_JS_i18n(self):
"Check the never-cache status of the JavaScript i18n view"
response = self.client.get(reverse("admin:jsi18n"))
self.assertIsNone(get_max_age(response))
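
# For context, a hedged sketch of how a view acquires max-age=0: Django's
# never_cache decorator (django.views.decorators.cache.never_cache) patches
# the Cache-Control header with max-age=0 among other directives, which is
# the observable effect the assertions above check. This standalone view is
# illustrative only and is not wired into any URLconf.
from django.http import HttpResponse
from django.views.decorators.cache import never_cache


@never_cache
def _sketch_uncached_view(request):
    # get_max_age() on this view's response would return 0.
    return HttpResponse("not cached")
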
@override_settings(ROOT_URLCONF="admin_views.urls")
class PrePopulatedTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_prepopulated_on(self):
response = self.client.get(reverse("admin:admin_views_prepopulatedpost_add"))
self.assertContains(response, ""id": "#id_slug"")
        self.assertContains(
            response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]"
        )
        self.assertContains(
            response,
            "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;",
        )
def test_prepopulated_off(self):
response = self.client.get(
reverse("admin:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, "A Long Title")
self.assertNotContains(response, ""id": "#id_slug"")
        self.assertNotContains(
            response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]"
        )
        self.assertNotContains(
            response,
            "&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;",
        )
@override_settings(USE_THOUSAND_SEPARATOR=True)
def test_prepopulated_maxlength_localized(self):
"""
Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
that maxLength (in the JavaScript) is rendered without separators.
"""
response = self.client.get(
reverse("admin:admin_views_prepopulatedpostlargeslug_add")
)
self.assertContains(response, ""maxLength": 1000") # instead of 1,000
def test_view_only_add_form(self):
"""
        PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug',
        which is present in the add view even if
        ModelAdmin.has_change_permission() returns False.
"""
response = self.client.get(reverse("admin7:admin_views_prepopulatedpost_add"))
self.assertContains(response, "data-prepopulated-fields=")
self.assertContains(response, ""id": "#id_slug"")
def test_view_only_change_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That
doesn't break a view-only change view.
"""
response = self.client.get(
reverse("admin7:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, 'data-prepopulated-fields="[]"')
self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug)
@override_settings(ROOT_URLCONF="admin_views.urls")
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ["admin_views"] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
self.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def test_login_button_centered(self):
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
button = self.selenium.find_element(By.CSS_SELECTOR, ".submit-row input")
offset_left = button.get_property("offsetLeft")
offset_right = button.get_property("offsetParent").get_property(
"offsetWidth"
) - (offset_left + button.get_property("offsetWidth"))
# Use assertAlmostEqual to avoid pixel rounding errors.
self.assertAlmostEqual(offset_left, offset_right, delta=3)
def test_prepopulated_fields(self):
"""
The JavaScript-automated prepopulated fields work with the main form
and with stacked and tabular inlines.
Refs #13068, #9264, #9983, #9784.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_mainprepopulated_add")
)
self.wait_for(".select2")
# Main form ----------------------------------------------------------
self.selenium.find_element(By.ID, "id_pubdate").send_keys("2012-02-18")
self.select_option("#id_status", "option two")
self.selenium.find_element(By.ID, "id_name").send_keys(
" the mAin nÀMë and it's awεšomeıııİ"
)
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
slug3 = self.selenium.find_element(By.ID, "id_slug3").get_attribute("value")
self.assertEqual(slug1, "the-main-name-and-its-awesomeiiii-2012-02-18")
self.assertEqual(slug2, "option-two-the-main-name-and-its-awesomeiiii")
self.assertEqual(
slug3, "the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i"
)
# Stacked inlines with fieldsets -------------------------------------
# Initial inline
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-pubdate"
).send_keys("2011-12-17")
self.select_option("#id_relatedprepopulated_set-0-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-name"
).send_keys(" here is a sŤāÇkeð inline ! ")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "here-is-a-stacked-inline-2011-12-17")
self.assertEqual(slug2, "option-one-here-is-a-stacked-inline")
initial_select2_inputs = self.selenium.find_elements(
By.CLASS_NAME, "select2-selection"
)
# Inline formsets have empty/invisible forms.
# Only the 4 visible select2 inputs are initialized.
num_initial_select2_inputs = len(initial_select2_inputs)
self.assertEqual(num_initial_select2_inputs, 4)
# Add an inline
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
0
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 2,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-pubdate"
).send_keys("1999-01-25")
self.select_option("#id_relatedprepopulated_set-1-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-name"
).send_keys(
" now you haVe anöther sŤāÇkeð inline with a very ... "
"loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog "
"text... "
)
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug2"
).get_attribute("value")
# 50 characters maximum for slug1 field
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
# 60 characters maximum for slug2 field
self.assertEqual(
slug2, "option-two-now-you-have-another-stacked-inline-with-a-very-l"
)
# Tabular inlines ----------------------------------------------------
# Initial inline
element = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-status"
)
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-pubdate"
).send_keys("1234-12-07")
self.select_option("#id_relatedprepopulated_set-2-0-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-name"
).send_keys("And now, with a tÃbűlaŘ inline !!!")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "and-now-with-a-tabular-inline-1234-12-07")
self.assertEqual(slug2, "option-two-and-now-with-a-tabular-inline")
# Add an inline
# Button may be outside the browser frame.
element = self.selenium.find_elements(
By.LINK_TEXT, "Add another Related prepopulated"
)[1]
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
element.click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 4,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-pubdate"
).send_keys("1981-08-22")
self.select_option("#id_relatedprepopulated_set-2-1-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-name"
).send_keys(r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters')
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug2"
).get_attribute("value")
self.assertEqual(slug1, "tabular-inline-with-ignored-characters-1981-08-22")
self.assertEqual(slug2, "option-one-tabular-inline-with-ignored-characters")
# Add an inline without an initial inline.
# The button is outside of the browser frame.
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);")
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
2
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 6,
)
# Stacked Inlines without fieldsets ----------------------------------
# Initial inline.
row_id = "id_relatedprepopulated_set-4-0-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("2011-12-12")
self.select_option(f"#{row_id}status", "option one")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" sŤāÇkeð inline ! "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "stacked-inline-2011-12-12")
self.assertEqual(slug2, "option-one")
# Add inline.
self.selenium.find_elements(
By.LINK_TEXT,
"Add another Related prepopulated",
)[3].click()
row_id = "id_relatedprepopulated_set-4-1-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("1999-01-20")
self.select_option(f"#{row_id}status", "option two")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" now you haVe anöther sŤāÇkeð inline with a very loooong "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
self.assertEqual(slug2, "option-two")
# Save and check that everything is properly stored in the database
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.assertEqual(MainPrepopulated.objects.count(), 1)
MainPrepopulated.objects.get(
name=" the mAin nÀMë and it's awεšomeıııİ",
pubdate="2012-02-18",
status="option two",
slug1="the-main-name-and-its-awesomeiiii-2012-02-18",
slug2="option-two-the-main-name-and-its-awesomeiiii",
slug3="the-main-nàmë-and-its-awεšomeıııi",
)
self.assertEqual(RelatedPrepopulated.objects.count(), 6)
RelatedPrepopulated.objects.get(
name=" here is a sŤāÇkeð inline ! ",
pubdate="2011-12-17",
status="option one",
slug1="here-is-a-stacked-inline-2011-12-17",
slug2="option-one-here-is-a-stacked-inline",
)
RelatedPrepopulated.objects.get(
# 75 characters in name field
name=(
" now you haVe anöther sŤāÇkeð inline with a very ... "
"loooooooooooooooooo"
),
pubdate="1999-01-25",
status="option two",
slug1="now-you-have-another-stacked-inline-with-a-very-lo",
slug2="option-two-now-you-have-another-stacked-inline-with-a-very-l",
)
RelatedPrepopulated.objects.get(
name="And now, with a tÃbűlaŘ inline !!!",
pubdate="1234-12-07",
status="option two",
slug1="and-now-with-a-tabular-inline-1234-12-07",
slug2="option-two-and-now-with-a-tabular-inline",
)
RelatedPrepopulated.objects.get(
name=r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
pubdate="1981-08-22",
status="option one",
slug1="tabular-inline-with-ignored-characters-1981-08-22",
slug2="option-one-tabular-inline-with-ignored-characters",
)
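    def _sketch_server_side_slugify(self):
        # Not a test: a hedged aside on the server-side counterpart of the
        # JavaScript prepopulation exercised above. django.utils.text's
        # slugify() applies the analogous lowercasing and normalization,
        # matching the fixture used by this class.
        from django.utils.text import slugify

        return slugify("A Long Title")  # == "a-long-title"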
def test_populate_existing_object(self):
"""
The prepopulation works for existing objects too, as long as
the original field is empty (#19082).
"""
from selenium.webdriver.common.by import By
# Slugs are empty to start with.
item = MainPrepopulated.objects.create(
name=" this is the mAin nÀMë",
pubdate="2012-02-18",
status="option two",
slug1="",
slug2="",
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
object_url = self.live_server_url + reverse(
"admin:admin_views_mainprepopulated_change", args=(item.id,)
)
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" the best")
# The slugs got prepopulated since they were originally empty
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
# Save the object
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" hello")
        # The slugs didn't change since they were originally not empty.
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
def test_collapsible_fieldset(self):
"""
        The 'collapse' class in a fieldsets definition allows showing and
        hiding the appropriate field section.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_add")
)
self.assertFalse(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
self.assertTrue(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.assertEqual(
self.selenium.find_element(By.ID, "fieldsetcollapser0").text, "Hide"
)
def test_selectbox_height_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_pizza_add")
self.selenium.get(url)
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
from_filter_box = self.selenium.find_element(By.ID, "id_toppings_filter")
from_box = self.selenium.find_element(By.ID, "id_toppings_from")
to_filter_box = self.selenium.find_element(By.ID, "id_toppings_filter_selected")
to_box = self.selenium.find_element(By.ID, "id_toppings_to")
self.assertEqual(
(
to_filter_box.get_property("offsetHeight")
+ to_box.get_property("offsetHeight")
),
(
from_filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_selectbox_height_not_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_question_add")
self.selenium.get(url)
from_filter_box = self.selenium.find_element(
By.ID, "id_related_questions_filter"
)
from_box = self.selenium.find_element(By.ID, "id_related_questions_from")
to_filter_box = self.selenium.find_element(
By.ID, "id_related_questions_filter_selected"
)
to_box = self.selenium.find_element(By.ID, "id_related_questions_to")
self.assertEqual(
(
to_filter_box.get_property("offsetHeight")
+ to_box.get_property("offsetHeight")
),
(
from_filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_first_field_focus(self):
"""JavaScript-assisted auto-focus on first usable form field."""
from selenium.webdriver.common.by import By
# First form field has a single widget
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_picture_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_name"),
)
# First form field has a MultiWidget
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_reservation_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_start_date_0"),
)
def test_cancel_delete_confirmation(self):
"Cancelling the deletion of an object takes the user back one page."
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
"""
Cancelling the deletion of an object with relations takes the user back
one page.
"""
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
topping1 = Topping.objects.create(name="Cheddar")
topping2 = Topping.objects.create(name="Mozzarella")
pizza.toppings.add(topping1, topping2)
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
self.assertEqual(Topping.objects.count(), 2)
def test_list_editable_popups(self):
"""
list_editable foreign keys have add/change popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
s1 = Section.objects.create(name="Test section")
Article.objects.create(
title="foo",
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=s1,
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_changelist")
)
# Change popup
self.selenium.find_element(By.ID, "change_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Change section")
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.clear()
name_input.send_keys("<i>edited section</i>")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Hide sidebar.
toggle_button = self.selenium.find_element(
By.CSS_SELECTOR, "#toggle-nav-sidebar"
)
toggle_button.click()
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "<i>edited section</i>")
# Rendered select2 input.
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, "×\n<i>edited section</i>")
# Add popup
self.selenium.find_element(By.ID, "add_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Add section")
self.selenium.find_element(By.ID, "id_name").send_keys("new section")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "new section")
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, "×\nnew section")
def test_inline_uuid_pk_edit_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "change_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(select.first_selected_option.text, str(parent.id))
self.assertEqual(
select.first_selected_option.get_attribute("value"), str(parent.id)
)
def test_inline_uuid_pk_add_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url
+ reverse("admin:admin_views_relatedwithuuidpkmodel_add")
)
self.selenium.find_element(By.ID, "add_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
uuid_id = str(ParentWithUUIDPK.objects.first().id)
self.assertEqual(select.first_selected_option.text, uuid_id)
self.assertEqual(select.first_selected_option.get_attribute("value"), uuid_id)
def test_inline_uuid_pk_delete_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Yes, I’m sure"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
self.assertEqual(select.first_selected_option.text, "---------")
self.assertEqual(select.first_selected_option.get_attribute("value"), "")
def test_inline_with_popup_cancel_delete(self):
"""Clicking ""No, take me back" on a delete popup closes the window."""
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//a[text()="No, take me back"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertEqual(len(self.selenium.window_handles), 1)
def test_list_editable_raw_id_fields(self):
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
parent2 = ParentWithUUIDPK.objects.create(title="test2")
RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_changelist",
current_app=site2.name,
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "lookup_id_form-0-parent").click()
self.wait_for_and_switch_to_popup()
# Select "parent2" in the popup.
self.selenium.find_element(By.LINK_TEXT, str(parent2.pk)).click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# The newly selected pk should appear in the raw id input.
value = self.selenium.find_element(By.ID, "id_form-0-parent").get_attribute(
"value"
)
self.assertEqual(value, str(parent2.pk))
def test_input_element_font(self):
"""
Browsers' default stylesheets override the font of inputs. The admin
adds additional CSS to handle this.
"""
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
element = self.selenium.find_element(By.ID, "id_username")
        # Some browsers quote the fonts, some don't.
fonts = [
font.strip().strip('"')
for font in element.value_of_css_property("font-family").split(",")
]
self.assertEqual(
fonts,
[
"-apple-system",
"BlinkMacSystemFont",
"Segoe UI",
"system-ui",
"Roboto",
"Helvetica Neue",
"Arial",
"sans-serif",
"Apple Color Emoji",
"Segoe UI Emoji",
"Segoe UI Symbol",
"Noto Color Emoji",
],
)
def test_search_input_filtered_page(self):
from selenium.webdriver.common.by import By
Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="Grace Hopper", gender=1, alive=False)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
person_url = reverse("admin:admin_views_person_changelist") + "?q=Gui"
self.selenium.get(self.live_server_url + person_url)
self.assertGreater(
self.selenium.find_element(By.ID, "searchbar").rect["width"],
50,
)
def test_related_popup_index(self):
"""
Create a chain of 'self' related objects via popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
base_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
popup_window_test = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
popup_window_test2 = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.selenium.find_element(By.ID, "id_title").send_keys("test3")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test2)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test3").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test2").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(base_window)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
def test_related_popup_incorrect_close(self):
"""
        Clean up child popups when closing a parent popup.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
test_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
test2_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.assertEqual(len(self.selenium.window_handles), 4)
self.selenium.switch_to.window(test2_window)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.wait_until(lambda d: len(d.window_handles) == 2, 1)
self.assertEqual(len(self.selenium.window_handles), 2)
# Close final popup to clean up test.
self.selenium.switch_to.window(test_window)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.wait_until(lambda d: len(d.window_handles) == 1, 1)
self.selenium.switch_to.window(self.selenium.window_handles[-1])
def test_hidden_fields_small_window(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin:index"),
)
self.selenium.get(self.live_server_url + reverse("admin:admin_views_story_add"))
field_title = self.selenium.find_element(By.CLASS_NAME, "field-title")
current_size = self.selenium.get_window_size()
try:
self.selenium.set_window_size(1024, 768)
self.assertIs(field_title.is_displayed(), False)
self.selenium.set_window_size(767, 575)
self.assertIs(field_title.is_displayed(), False)
finally:
self.selenium.set_window_size(current_size["width"], current_size["height"])
def test_updating_related_objects_updates_fk_selects_except_autocompletes(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
born_country_select_id = "id_born_country"
living_country_select_id = "id_living_country"
living_country_select2_textbox_id = "select2-id_living_country-container"
favorite_country_to_vacation_select_id = "id_favorite_country_to_vacation"
continent_select_id = "id_continent"
def _get_HTML_inside_element_by_id(id_):
return self.selenium.find_element(By.ID, id_).get_attribute("innerHTML")
def _get_text_inside_element_by_selector(selector):
return self.selenium.find_element(By.CSS_SELECTOR, selector).get_attribute(
"innerText"
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_traveler_add")
self.selenium.get(self.live_server_url + add_url)
# Add new Country from the born_country select.
self.selenium.find_element(By.ID, f"add_{born_country_select_id}").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_name").send_keys("Argentina")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("South America")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
""",
)
# Argentina isn't added to the living_country select nor selected by
# the select2 widget.
self.assertEqual(
_get_text_inside_element_by_selector(f"#{living_country_select_id}"), ""
)
self.assertEqual(
_get_text_inside_element_by_selector(
f"#{living_country_select2_textbox_id}"
),
"",
)
# Argentina won't appear because favorite_country_to_vacation field has
# limit_choices_to.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Add new Country from the living_country select.
self.selenium.find_element(By.ID, f"add_{living_country_select_id}").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_name").send_keys("Spain")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("Europe")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
<option value="2">Spain</option>
""",
)
# Spain is added to the living_country select and it's also selected by
# the select2 widget.
self.assertEqual(
_get_text_inside_element_by_selector(f"#{living_country_select_id} option"),
"Spain",
)
self.assertEqual(
_get_text_inside_element_by_selector(
f"#{living_country_select2_textbox_id}"
),
"Spain",
)
# Spain won't appear because favorite_country_to_vacation field has
# limit_choices_to.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Edit second Country created from living_country select.
favorite_select = Select(
self.selenium.find_element(By.ID, living_country_select_id)
)
favorite_select.select_by_visible_text("Spain")
self.selenium.find_element(By.ID, f"change_{living_country_select_id}").click()
self.wait_for_and_switch_to_popup()
favorite_name_input = self.selenium.find_element(By.ID, "id_name")
favorite_name_input.clear()
favorite_name_input.send_keys("Italy")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
<option value="2">Italy</option>
""",
)
# Italy is added to the living_country select and it's also selected by
# the select2 widget.
self.assertEqual(
_get_text_inside_element_by_selector(f"#{living_country_select_id} option"),
"Italy",
)
self.assertEqual(
_get_text_inside_element_by_selector(
f"#{living_country_select2_textbox_id}"
),
"Italy",
)
# favorite_country_to_vacation field has no options.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Add a new Asian country.
self.selenium.find_element(
By.ID, f"add_{favorite_country_to_vacation_select_id}"
).click()
self.wait_for_and_switch_to_popup()
favorite_name_input = self.selenium.find_element(By.ID, "id_name")
favorite_name_input.send_keys("Qatar")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("Asia")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Submit the new Traveler.
self.selenium.find_element(By.CSS_SELECTOR, '[name="_save"]').click()
traveler = Traveler.objects.get()
self.assertEqual(traveler.born_country.name, "Argentina")
self.assertEqual(traveler.living_country.name, "Italy")
self.assertEqual(traveler.favorite_country_to_vacation.name, "Qatar")
def test_redirect_on_add_view_add_another_button(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin7:admin_views_section_add")
self.selenium.get(self.live_server_url + add_url)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.send_keys("Test section 1")
self.selenium.find_element(
By.XPATH, '//input[@value="Save and add another"]'
).click()
self.assertEqual(Section.objects.count(), 1)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.send_keys("Test section 2")
self.selenium.find_element(
By.XPATH, '//input[@value="Save and add another"]'
).click()
self.assertEqual(Section.objects.count(), 2)
def test_redirect_on_add_view_continue_button(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin7:admin_views_section_add")
self.selenium.get(self.live_server_url + add_url)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.send_keys("Test section 1")
self.selenium.find_element(
By.XPATH, '//input[@value="Save and continue editing"]'
).click()
self.assertEqual(Section.objects.count(), 1)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input_value = name_input.get_attribute("value")
self.assertEqual(name_input_value, "Test section 1")
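
# A hedged sketch of the option the ReadonlyTest below exercises: fields
# listed in ModelAdmin.readonly_fields render inside <div class="readonly">
# instead of as form widgets, which is what the assertions check. The field
# names are taken from the tests; this admin class is illustrative and is
# not registered. (django.contrib.admin is imported for the sketch above.)
class _SketchReadonlyAdmin(admin.ModelAdmin):
    readonly_fields = ("posted", "awesomeness_level")
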
@override_settings(ROOT_URLCONF="admin_views.urls")
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_readonly_get(self):
response = self.client.get(reverse("admin:admin_views_post_add"))
self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty form
        # + 1 logout form.
self.assertContains(response, "<input", count=17)
self.assertContains(response, formats.localize(datetime.date.today()))
self.assertContains(response, "<label>Awesomeness level:</label>")
self.assertContains(response, "Very awesome.")
self.assertContains(response, "Unknown coolness.")
self.assertContains(response, "foo")
# Multiline text in a readonly field gets <br> tags
self.assertContains(response, "Multiline<br>test<br>string")
self.assertContains(
response,
'<div class="readonly">Multiline<br>html<br>content</div>',
html=True,
)
self.assertContains(response, "InlineMultiline<br>test<br>string")
self.assertContains(
response,
formats.localize(datetime.date.today() - datetime.timedelta(days=7)),
)
self.assertContains(response, '<div class="form-row field-coolness">')
self.assertContains(response, '<div class="form-row field-awesomeness_level">')
self.assertContains(response, '<div class="form-row field-posted">')
self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">')
self.assertContains(response, '<div class="help"', 3)
self.assertContains(
response,
'<div class="help" id="id_title_helptext">Some help text for the title '
"(with Unicode ŠĐĆŽćžšđ)</div>",
html=True,
)
self.assertContains(
response,
'<div class="help" id="id_content_helptext">Some help text for the content '
"(with Unicode ŠĐĆŽćžšđ)</div>",
html=True,
)
self.assertContains(
response,
'<div class="help">Some help text for the date (with Unicode ŠĐĆŽćžšđ)'
"</div>",
html=True,
)
p = Post.objects.create(
title="I worked on readonly_fields", content="Its good stuff"
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
self.assertContains(response, "%d amount of cool" % p.pk)
def test_readonly_text_field(self):
p = Post.objects.create(
title="Readonly test",
content="test",
readonly_content="test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest",
)
Link.objects.create(
url="http://www.djangoproject.com",
post=p,
readonly_link_content="test\r\nlink",
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
# Checking readonly field.
self.assertContains(response, "test<br><br>test<br><br>test<br><br>test")
# Checking readonly field in inline.
self.assertContains(response, "test<br>link")
def test_readonly_post(self):
data = {
"title": "Django Got Readonly Fields",
"content": "This is an incredible development.",
"link_set-TOTAL_FORMS": "1",
"link_set-INITIAL_FORMS": "0",
"link_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 1)
p = Post.objects.get()
self.assertEqual(p.posted, datetime.date.today())
data["posted"] = "10-8-1990" # some date that's not today
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 2)
p = Post.objects.order_by("-id")[0]
self.assertEqual(p.posted, datetime.date.today())
def test_readonly_manytomany(self):
"Regression test for #13004"
response = self.client.get(reverse("admin:admin_views_pizza_add"))
self.assertEqual(response.status_code, 200)
def test_user_password_change_limited_queryset(self):
su = User.objects.filter(is_superuser=True)[0]
response = self.client.get(
reverse("admin2:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 404)
def test_change_form_renders_correct_null_choice_value(self):
"""
Regression test for #17911.
"""
choice = Choice.objects.create(choice=None)
response = self.client.get(
reverse("admin:admin_views_choice_change", args=(choice.pk,))
)
self.assertContains(
response, '<div class="readonly">No opinion</div>', html=True
)
def _test_readonly_foreignkey_links(self, admin_site):
"""
ForeignKey readonly fields render as links if the target model is
registered in admin.
"""
chapter = Chapter.objects.create(
title="Chapter 1",
content="content",
book=Book.objects.create(name="Book 1"),
)
language = Language.objects.create(iso="_40", name="Test")
obj = ReadOnlyRelatedField.objects.create(
chapter=chapter,
language=language,
user=self.superuser,
)
response = self.client.get(
reverse(
f"{admin_site}:admin_views_readonlyrelatedfield_change", args=(obj.pk,)
),
)
# Related ForeignKey object registered in admin.
user_url = reverse(f"{admin_site}:auth_user_change", args=(self.superuser.pk,))
self.assertContains(
response,
'<div class="readonly"><a href="%s">super</a></div>' % user_url,
html=True,
)
# Related ForeignKey with the string primary key registered in admin.
language_url = reverse(
f"{admin_site}:admin_views_language_change",
args=(quote(language.pk),),
)
self.assertContains(
response,
'<div class="readonly"><a href="%s">_40</a></div>' % language_url,
html=True,
)
# Related ForeignKey object not registered in admin.
self.assertContains(
response, '<div class="readonly">Chapter 1</div>', html=True
)
def test_readonly_foreignkey_links_default_admin_site(self):
self._test_readonly_foreignkey_links("admin")
def test_readonly_foreignkey_links_custom_admin_site(self):
self._test_readonly_foreignkey_links("namespaced_admin")
def test_readonly_manytomany_backwards_ref(self):
"""
Regression test for #16433 - backwards references for related objects
broke if the related field is read-only due to the help_text attribute
"""
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(reverse("admin:admin_views_topping_add"))
self.assertEqual(response.status_code, 200)
def test_readonly_manytomany_forwards_ref(self):
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(
reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
)
self.assertContains(response, "<label>Toppings:</label>", html=True)
self.assertContains(response, '<div class="readonly">Salami</div>', html=True)
def test_readonly_onetoone_backwards_ref(self):
"""
Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
"""
v1 = Villain.objects.create(name="Adam")
pl = Plot.objects.create(name="Test Plot", team_leader=v1, contact=v1)
pd = PlotDetails.objects.create(details="Brand New Plot", plot=pl)
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
pd_url = reverse("admin:admin_views_plotdetails_change", args=(pd.pk,))
self.assertEqual(field.contents(), '<a href="%s">Brand New Plot</a>' % pd_url)
# The reverse relation also works if the OneToOneField is null.
pd.plot = None
pd.save()
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
self.assertEqual(field.contents(), "-") # default empty value
def test_readonly_field_overrides(self):
"""
Regression test for #22087 - ModelForm Meta overrides are ignored by
AdminReadonlyField
"""
p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
response = self.client.get(
reverse("admin:admin_views_fieldoverridepost_change", args=(p.pk,))
)
self.assertContains(
response,
'<div class="help">Overridden help text for the date</div>',
html=True,
)
self.assertContains(
response,
'<label for="id_public">Overridden public label:</label>',
html=True,
)
self.assertNotContains(
response, "Some help text for the date (with Unicode ŠĐĆŽćžšđ)"
)
def test_correct_autoescaping(self):
"""
Make sure that non-field readonly elements are properly autoescaped (#24461)
"""
section = Section.objects.create(name="<a>evil</a>")
response = self.client.get(
reverse("admin:admin_views_section_change", args=(section.pk,))
)
self.assertNotContains(response, "<a>evil</a>", status_code=200)
self.assertContains(response, "<a>evil</a>", status_code=200)
def test_label_suffix_translated(self):
pizza = Pizza.objects.create(name="Americano")
url = reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
with self.settings(LANGUAGE_CODE="fr"):
response = self.client.get(url)
self.assertContains(response, "<label>Toppings\u00A0:</label>", html=True)
@override_settings(ROOT_URLCONF="admin_views.urls")
class LimitChoicesToInAdminTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to_as_callable(self):
"""Test for ticket 2445 changes to admin."""
threepwood = Character.objects.create(
username="threepwood",
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
marley = Character.objects.create(
username="marley",
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
response = self.client.get(reverse("admin:admin_views_stumpjoke_add"))
# The allowed option should appear twice; the limited option should not appear.
self.assertContains(response, threepwood.username, count=2)
self.assertNotContains(response, marley.username)
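
# A hedged sketch of the option RawIdFieldsTest exercises below:
# raw_id_fields swaps the ForeignKey widget for a plain text input plus a
# "lookup_id_<field>" link that opens a filtered changelist popup; any
# limit_choices_to filters are serialized into the popup's query string,
# which the tests then fetch directly. The field names come from the tests;
# this admin class is illustrative and is not registered.
class _SketchSketchAdmin(admin.ModelAdmin):
    raw_id_fields = ("inquisition", "defendant0", "defendant1")
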
@override_settings(ROOT_URLCONF="admin_views.urls")
class RawIdFieldsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to(self):
"""Regression test for 14880"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
Inquisition.objects.create(expected=False, leader=actor, country="Spain")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content
)
self.assertTrue(m) # Got a match
        popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
        # Get the popup and verify the correct objects show up in the
        # resulting page. This step also tests integers, strings, and
        # booleans in the lookup query string; the model defines the
        # inquisition field with a limit_choices_to option that includes a
        # filter on a string field (inquisition__actor__name), a filter on an
        # integer field (inquisition__actor__age), and a filter on a boolean
        # field (inquisition__expected).
response2 = self.client.get(popup_url)
self.assertContains(response2, "Spain")
self.assertNotContains(response2, "England")
def test_limit_choices_to_isnull_false(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content
)
self.assertTrue(m) # Got a match
        popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
        # Get the popup and verify the correct objects show up in the
        # resulting page. This step tests that field__isnull=0 is parsed
        # correctly from the lookup query string; the model defines the
        # defendant0 field with a limit_choices_to option that includes
        # "actor__title__isnull=False".
response2 = self.client.get(popup_url)
self.assertContains(response2, "Kilbraken")
self.assertNotContains(response2, "Palin")
def test_limit_choices_to_isnull_true(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content
)
self.assertTrue(m) # Got a match
        popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
        # Get the popup and verify the correct objects show up in the
        # resulting page. This step tests that field__isnull=1 is parsed
        # correctly from the lookup query string; the model defines the
        # defendant1 field with a limit_choices_to option that includes
        # "actor__title__isnull=True".
response2 = self.client.get(popup_url)
self.assertNotContains(response2, "Kilbraken")
self.assertContains(response2, "Palin")
def test_list_display_method_same_name_as_reverse_accessor(self):
"""
Should be able to use a ModelAdmin method in list_display that has the
same name as a reverse model field ("sketch" in this case).
"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
response = self.client.get(reverse("admin:admin_views_inquisition_changelist"))
self.assertContains(response, "list-display-sketch")
@override_settings(ROOT_URLCONF="admin_views.urls")
class UserAdminTest(TestCase):
"""
Tests user CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
},
)
new_user = User.objects.get(username="newuser")
self.assertRedirects(
response, reverse("admin:auth_user_change", args=(new_user.pk,))
)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_save_continue_editing_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_continue": "1",
},
)
new_user = User.objects.get(username="newuser")
new_user_url = reverse("admin:auth_user_change", args=(new_user.pk,))
self.assertRedirects(response, new_user_url, fetch_redirect_response=False)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
response = self.client.get(new_user_url)
self.assertContains(
response,
'<li class="success">The user “<a href="%s">'
"%s</a>” was added successfully. You may edit it again below.</li>"
% (new_user_url, new_user),
html=True,
)
def test_password_mismatch(self):
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "mismatch",
},
)
self.assertEqual(response.status_code, 200)
self.assertFormError(response.context["adminform"], "password1", [])
self.assertFormError(
response.context["adminform"],
"password2",
["The two password fields didn’t match."],
)
def test_user_fk_add_popup(self):
"""
User addition through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(response, reverse("admin:auth_user_add"))
self.assertContains(
response,
'class="related-widget-wrapper-link add-related" id="add_id_owner"',
)
response = self.client.get(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR
)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR, data, follow=True
)
self.assertContains(response, ""obj": "newuser"")
def test_user_fk_change_popup(self):
"""
User change through a FK popup should return the appropriate JavaScript
response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_change", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_change", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""obj": "newuser"")
self.assertContains(response, ""action": "change"")
def test_user_fk_delete_popup(self):
"""
User deletion through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_delete", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_delete", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
data = {
"post": "yes",
IS_POPUP_VAR: "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""action": "delete"")
def test_save_add_another_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_addanother": "1",
},
)
new_user = User.objects.order_by("-id")[0]
self.assertRedirects(response, reverse("admin:auth_user_add"))
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_user_permission_performance(self):
u = User.objects.all()[0]
# Don't depend on a warm cache, see #17377.
ContentType.objects.clear_cache()
expected_num_queries = 10 if connection.features.uses_savepoints else 8
with self.assertNumQueries(expected_num_queries):
response = self.client.get(reverse("admin:auth_user_change", args=(u.pk,)))
self.assertEqual(response.status_code, 200)
def test_form_url_present_in_context(self):
u = User.objects.all()[0]
response = self.client.get(
reverse("admin3:auth_user_password_change", args=(u.pk,))
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["form_url"], "pony")
@override_settings(ROOT_URLCONF="admin_views.urls")
class GroupAdminTest(TestCase):
"""
Tests group CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
group_count = Group.objects.count()
response = self.client.post(
reverse("admin:auth_group_add"),
{
"name": "newgroup",
},
)
self.assertRedirects(response, reverse("admin:auth_group_changelist"))
self.assertEqual(Group.objects.count(), group_count + 1)
def test_group_permission_performance(self):
g = Group.objects.create(name="test_group")
# Ensure no queries are skipped due to cached content type for Group.
ContentType.objects.clear_cache()
expected_num_queries = 8 if connection.features.uses_savepoints else 6
with self.assertNumQueries(expected_num_queries):
response = self.client.get(reverse("admin:auth_group_change", args=(g.pk,)))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class CSSTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_field_prefix_css_classes(self):
"""
Fields have a CSS class name with a 'field-' prefix.
"""
response = self.client.get(reverse("admin:admin_views_post_add"))
# The main form
self.assertContains(response, 'class="form-row field-title"')
self.assertContains(response, 'class="form-row field-content"')
self.assertContains(response, 'class="form-row field-public"')
self.assertContains(response, 'class="form-row field-awesomeness_level"')
self.assertContains(response, 'class="form-row field-coolness"')
self.assertContains(response, 'class="form-row field-value"')
self.assertContains(response, 'class="form-row"') # The lambda function
# The tabular inline
self.assertContains(response, '<td class="field-url">')
self.assertContains(response, '<td class="field-posted">')
def test_index_css_classes(self):
"""
CSS class names are used for each app and model on the admin index
pages (#17050).
"""
# General index page
response = self.client.get(reverse("admin:index"))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
# App index page
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
def test_app_model_in_form_body_class(self):
"""
        Ensure the app and model tags are correctly read by the change_form
        template.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_list_body_class(self):
"""
        Ensure the app and model tags are correctly read by the change_list
        template.
"""
response = self.client.get(reverse("admin:admin_views_section_changelist"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_delete_confirmation_body_class(self):
"""
        Ensure the app and model tags are correctly read by the
        delete_confirmation template.
"""
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_app_index_body_class(self):
"""
        Ensure the app and model tags are correctly read by the app_index
        template.
"""
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<body class=" dashboard app-admin_views')
def test_app_model_in_delete_selected_confirmation_body_class(self):
"""
        Ensure the app and model tags are correctly read by the
        delete_selected_confirmation template.
"""
action_data = {
ACTION_CHECKBOX_NAME: [self.s1.pk],
"action": "delete_selected",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_section_changelist"), action_data
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_changelist_field_classes(self):
"""
Cells of the change list table should contain the field name in their
class attribute.
"""
Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertContains(response, '<th class="field-name">')
self.assertContains(response, '<td class="field-release_date nowrap">')
self.assertContains(response, '<td class="action-checkbox">')
try:
import docutils
except ImportError:
docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF="admin_views.urls")
@modify_settings(
INSTALLED_APPS={"append": ["django.contrib.admindocs", "django.contrib.flatpages"]}
)
class AdminDocsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_tags(self):
response = self.client.get(reverse("django-admindocs-tags"))
# The builtin tag group exists
self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
# A builtin tag exists in both the index and detail
self.assertContains(
response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#built_in-autoescape">autoescape</a></li>',
html=True,
)
# An app tag exists in both the index and detail
self.assertContains(
response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>',
html=True,
)
# The admin list tag group exists
self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
# An admin list tag exists in both the index and detail
self.assertContains(
response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#admin_list-admin_actions">admin_actions</a></li>',
html=True,
)
def test_filters(self):
response = self.client.get(reverse("django-admindocs-filters"))
# The builtin filter group exists
self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
# A builtin filter exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
self.assertContains(
response, '<li><a href="#built_in-add">add</a></li>', html=True
)
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class ValidXHTMLTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_lang_name_present(self):
with translation.override(None):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertNotContains(response, ' lang=""')
self.assertNotContains(response, ' xml:lang=""')
@override_settings(ROOT_URLCONF="admin_views.urls", USE_THOUSAND_SEPARATOR=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def assert_non_localized_year(self, response, year):
"""
The year is not localized with USE_THOUSAND_SEPARATOR (#15234).
"""
self.assertNotContains(response, formats.number_format(year))
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % date.year)
def assert_contains_month_link(self, response, date):
self.assertContains(
response,
'?release_date__month=%d&release_date__year=%d"'
% (date.month, date.year),
)
def assert_contains_day_link(self, response, date):
self.assertContains(
response,
"?release_date__day=%d&"
'release_date__month=%d&release_date__year=%d"'
% (date.day, date.month, date.year),
)
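    # The three helpers above match the querystrings that date_hierarchy
    # emits, e.g. (illustrative only):
    #
    #   ?release_date__day=30&release_date__month=6&release_date__year=2000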
def test_empty(self):
"""
        No date hierarchy links display for an empty changelist.
"""
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertNotContains(response, "release_date__year=")
self.assertNotContains(response, "release_date__month=")
self.assertNotContains(response, "release_date__day=")
def test_single(self):
"""
        A single day-level date hierarchy appears for a single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
        Day-level links appear for a changelist within a single month.
"""
DATES = (
datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
        Month-level links appear for a changelist within a single year.
"""
DATES = (
datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, "release_date__day=")
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
        Year-level links appear for a year-spanning changelist.
"""
DATES = (
datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
# no day/month-level links
self.assertNotContains(response, "release_date__day=")
self.assertNotContains(response, "release_date__month=")
for date in DATES:
self.assert_contains_year_link(response, date)
# and make sure GET parameters still behave correctly
for date in DATES:
url = "%s?release_date__year=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = "%s?release_date__year=%d&release_date__month=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
date.month,
)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
def test_related_field(self):
questions_data = (
# (posted data, number of answers),
(datetime.date(2001, 1, 30), 0),
(datetime.date(2003, 3, 15), 1),
(datetime.date(2005, 5, 3), 2),
)
for date, answer_count in questions_data:
question = Question.objects.create(posted=date)
for i in range(answer_count):
question.answer_set.create()
response = self.client.get(reverse("admin:admin_views_answer_changelist"))
for date, answer_count in questions_data:
link = '?question__posted__year=%d"' % date.year
if answer_count > 0:
self.assertContains(response, link)
else:
self.assertNotContains(response, link)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomSaveRelatedTests(TestCase):
"""
One can easily customize the way related objects are saved.
Refs #16115.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_should_be_able_to_edit_related_objects_on_add_view(self):
post = {
"child_set-TOTAL_FORMS": "3",
"child_set-INITIAL_FORMS": "0",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-1-name": "Catherine",
}
self.client.post(reverse("admin:admin_views_parent_add"), post)
self.assertEqual(1, Parent.objects.count())
self.assertEqual(2, Child.objects.count())
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_change_view(self):
parent = Parent.objects.create(name="Josh Stone")
paul = Child.objects.create(parent=parent, name="Paul")
catherine = Child.objects.create(parent=parent, name="Catherine")
post = {
"child_set-TOTAL_FORMS": "5",
"child_set-INITIAL_FORMS": "2",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-0-id": paul.id,
"child_set-1-name": "Catherine",
"child_set-1-id": catherine.id,
}
self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.id,)), post
)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
parent = Parent.objects.create(name="Josh Rock")
Child.objects.create(parent=parent, name="Paul")
Child.objects.create(parent=parent, name="Catherine")
post = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": parent.id,
"form-0-name": "Josh Stone",
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_parent_changelist"), post)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewLogoutTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def test_logout(self):
self.client.force_login(self.superuser)
response = self.client.post(reverse("admin:logout"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "registration/logged_out.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:logout"))
self.assertFalse(response.context["has_permission"])
self.assertNotContains(
response, "user-tools"
        )  # The user-tools div shouldn't be visible.
def test_client_logout_url_can_be_used_to_login(self):
response = self.client.post(reverse("admin:logout"))
self.assertEqual(
response.status_code, 302
        )  # We should be redirected to the login page.
        # Follow the redirect and test the results.
response = self.client.post(reverse("admin:logout"), follow=True)
self.assertContains(
response,
'<input type="hidden" name="next" value="%s">' % reverse("admin:index"),
)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:login"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminUserMessageTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def send_message(self, level):
"""
        Helper that sends a POST to the dummy test actions and asserts that a
message with the level has appeared in the response.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_%s" % level,
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="%s">Test %s</li>' % (level, level), html=True
)
@override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request
def test_message_debug(self):
self.send_message("debug")
def test_message_info(self):
self.send_message("info")
def test_message_success(self):
self.send_message("success")
def test_message_warning(self):
self.send_message("warning")
def test_message_error(self):
self.send_message("error")
def test_message_extra_tags(self):
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_extra_tags",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="extra_tag info">Test tags</li>', html=True
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminKeepChangeListFiltersTests(TestCase):
admin_site = site
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
def setUp(self):
self.client.force_login(self.superuser)
def assertURLEqual(self, url1, url2, msg_prefix=""):
"""
Assert that two URLs are equal despite the ordering
of their querystring. Refs #22360.
"""
parsed_url1 = urlparse(url1)
path1 = parsed_url1.path
parsed_qs1 = dict(parse_qsl(parsed_url1.query))
parsed_url2 = urlparse(url2)
path2 = parsed_url2.path
parsed_qs2 = dict(parse_qsl(parsed_url2.query))
for parsed_qs in [parsed_qs1, parsed_qs2]:
if "_changelist_filters" in parsed_qs:
changelist_filters = parsed_qs["_changelist_filters"]
parsed_filters = dict(parse_qsl(changelist_filters))
parsed_qs["_changelist_filters"] = parsed_filters
self.assertEqual(path1, path2)
self.assertEqual(parsed_qs1, parsed_qs2)
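        # For example (a minimal illustration, not executed by these tests):
        #
        #   dict(parse_qsl("b=2&a=1")) == dict(parse_qsl("a=1&b=2"))
        #
        # so querystring ordering never affects the comparison.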
def test_assert_url_equal(self):
# Test equality.
change_user_url = reverse(
"admin:auth_user_change", args=(self.joepublicuser.pk,)
)
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Test inequality.
with self.assertRaises(AssertionError):
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D1%26is_superuser__exact%3D1".format(change_user_url),
)
# Ignore scheme and host.
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Ignore ordering of querystring.
self.assertURLEqual(
"{}?is_staff__exact=0&is_superuser__exact=0".format(
reverse("admin:auth_user_changelist")
),
"{}?is_superuser__exact=0&is_staff__exact=0".format(
reverse("admin:auth_user_changelist")
),
)
# Ignore ordering of _changelist_filters.
self.assertURLEqual(
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_superuser__exact%3D0%26is_staff__exact%3D0".format(change_user_url),
)
def get_changelist_filters(self):
return {
"is_superuser__exact": 0,
"is_staff__exact": 0,
}
def get_changelist_filters_querystring(self):
return urlencode(self.get_changelist_filters())
def get_preserved_filters_querystring(self):
return urlencode(
{"_changelist_filters": self.get_changelist_filters_querystring()}
)
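    # With the filters above, the preserved querystring looks like this
    # (illustrative only):
    #
    #   _changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0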
def get_sample_user_id(self):
return self.joepublicuser.pk
def get_changelist_url(self):
return "%s?%s" % (
reverse("admin:auth_user_changelist", current_app=self.admin_site.name),
self.get_changelist_filters_querystring(),
)
def get_add_url(self, add_preserved_filters=True):
url = reverse("admin:auth_user_add", current_app=self.admin_site.name)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_change_url(self, user_id=None, add_preserved_filters=True):
if user_id is None:
user_id = self.get_sample_user_id()
url = reverse(
"admin:auth_user_change", args=(user_id,), current_app=self.admin_site.name
)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_history_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_history",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def get_delete_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_delete",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def test_changelist_view(self):
response = self.client.get(self.get_changelist_url())
self.assertEqual(response.status_code, 200)
# Check the `change_view` link has the correct querystring.
detail_link = re.search(
'<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
response.content.decode(),
)
self.assertURLEqual(detail_link[1], self.get_change_url())
def test_change_view(self):
# Get the `change_view`.
response = self.client.get(self.get_change_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
# Check the history link.
history_link = re.search(
'<a href="(.*?)" class="historylink">History</a>', response.content.decode()
)
self.assertURLEqual(history_link[1], self.get_history_url())
# Check the delete link.
delete_link = re.search(
'<a href="(.*?)" class="deletelink">Delete</a>', response.content.decode()
)
self.assertURLEqual(delete_link[1], self.get_delete_url())
# Test redirect on "Save".
post_data = {
"username": "joepublic",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
}
post_data["_save"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_changelist_url())
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["_continue"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_change_url())
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["_addanother"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_change_view_without_preserved_filters(self):
response = self.client.get(self.get_change_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_add_view(self):
# Get the `add_view`.
response = self.client.get(self.get_add_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
post_data = {
"username": "dummy",
"password1": "test",
"password2": "test",
}
# Test redirect on "Save".
post_data["_save"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy").pk)
)
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["username"] = "dummy2"
post_data["_continue"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy2").pk)
)
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["username"] = "dummy3"
post_data["_addanother"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_add_view_without_preserved_filters(self):
response = self.client.get(self.get_add_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_delete_view(self):
# Test redirect on "Delete".
response = self.client.post(self.get_delete_url(), {"post": "yes"})
self.assertRedirects(response, self.get_changelist_url())
def test_url_prefix(self):
context = {
"preserved_filters": self.get_preserved_filters_querystring(),
"opts": User._meta,
}
prefixes = ("", "/prefix/", "/後台/")
for prefix in prefixes:
with self.subTest(prefix=prefix), override_script_prefix(prefix):
url = reverse(
"admin:auth_user_changelist", current_app=self.admin_site.name
)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
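        # add_preserved_filters() (django.contrib.admin.templatetags
        # .admin_urls) re-appends the preserved "_changelist_filters"
        # querystring to the bare URL, so the result should equal
        # get_changelist_url() regardless of the script prefix.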
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
admin_site = site2
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestLabelVisibility(TestCase):
"""#11277 -Labels of hidden fields in admin were not hidden."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_all_fields_visible(self):
response = self.client.get(reverse("admin:admin_views_emptymodelvisible_add"))
self.assert_fieldline_visible(response)
self.assert_field_visible(response, "first")
self.assert_field_visible(response, "second")
def test_all_fields_hidden(self):
response = self.client.get(reverse("admin:admin_views_emptymodelhidden_add"))
self.assert_fieldline_hidden(response)
self.assert_field_hidden(response, "first")
self.assert_field_hidden(response, "second")
def test_mixin(self):
response = self.client.get(reverse("admin:admin_views_emptymodelmixin_add"))
self.assert_fieldline_visible(response)
self.assert_field_hidden(response, "first")
self.assert_field_visible(response, "second")
def assert_field_visible(self, response, field_name):
self.assertContains(response, '<div class="fieldBox field-%s">' % field_name)
def assert_field_hidden(self, response, field_name):
self.assertContains(
response, '<div class="fieldBox field-%s hidden">' % field_name
)
def assert_fieldline_visible(self, response):
self.assertContains(response, '<div class="form-row field-first field-second">')
def assert_fieldline_hidden(self, response):
self.assertContains(response, '<div class="form-row hidden')
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewOnSiteTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_add_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data.
        Also, assertFormError() and assertFormSetError() are usable for admin
forms and formsets.
"""
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test1",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": "",
"dependentchild_set-0-family_name": "Test2",
}
response = self.client.post(
reverse("admin:admin_views_parentwithdependentchildren_add"), post_data
)
self.assertFormError(
response.context["adminform"],
"some_required_info",
["This field is required."],
)
self.assertFormError(response.context["adminform"], None, [])
self.assertFormSetError(
response.context["inline_admin_formset"],
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
self.assertFormSetError(
response.context["inline_admin_formset"], None, None, []
)
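        # With form_index=None and field=None, assertFormSetError() checks
        # the formset's non-form errors, hence the empty error list above.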
def test_change_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
        run validation even if validation is contingent on parent form data.
"""
pwdc = ParentWithDependentChildren.objects.create(
some_required_info=6, family_name="Test1"
)
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test2",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": str(pwdc.id),
"dependentchild_set-0-family_name": "Test1",
}
response = self.client.post(
reverse(
"admin:admin_views_parentwithdependentchildren_change", args=(pwdc.id,)
),
post_data,
)
self.assertFormError(
response.context["adminform"],
"some_required_info",
["This field is required."],
)
self.assertFormSetError(
response.context["inline_admin_formset"],
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
def test_check(self):
"The view_on_site value is either a boolean or a callable"
try:
admin = CityAdmin(City, AdminSite())
CityAdmin.view_on_site = True
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = False
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = []
self.assertEqual(
admin.check(),
[
Error(
"The value of 'view_on_site' must be a callable or a boolean "
"value.",
obj=CityAdmin,
id="admin.E025",
),
],
)
finally:
# Restore the original values for the benefit of other tests.
CityAdmin.view_on_site = True
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, 1))
)
def test_true(self):
"The default behavior is followed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_worker_change", args=(self.w1.pk,))
)
self.assertContains(
response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name)
)
def test_missing_get_absolute_url(self):
"None is returned if model doesn't have get_absolute_url"
model_admin = ModelAdmin(Worker, None)
self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
def test_custom_admin_site(self):
model_admin = ModelAdmin(City, customadmin.site)
content_type_pk = ContentType.objects.get_for_model(City).pk
redirect_url = model_admin.get_view_on_site_url(self.c1)
self.assertEqual(
redirect_url,
reverse(
f"{customadmin.site.name}:view_on_site",
kwargs={
"content_type_id": content_type_pk,
"object_id": self.c1.pk,
},
),
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class InlineAdminViewOnSiteTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_state_change", args=(self.s1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_true(self):
"The 'View on site' button is displayed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.r1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
self.assertContains(
response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name)
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class GetFormsetsWithInlinesArgumentTest(TestCase):
"""
#23934 - When adding a new model instance in the admin, the 'obj' argument
of get_formsets_with_inlines() should be None. When changing, it should be
equal to the existing model instance.
    The GetFormsetsArgumentCheckingAdmin ModelAdmin raises an exception
if obj is not None during add_view or obj is None during change_view.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_explicitly_provided_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
def test_implicitly_generated_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSiteFinalCatchAllPatternTests(TestCase):
"""
Verifies the behaviour of the admin catch-all view.
    * Anonymous/non-staff users are redirected to login for all URLs, whether
otherwise valid or not.
* APPEND_SLASH is applied for staff if needed.
* Otherwise Http404.
* Catch-all view disabled via AdminSite.final_catch_all_view.
"""
def test_unknown_url_redirects_login_if_not_authenticated(self):
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), unknown_url)
)
def test_unknown_url_404_if_authenticated(self):
superuser = User.objects.create_superuser(
username="super",
password="secret",
email="[email protected]",
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_known_url_missing_slash_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")[:-1]
response = self.client.get(known_url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_non_admin_url_shares_url_prefix(self):
url = reverse("non_admin")[:-1]
response = self.client.get(url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
def test_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
    def test_unknown_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")[:-1]
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_script_name(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1], SCRIPT_NAME="/prefix/")
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True, FORCE_SCRIPT_NAME="/prefix/")
def test_missing_slash_append_slash_true_force_script_name(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_non_staff_user(self):
user = User.objects.create_user(
username="user",
password="secret",
email="[email protected]",
is_staff=False,
)
self.client.force_login(user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/test_admin/admin/login/?next=/test_admin/admin/admin_views/article",
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_single_model_no_append_slash(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin9:admin_views_actor_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Same tests above with final_catch_all_view=False.
def test_unknown_url_404_if_not_authenticated_without_final_catch_all_view(self):
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_unknown_url_404_if_authenticated_without_final_catch_all_view(self):
superuser = User.objects.create_superuser(
username="super",
password="secret",
email="[email protected]",
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_auth_without_final_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin10:login"), known_url)
)
def test_known_url_missing_slash_redirects_with_slash_if_not_auth_no_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, fetch_redirect_response=False
)
def test_non_admin_url_shares_url_prefix_without_final_catch_all_view(self):
url = reverse("non_admin10")
response = self.client.get(url[:-1])
self.assertRedirects(response, url, status_code=301)
def test_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin10:login"), url))
def test_unknown_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")[:-1]
response = self.client.get(url)
# Matches test_admin/admin10/admin_views/article/<path:object_id>/
self.assertRedirects(
response, url + "/", status_code=301, fetch_redirect_response=False
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url_without_final_catch_all_view(
self,
):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_without_final_catch_all_view(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false_without_final_catch_all_view(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Outside admin.
def test_non_admin_url_404_if_not_authenticated(self):
unknown_url = "/unknown/"
response = self.client.get(unknown_url)
# Does not redirect to the admin login.
self.assertEqual(response.status_code, 404)
import logging
from contextlib import contextmanager
from io import StringIO
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.core import mail
from django.core.exceptions import DisallowedHost, PermissionDenied, SuspiciousOperation
from django.core.files.temp import NamedTemporaryFile
from django.core.management import color
from django.http.multipartparser import MultiPartParserError
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin
from django.utils.log import (
DEFAULT_LOGGING,
AdminEmailHandler,
CallbackFilter,
RequireDebugFalse,
RequireDebugTrue,
ServerFormatter,
)
from django.views.debug import ExceptionReporter
from . import views
from .logconfig import MyEmailBackend
class LoggingFiltersTest(SimpleTestCase):
def test_require_debug_false_filter(self):
"""
Test the RequireDebugFalse filter class.
"""
filter_ = RequireDebugFalse()
with self.settings(DEBUG=True):
self.assertIs(filter_.filter("record is not used"), False)
with self.settings(DEBUG=False):
self.assertIs(filter_.filter("record is not used"), True)
def test_require_debug_true_filter(self):
"""
Test the RequireDebugTrue filter class.
"""
filter_ = RequireDebugTrue()
with self.settings(DEBUG=True):
self.assertIs(filter_.filter("record is not used"), True)
with self.settings(DEBUG=False):
self.assertIs(filter_.filter("record is not used"), False)
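# For reference, DEFAULT_LOGGING wires these filters up roughly like this
# (a sketch; the tests above only exercise filter() directly):
#
#   "filters": {
#       "require_debug_false": {"()": "django.utils.log.RequireDebugFalse"},
#       "require_debug_true": {"()": "django.utils.log.RequireDebugTrue"},
#   },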
class SetupDefaultLoggingMixin:
@classmethod
def setUpClass(cls):
super().setUpClass()
logging.config.dictConfig(DEFAULT_LOGGING)
cls.addClassCleanup(logging.config.dictConfig, settings.LOGGING)
class DefaultLoggingTests(
SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase
):
def test_django_logger(self):
"""
        The 'django' base logger only outputs anything when DEBUG=True.
"""
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), "")
with self.settings(DEBUG=True):
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), "Hey, this is an error.\n")
@override_settings(DEBUG=True)
def test_django_logger_warning(self):
self.logger.warning("warning")
self.assertEqual(self.logger_output.getvalue(), "warning\n")
@override_settings(DEBUG=True)
def test_django_logger_info(self):
self.logger.info("info")
self.assertEqual(self.logger_output.getvalue(), "info\n")
@override_settings(DEBUG=True)
def test_django_logger_debug(self):
self.logger.debug("debug")
self.assertEqual(self.logger_output.getvalue(), "")
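# Note: in DEFAULT_LOGGING the "django" logger runs at INFO level, which is
# why the debug() call above produces no output even with DEBUG=True.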
class LoggingAssertionMixin:
def assertLogsRequest(
self, url, level, msg, status_code, logger="django.request", exc_class=None
):
with self.assertLogs(logger, level) as cm:
try:
self.client.get(url)
except views.UncaughtException:
pass
self.assertEqual(
len(cm.records),
1,
"Wrong number of calls for logger %r in %r level." % (logger, level),
)
record = cm.records[0]
self.assertEqual(record.getMessage(), msg)
self.assertEqual(record.status_code, status_code)
if exc_class:
self.assertIsNotNone(record.exc_info)
self.assertEqual(record.exc_info[0], exc_class)
@override_settings(DEBUG=True, ROOT_URLCONF="logging_tests.urls")
class HandlerLoggingTests(
SetupDefaultLoggingMixin, LoggingAssertionMixin, LoggingCaptureMixin, SimpleTestCase
):
def test_page_found_no_warning(self):
self.client.get("/innocent/")
self.assertEqual(self.logger_output.getvalue(), "")
def test_redirect_no_warning(self):
self.client.get("/redirect/")
self.assertEqual(self.logger_output.getvalue(), "")
def test_page_not_found_warning(self):
self.assertLogsRequest(
url="/does_not_exist/",
level="WARNING",
status_code=404,
msg="Not Found: /does_not_exist/",
)
def test_page_not_found_raised(self):
self.assertLogsRequest(
url="/does_not_exist_raised/",
level="WARNING",
status_code=404,
msg="Not Found: /does_not_exist_raised/",
)
def test_uncaught_exception(self):
self.assertLogsRequest(
url="/uncaught_exception/",
level="ERROR",
status_code=500,
msg="Internal Server Error: /uncaught_exception/",
exc_class=views.UncaughtException,
)
def test_internal_server_error(self):
self.assertLogsRequest(
url="/internal_server_error/",
level="ERROR",
status_code=500,
msg="Internal Server Error: /internal_server_error/",
)
def test_internal_server_error_599(self):
self.assertLogsRequest(
url="/internal_server_error/?status=599",
level="ERROR",
status_code=599,
msg="Unknown Status Code: /internal_server_error/",
)
def test_permission_denied(self):
self.assertLogsRequest(
url="/permission_denied/",
level="WARNING",
status_code=403,
msg="Forbidden (Permission denied): /permission_denied/",
exc_class=PermissionDenied,
)
def test_multi_part_parser_error(self):
self.assertLogsRequest(
url="/multi_part_parser_error/",
level="WARNING",
status_code=400,
msg="Bad request (Unable to parse request body): /multi_part_parser_error/",
exc_class=MultiPartParserError,
)
@override_settings(
DEBUG=True,
USE_I18N=True,
LANGUAGES=[("en", "English")],
MIDDLEWARE=[
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
],
ROOT_URLCONF="logging_tests.urls_i18n",
)
class I18nLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase):
def test_i18n_page_found_no_warning(self):
self.client.get("/exists/")
self.client.get("/en/exists/")
self.assertEqual(self.logger_output.getvalue(), "")
def test_i18n_page_not_found_warning(self):
self.client.get("/this_does_not/")
self.client.get("/en/nor_this/")
self.assertEqual(
self.logger_output.getvalue(),
"Not Found: /this_does_not/\nNot Found: /en/nor_this/\n",
)
class CallbackFilterTest(SimpleTestCase):
def test_sense(self):
f_false = CallbackFilter(lambda r: False)
f_true = CallbackFilter(lambda r: True)
self.assertFalse(f_false.filter("record"))
self.assertTrue(f_true.filter("record"))
def test_passes_on_record(self):
collector = []
def _callback(record):
collector.append(record)
return True
f = CallbackFilter(_callback)
f.filter("a record")
self.assertEqual(collector, ["a record"])
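# CallbackFilter is normally attached via the LOGGING setting; a minimal
# sketch (the filter name is made up for illustration):
#
#   "filters": {
#       "warnings_only": {
#           "()": "django.utils.log.CallbackFilter",
#           "callback": lambda record: record.levelno >= logging.WARNING,
#       },
#   },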
class AdminEmailHandlerTest(SimpleTestCase):
logger = logging.getLogger("django")
request_factory = RequestFactory()
def get_admin_email_handler(self, logger):
# AdminEmailHandler does not get filtered out
# even with DEBUG=True.
return [
h for h in logger.handlers if h.__class__.__name__ == "AdminEmailHandler"
][0]
def test_fail_silently(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
self.assertTrue(admin_email_handler.connection().fail_silently)
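    # AdminEmailHandler.connection() instantiates its email backend with
    # fail_silently=True, so a failure while sending the error report won't
    # itself raise during logging (the behavior asserted above).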
@override_settings(
ADMINS=[("whatever admin", "[email protected]")],
EMAIL_SUBJECT_PREFIX="-SuperAwesomeSubject-",
)
def test_accepts_args(self):
"""
User-supplied arguments and the EMAIL_SUBJECT_PREFIX setting are used
to compose the email subject (#16736).
"""
message = "Custom message that says '%s' and '%s'"
token1 = "ping"
token2 = "pong"
admin_email_handler = self.get_admin_email_handler(self.logger)
# Backup then override original filters
orig_filters = admin_email_handler.filters
try:
admin_email_handler.filters = []
self.logger.error(message, token1, token2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ["[email protected]"])
self.assertEqual(
mail.outbox[0].subject,
"-SuperAwesomeSubject-ERROR: "
"Custom message that says 'ping' and 'pong'",
)
finally:
# Restore original filters
admin_email_handler.filters = orig_filters
@override_settings(
ADMINS=[("whatever admin", "[email protected]")],
EMAIL_SUBJECT_PREFIX="-SuperAwesomeSubject-",
INTERNAL_IPS=["127.0.0.1"],
)
def test_accepts_args_and_request(self):
"""
The subject is also handled if being passed a request object.
"""
message = "Custom message that says '%s' and '%s'"
token1 = "ping"
token2 = "pong"
admin_email_handler = self.get_admin_email_handler(self.logger)
# Backup then override original filters
orig_filters = admin_email_handler.filters
try:
admin_email_handler.filters = []
request = self.request_factory.get("/")
self.logger.error(
message,
token1,
token2,
extra={
"status_code": 403,
"request": request,
},
)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ["[email protected]"])
self.assertEqual(
mail.outbox[0].subject,
"-SuperAwesomeSubject-ERROR (internal IP): "
"Custom message that says 'ping' and 'pong'",
)
finally:
# Restore original filters
admin_email_handler.filters = orig_filters
@override_settings(
ADMINS=[("admin", "[email protected]")],
EMAIL_SUBJECT_PREFIX="",
DEBUG=False,
)
def test_subject_accepts_newlines(self):
"""
Newlines in email reports' subjects are escaped to prevent
AdminErrorHandler from failing (#17281).
"""
message = "Message \r\n with newlines"
expected_subject = "ERROR: Message \\r\\n with newlines"
self.assertEqual(len(mail.outbox), 0)
self.logger.error(message)
self.assertEqual(len(mail.outbox), 1)
self.assertNotIn("\n", mail.outbox[0].subject)
self.assertNotIn("\r", mail.outbox[0].subject)
self.assertEqual(mail.outbox[0].subject, expected_subject)
@override_settings(
ADMINS=[("admin", "[email protected]")],
DEBUG=False,
)
def test_uses_custom_email_backend(self):
"""
Refs #19325
"""
message = "All work and no play makes Jack a dull boy"
admin_email_handler = self.get_admin_email_handler(self.logger)
mail_admins_called = {"called": False}
def my_mail_admins(*args, **kwargs):
connection = kwargs["connection"]
self.assertIsInstance(connection, MyEmailBackend)
mail_admins_called["called"] = True
# Monkeypatches
orig_mail_admins = mail.mail_admins
orig_email_backend = admin_email_handler.email_backend
mail.mail_admins = my_mail_admins
admin_email_handler.email_backend = "logging_tests.logconfig.MyEmailBackend"
try:
self.logger.error(message)
self.assertTrue(mail_admins_called["called"])
finally:
# Revert Monkeypatches
mail.mail_admins = orig_mail_admins
admin_email_handler.email_backend = orig_email_backend
@override_settings(
ADMINS=[("whatever admin", "[email protected]")],
)
def test_emit_non_ascii(self):
"""
#23593 - AdminEmailHandler should allow Unicode characters in the
request.
"""
handler = self.get_admin_email_handler(self.logger)
record = self.logger.makeRecord(
"name", logging.ERROR, "function", "lno", "message", None, None
)
url_path = "/º"
record.request = self.request_factory.get(url_path)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
msg = mail.outbox[0]
self.assertEqual(msg.to, ["[email protected]"])
self.assertEqual(msg.subject, "[Django] ERROR (EXTERNAL IP): message")
self.assertIn("Report at %s" % url_path, msg.body)
@override_settings(
MANAGERS=[("manager", "[email protected]")],
DEBUG=False,
)
def test_customize_send_mail_method(self):
class ManagerEmailHandler(AdminEmailHandler):
def send_mail(self, subject, message, *args, **kwargs):
mail.mail_managers(
subject, message, *args, connection=self.connection(), **kwargs
)
handler = ManagerEmailHandler()
record = self.logger.makeRecord(
"name", logging.ERROR, "function", "lno", "message", None, None
)
self.assertEqual(len(mail.outbox), 0)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ["[email protected]"])
@override_settings(ALLOWED_HOSTS="example.com")
def test_disallowed_host_doesnt_crash(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
old_include_html = admin_email_handler.include_html
# Text email
admin_email_handler.include_html = False
try:
self.client.get("/", headers={"host": "evil.com"})
finally:
admin_email_handler.include_html = old_include_html
# HTML email
admin_email_handler.include_html = True
try:
self.client.get("/", headers={"host": "evil.com"})
finally:
admin_email_handler.include_html = old_include_html
def test_default_exception_reporter_class(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
self.assertEqual(admin_email_handler.reporter_class, ExceptionReporter)
@override_settings(ADMINS=[("A.N.Admin", "[email protected]")])
def test_custom_exception_reporter_is_used(self):
record = self.logger.makeRecord(
"name", logging.ERROR, "function", "lno", "message", None, None
)
record.request = self.request_factory.get("/")
handler = AdminEmailHandler(
reporter_class="logging_tests.logconfig.CustomExceptionReporter"
)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
msg = mail.outbox[0]
self.assertEqual(msg.body, "message\n\ncustom traceback text")
@override_settings(ADMINS=[("admin", "[email protected]")])
def test_emit_no_form_tag(self):
"""HTML email doesn't contain forms."""
handler = AdminEmailHandler(include_html=True)
record = self.logger.makeRecord(
"name",
logging.ERROR,
"function",
"lno",
"message",
None,
None,
)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
msg = mail.outbox[0]
self.assertEqual(msg.subject, "[Django] ERROR: message")
self.assertEqual(len(msg.alternatives), 1)
body_html = str(msg.alternatives[0][0])
self.assertIn('<div id="traceback">', body_html)
self.assertNotIn("<form", body_html)
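# Illustrative sketch (not exercised by these tests): AdminEmailHandler is
# normally attached through the LOGGING setting. The handler and logger
# names below are example choices; the dotted class path is Django's.
EXAMPLE_ADMIN_EMAIL_LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "mail_admins": {
            "level": "ERROR",
            "class": "django.utils.log.AdminEmailHandler",
            "include_html": True,
        },
    },
    "loggers": {
        "django.request": {"handlers": ["mail_admins"], "level": "ERROR"},
    },
}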
class SettingsConfigTest(AdminScriptTestCase):
"""
Accessing settings in a custom logging handler does not trigger
a circular import error.
"""
def setUp(self):
super().setUp()
log_config = """{
'version': 1,
'handlers': {
'custom_handler': {
'level': 'INFO',
'class': 'logging_tests.logconfig.MyHandler',
}
}
}"""
self.write_settings("settings.py", sdict={"LOGGING": log_config})
def test_circular_dependency(self):
        # check is just an example command to trigger settings configuration
out, err = self.run_manage(["check"])
self.assertNoOutput(err)
self.assertOutput(out, "System check identified no issues (0 silenced).")
def dictConfig(config):
dictConfig.called = True
dictConfig.called = False
class SetupConfigureLogging(SimpleTestCase):
"""
Calling django.setup() initializes the logging configuration.
"""
def test_configure_initializes_logging(self):
from django import setup
try:
with override_settings(
LOGGING_CONFIG="logging_tests.tests.dictConfig",
):
setup()
finally:
# Restore logging from settings.
setup()
self.assertTrue(dictConfig.called)
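# Illustrative sketch: LOGGING_CONFIG may name any callable that accepts the
# LOGGING dict; django.setup() imports and calls it once, as the stub above
# demonstrates. The function name here is an example.
def _example_logging_config(config):
    import logging.config

    logging.config.dictConfig(config)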
@override_settings(DEBUG=True, ROOT_URLCONF="logging_tests.urls")
class SecurityLoggerTest(LoggingAssertionMixin, SimpleTestCase):
def test_suspicious_operation_creates_log_message(self):
self.assertLogsRequest(
url="/suspicious/",
level="ERROR",
msg="dubious",
status_code=400,
logger="django.security.SuspiciousOperation",
exc_class=SuspiciousOperation,
)
def test_suspicious_operation_uses_sublogger(self):
self.assertLogsRequest(
url="/suspicious_spec/",
level="ERROR",
msg="dubious",
status_code=400,
logger="django.security.DisallowedHost",
exc_class=DisallowedHost,
)
@override_settings(
ADMINS=[("admin", "[email protected]")],
DEBUG=False,
)
def test_suspicious_email_admins(self):
self.client.get("/suspicious/")
self.assertEqual(len(mail.outbox), 1)
self.assertIn("SuspiciousOperation at /suspicious/", mail.outbox[0].body)
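# Illustrative sketch: SuspiciousOperation subclasses are routed to a
# "django.security.<ExceptionName>" sublogger, so a handler can target one
# class of security event (here DisallowedHost) without catching the rest.
def _example_security_logger():
    return logging.getLogger("django.security.DisallowedHost")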
class SettingsCustomLoggingTest(AdminScriptTestCase):
"""
    Logging defaults are still applied when using a custom callable in
    LOGGING_CONFIG (e.g., logging.config.fileConfig).
"""
def setUp(self):
super().setUp()
logging_conf = """
[loggers]
keys=root
[handlers]
keys=stream
[formatters]
keys=simple
[logger_root]
handlers=stream
[handler_stream]
class=StreamHandler
formatter=simple
args=(sys.stdout,)
[formatter_simple]
format=%(message)s
"""
self.temp_file = NamedTemporaryFile()
self.temp_file.write(logging_conf.encode())
self.temp_file.flush()
self.write_settings(
"settings.py",
sdict={
"LOGGING_CONFIG": '"logging.config.fileConfig"',
"LOGGING": 'r"%s"' % self.temp_file.name,
},
)
def tearDown(self):
self.temp_file.close()
def test_custom_logging(self):
out, err = self.run_manage(["check"])
self.assertNoOutput(err)
self.assertOutput(out, "System check identified no issues (0 silenced).")
class LogFormattersTests(SimpleTestCase):
def test_server_formatter_styles(self):
color_style = color.make_style("")
formatter = ServerFormatter()
formatter.style = color_style
log_msg = "log message"
status_code_styles = [
(200, "HTTP_SUCCESS"),
(100, "HTTP_INFO"),
(304, "HTTP_NOT_MODIFIED"),
(300, "HTTP_REDIRECT"),
(404, "HTTP_NOT_FOUND"),
(400, "HTTP_BAD_REQUEST"),
(500, "HTTP_SERVER_ERROR"),
]
for status_code, style in status_code_styles:
record = logging.makeLogRecord({"msg": log_msg, "status_code": status_code})
self.assertEqual(
formatter.format(record), getattr(color_style, style)(log_msg)
)
record = logging.makeLogRecord({"msg": log_msg})
self.assertEqual(formatter.format(record), log_msg)
def test_server_formatter_default_format(self):
server_time = "2016-09-25 10:20:30"
log_msg = "log message"
logger = logging.getLogger("django.server")
@contextmanager
def patch_django_server_logger():
old_stream = logger.handlers[0].stream
new_stream = StringIO()
logger.handlers[0].stream = new_stream
yield new_stream
logger.handlers[0].stream = old_stream
with patch_django_server_logger() as logger_output:
logger.info(log_msg, extra={"server_time": server_time})
self.assertEqual(
"[%s] %s\n" % (server_time, log_msg), logger_output.getvalue()
)
with patch_django_server_logger() as logger_output:
logger.info(log_msg)
self.assertRegex(
logger_output.getvalue(), r"^\[[/:,\w\s\d]+\] %s\n" % log_msg
)
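# Illustrative sketch: runserver logs through the "django.server" logger with
# a server_time extra, which ServerFormatter renders as the bracketed prefix
# asserted above. The message text here is an example.
def _example_server_log():
    logging.getLogger("django.server").info(
        '"GET / HTTP/1.1" 200', extra={"server_time": "2016-09-25 10:20:30"}
    )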
|
194325f463a9801a583a53b57633aeb9b6efd4c557a31c9ff867de3383b7ea2e | import datetime
from django.core import signing
from django.test import SimpleTestCase, override_settings
from django.test.utils import freeze_time, ignore_warnings
from django.utils.crypto import InvalidAlgorithm
from django.utils.deprecation import RemovedInDjango51Warning
class TestSigner(SimpleTestCase):
def test_signature(self):
"signature() method should generate a signature"
signer = signing.Signer(key="predictable-secret")
signer2 = signing.Signer(key="predictable-secret2")
for s in (
b"hello",
b"3098247:529:087:",
"\u2019".encode(),
):
self.assertEqual(
signer.signature(s),
signing.base64_hmac(
signer.salt + "signer",
s,
"predictable-secret",
algorithm=signer.algorithm,
),
)
self.assertNotEqual(signer.signature(s), signer2.signature(s))
def test_signature_with_salt(self):
signer = signing.Signer(key="predictable-secret", salt="extra-salt")
self.assertEqual(
signer.signature("hello"),
signing.base64_hmac(
"extra-salt" + "signer",
"hello",
"predictable-secret",
algorithm=signer.algorithm,
),
)
self.assertNotEqual(
signing.Signer(key="predictable-secret", salt="one").signature("hello"),
signing.Signer(key="predictable-secret", salt="two").signature("hello"),
)
def test_custom_algorithm(self):
signer = signing.Signer(key="predictable-secret", algorithm="sha512")
self.assertEqual(
signer.signature("hello"),
"Usf3uVQOZ9m6uPfVonKR-EBXjPe7bjMbp3_Fq8MfsptgkkM1ojidN0BxYaT5HAEN1"
"VzO9_jVu7R-VkqknHYNvw",
)
def test_invalid_algorithm(self):
signer = signing.Signer(key="predictable-secret", algorithm="whatever")
msg = "'whatever' is not an algorithm accepted by the hashlib module."
with self.assertRaisesMessage(InvalidAlgorithm, msg):
signer.sign("hello")
def test_sign_unsign(self):
"sign/unsign should be reversible"
signer = signing.Signer(key="predictable-secret")
examples = [
"q;wjmbk;wkmb",
"3098247529087",
"3098247:529:087:",
"jkw osanteuh ,rcuh nthu aou oauh ,ud du",
"\u2019",
]
for example in examples:
signed = signer.sign(example)
self.assertIsInstance(signed, str)
self.assertNotEqual(example, signed)
self.assertEqual(example, signer.unsign(signed))
def test_sign_unsign_non_string(self):
signer = signing.Signer(key="predictable-secret")
values = [
123,
1.23,
True,
datetime.date.today(),
]
for value in values:
with self.subTest(value):
signed = signer.sign(value)
self.assertIsInstance(signed, str)
self.assertNotEqual(signed, value)
self.assertEqual(signer.unsign(signed), str(value))
def test_unsign_detects_tampering(self):
"unsign should raise an exception if the value has been tampered with"
signer = signing.Signer(key="predictable-secret")
value = "Another string"
signed_value = signer.sign(value)
transforms = (
lambda s: s.upper(),
lambda s: s + "a",
lambda s: "a" + s[1:],
lambda s: s.replace(":", ""),
)
self.assertEqual(value, signer.unsign(signed_value))
for transform in transforms:
with self.assertRaises(signing.BadSignature):
signer.unsign(transform(signed_value))
def test_sign_unsign_object(self):
signer = signing.Signer(key="predictable-secret")
tests = [
["a", "list"],
"a string \u2019",
{"a": "dictionary"},
]
for obj in tests:
with self.subTest(obj=obj):
signed_obj = signer.sign_object(obj)
self.assertNotEqual(obj, signed_obj)
self.assertEqual(obj, signer.unsign_object(signed_obj))
signed_obj = signer.sign_object(obj, compress=True)
self.assertNotEqual(obj, signed_obj)
self.assertEqual(obj, signer.unsign_object(signed_obj))
def test_dumps_loads(self):
"dumps and loads be reversible for any JSON serializable object"
objects = [
["a", "list"],
"a string \u2019",
{"a": "dictionary"},
]
for o in objects:
self.assertNotEqual(o, signing.dumps(o))
self.assertEqual(o, signing.loads(signing.dumps(o)))
self.assertNotEqual(o, signing.dumps(o, compress=True))
self.assertEqual(o, signing.loads(signing.dumps(o, compress=True)))
def test_decode_detects_tampering(self):
"loads should raise exception for tampered objects"
transforms = (
lambda s: s.upper(),
lambda s: s + "a",
lambda s: "a" + s[1:],
lambda s: s.replace(":", ""),
)
value = {
"foo": "bar",
"baz": 1,
}
encoded = signing.dumps(value)
self.assertEqual(value, signing.loads(encoded))
for transform in transforms:
with self.assertRaises(signing.BadSignature):
signing.loads(transform(encoded))
def test_works_with_non_ascii_keys(self):
        binary_key = b"\xe7"  # Set some binary (non-ASCII) key
s = signing.Signer(key=binary_key)
self.assertEqual(
"foo:EE4qGC5MEKyQG5msxYA0sBohAxLC0BJf8uRhemh0BGU",
s.sign("foo"),
)
def test_valid_sep(self):
separators = ["/", "*sep*", ","]
for sep in separators:
signer = signing.Signer(key="predictable-secret", sep=sep)
self.assertEqual(
"foo%sjZQoX_FtSO70jX9HLRGg2A_2s4kdDBxz1QoO_OpEQb0" % sep,
signer.sign("foo"),
)
def test_invalid_sep(self):
"""should warn on invalid separator"""
msg = (
"Unsafe Signer separator: %r (cannot be empty or consist of only A-z0-9-_=)"
)
separators = ["", "-", "abc"]
for sep in separators:
with self.assertRaisesMessage(ValueError, msg % sep):
signing.Signer(sep=sep)
def test_verify_with_non_default_key(self):
old_signer = signing.Signer(key="secret")
new_signer = signing.Signer(
key="newsecret", fallback_keys=["othersecret", "secret"]
)
signed = old_signer.sign("abc")
self.assertEqual(new_signer.unsign(signed), "abc")
def test_sign_unsign_multiple_keys(self):
"""The default key is a valid verification key."""
signer = signing.Signer(key="secret", fallback_keys=["oldsecret"])
signed = signer.sign("abc")
self.assertEqual(signer.unsign(signed), "abc")
@override_settings(
SECRET_KEY="secret",
SECRET_KEY_FALLBACKS=["oldsecret"],
)
def test_sign_unsign_ignore_secret_key_fallbacks(self):
old_signer = signing.Signer(key="oldsecret")
signed = old_signer.sign("abc")
signer = signing.Signer(fallback_keys=[])
with self.assertRaises(signing.BadSignature):
signer.unsign(signed)
@override_settings(
SECRET_KEY="secret",
SECRET_KEY_FALLBACKS=["oldsecret"],
)
def test_default_keys_verification(self):
old_signer = signing.Signer(key="oldsecret")
signed = old_signer.sign("abc")
signer = signing.Signer()
self.assertEqual(signer.unsign(signed), "abc")
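# Illustrative sketch: sign_object() serializes the object (JSON by default),
# optionally compresses it, then signs the payload; unsign_object() reverses
# all three steps. The key below is an example value.
def _example_sign_object():
    signer = signing.Signer(key="example-secret")
    token = signer.sign_object({"session": 42}, compress=True)
    assert signer.unsign_object(token) == {"session": 42}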
class TestTimestampSigner(SimpleTestCase):
def test_timestamp_signer(self):
value = "hello"
with freeze_time(123456789):
signer = signing.TimestampSigner(key="predictable-key")
ts = signer.sign(value)
self.assertNotEqual(ts, signing.Signer(key="predictable-key").sign(value))
self.assertEqual(signer.unsign(ts), value)
with freeze_time(123456800):
self.assertEqual(signer.unsign(ts, max_age=12), value)
# max_age parameter can also accept a datetime.timedelta object
self.assertEqual(
signer.unsign(ts, max_age=datetime.timedelta(seconds=11)), value
)
with self.assertRaises(signing.SignatureExpired):
signer.unsign(ts, max_age=10)
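# Illustrative sketch: TimestampSigner appends a base62-encoded timestamp
# before signing, which is what lets unsign() enforce max_age as either an
# int of seconds or a timedelta. The key below is an example value.
def _example_max_age():
    signer = signing.TimestampSigner(key="example-secret")
    token = signer.sign("hello")
    return signer.unsign(token, max_age=datetime.timedelta(minutes=5))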
class TestBase62(SimpleTestCase):
def test_base62(self):
tests = [-(10**10), 10**10, 1620378259, *range(-100, 100)]
for i in tests:
self.assertEqual(i, signing.b62_decode(signing.b62_encode(i)))
class SignerPositionalArgumentsDeprecationTests(SimpleTestCase):
def test_deprecation(self):
msg = "Passing positional arguments to Signer is deprecated."
with self.assertRaisesMessage(RemovedInDjango51Warning, msg):
signing.Signer("predictable-secret")
msg = "Passing positional arguments to TimestampSigner is deprecated."
with self.assertRaisesMessage(RemovedInDjango51Warning, msg):
signing.TimestampSigner("predictable-secret")
@ignore_warnings(category=RemovedInDjango51Warning)
def test_positional_arguments(self):
signer = signing.Signer("secret", "/", "somesalt", "sha1", ["oldsecret"])
signed = signer.sign("xyz")
self.assertEqual(signed, "xyz/zzdO_8rk-NGnm8jNasXRTF2P5kY")
self.assertEqual(signer.unsign(signed), "xyz")
old_signer = signing.Signer("oldsecret", "/", "somesalt", "sha1")
signed = old_signer.sign("xyz")
self.assertEqual(signer.unsign(signed), "xyz")
|
fcb0e58ecf9d43887dcc3813eb416a207ff729b4f2d2aacbad2b69a6754b2cef | import sys
import unittest
from django.conf import settings
from django.contrib import admin
from django.contrib.admindocs import utils, views
from django.contrib.admindocs.views import get_return_data_type, simplify_regex
from django.contrib.sites.models import Site
from django.db import models
from django.db.models import fields
from django.test import SimpleTestCase, modify_settings, override_settings
from django.test.utils import captured_stderr
from django.urls import include, path, reverse
from django.utils.functional import SimpleLazyObject
from .models import Company, Person
from .tests import AdminDocsTestCase, TestDataMixin
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewTests(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_index(self):
response = self.client.get(reverse("django-admindocs-docroot"))
self.assertContains(response, "<h1>Documentation</h1>", html=True)
self.assertContains(
response,
'<h1 id="site-name"><a href="/admin/">Django administration</a></h1>',
)
self.client.logout()
response = self.client.get(reverse("django-admindocs-docroot"), follow=True)
# Should display the login screen
self.assertContains(
response, '<input type="hidden" name="next" value="/admindocs/">', html=True
)
def test_bookmarklets(self):
response = self.client.get(reverse("django-admindocs-bookmarklets"))
self.assertContains(response, "/admindocs/views/")
def test_templatetag_index(self):
response = self.client.get(reverse("django-admindocs-tags"))
self.assertContains(
response, '<h3 id="built_in-extends">extends</h3>', html=True
)
def test_templatefilter_index(self):
response = self.client.get(reverse("django-admindocs-filters"))
self.assertContains(response, '<h3 id="built_in-first">first</h3>', html=True)
def test_view_index(self):
response = self.client.get(reverse("django-admindocs-views-index"))
self.assertContains(
response,
'<h3><a href="/admindocs/views/django.contrib.admindocs.views.'
'BaseAdminDocsView/">/admindocs/</a></h3>',
html=True,
)
self.assertContains(response, "Views by namespace test")
self.assertContains(response, "Name: <code>test:func</code>.")
self.assertContains(
response,
'<h3><a href="/admindocs/views/admin_docs.views.XViewCallableObject/">'
"/xview/callable_object_without_xview/</a></h3>",
html=True,
)
def test_view_index_with_method(self):
"""
Views that are methods are listed correctly.
"""
response = self.client.get(reverse("django-admindocs-views-index"))
self.assertContains(
response,
"<h3>"
'<a href="/admindocs/views/django.contrib.admin.sites.AdminSite.index/">'
"/admin/</a></h3>",
html=True,
)
def test_view_detail(self):
url = reverse(
"django-admindocs-views-detail",
args=["django.contrib.admindocs.views.BaseAdminDocsView"],
)
response = self.client.get(url)
# View docstring
self.assertContains(response, "Base view for admindocs views.")
@override_settings(ROOT_URLCONF="admin_docs.namespace_urls")
def test_namespaced_view_detail(self):
url = reverse(
"django-admindocs-views-detail", args=["admin_docs.views.XViewClass"]
)
response = self.client.get(url)
self.assertContains(response, "<h1>admin_docs.views.XViewClass</h1>")
def test_view_detail_illegal_import(self):
url = reverse(
"django-admindocs-views-detail",
args=["urlpatterns_reverse.nonimported_module.view"],
)
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
def test_view_detail_as_method(self):
"""
Views that are methods can be displayed.
"""
url = reverse(
"django-admindocs-views-detail",
args=["django.contrib.admin.sites.AdminSite.index"],
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_model_index(self):
response = self.client.get(reverse("django-admindocs-models-index"))
self.assertContains(
response,
'<h2 id="app-auth">Authentication and Authorization (django.contrib.auth)'
"</h2>",
html=True,
)
def test_template_detail(self):
response = self.client.get(
reverse(
"django-admindocs-templates", args=["admin_doc/template_detail.html"]
)
)
self.assertContains(
response,
"<h1>Template: <q>admin_doc/template_detail.html</q></h1>",
html=True,
)
def test_missing_docutils(self):
utils.docutils_is_available = False
try:
response = self.client.get(reverse("django-admindocs-docroot"))
self.assertContains(
response,
"<h3>The admin documentation system requires Python’s "
'<a href="https://docutils.sourceforge.io/">docutils</a> '
"library.</h3>"
"<p>Please ask your administrators to install "
'<a href="https://docutils.sourceforge.io/">docutils</a>.</p>',
html=True,
)
self.assertContains(
response,
'<h1 id="site-name"><a href="/admin/">Django administration</a></h1>',
)
finally:
utils.docutils_is_available = True
@modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"})
@override_settings(SITE_ID=None) # will restore SITE_ID after the test
def test_no_sites_framework(self):
"""
        Without the sites framework, the view should not access SITE_ID or
        Site objects. Deleting settings is fine here as UserSettingsHolder
        is used.
"""
Site.objects.all().delete()
del settings.SITE_ID
response = self.client.get(reverse("django-admindocs-views-index"))
self.assertContains(response, "View documentation")
def test_callable_urlconf(self):
"""
Index view should correctly resolve view patterns when ROOT_URLCONF is
not a string.
"""
def urlpatterns():
return (
path("admin/doc/", include("django.contrib.admindocs.urls")),
path("admin/", admin.site.urls),
)
with self.settings(ROOT_URLCONF=SimpleLazyObject(urlpatterns)):
response = self.client.get(reverse("django-admindocs-views-index"))
self.assertEqual(response.status_code, 200)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewDefaultEngineOnly(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_template_detail_path_traversal(self):
cases = ["/etc/passwd", "../passwd"]
for fpath in cases:
with self.subTest(path=fpath):
response = self.client.get(
reverse("django-admindocs-templates", args=[fpath]),
)
self.assertEqual(response.status_code, 400)
@override_settings(
TEMPLATES=[
{
"NAME": "ONE",
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
},
{
"NAME": "TWO",
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
},
]
)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewWithMultipleEngines(AdminDocViewTests):
def test_templatefilter_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse("django-admindocs-filters"))
self.assertContains(response, "<title>Template filters</title>", html=True)
def test_templatetag_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse("django-admindocs-tags"))
self.assertContains(response, "<title>Template tags</title>", html=True)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class TestModelDetailView(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
with captured_stderr() as self.docutils_stderr:
self.response = self.client.get(
reverse("django-admindocs-models-detail", args=["admin_docs", "Person"])
)
def test_method_excludes(self):
"""
Methods that begin with strings defined in
``django.contrib.admindocs.views.MODEL_METHODS_EXCLUDE``
shouldn't be displayed in the admin docs.
"""
self.assertContains(self.response, "<td>get_full_name</td>")
self.assertNotContains(self.response, "<td>_get_full_name</td>")
self.assertNotContains(self.response, "<td>add_image</td>")
self.assertNotContains(self.response, "<td>delete_image</td>")
self.assertNotContains(self.response, "<td>set_status</td>")
self.assertNotContains(self.response, "<td>save_changes</td>")
def test_methods_with_arguments(self):
"""
        Methods that take arguments should also be displayed.
"""
self.assertContains(self.response, "<h3>Methods with arguments</h3>")
self.assertContains(self.response, "<td>rename_company</td>")
self.assertContains(self.response, "<td>dummy_function</td>")
self.assertContains(self.response, "<td>suffix_company_name</td>")
def test_methods_with_arguments_display_arguments(self):
"""
Methods with arguments should have their arguments displayed.
"""
self.assertContains(self.response, "<td>new_name</td>")
def test_methods_with_arguments_display_arguments_default_value(self):
"""
Methods with keyword arguments should have their arguments displayed.
"""
self.assertContains(self.response, "<td>suffix='ltd'</td>")
def test_methods_with_multiple_arguments_display_arguments(self):
"""
Methods with multiple arguments should have all their arguments
        displayed, omitting 'self'.
"""
self.assertContains(
self.response, "<td>baz, rox, *some_args, **some_kwargs</td>"
)
def test_instance_of_property_methods_are_displayed(self):
"""Model properties are displayed as fields."""
self.assertContains(self.response, "<td>a_property</td>")
def test_instance_of_cached_property_methods_are_displayed(self):
"""Model cached properties are displayed as fields."""
self.assertContains(self.response, "<td>a_cached_property</td>")
def test_method_data_types(self):
company = Company.objects.create(name="Django")
person = Person.objects.create(
first_name="Human", last_name="User", company=company
)
self.assertEqual(
get_return_data_type(person.get_status_count.__name__), "Integer"
)
self.assertEqual(get_return_data_type(person.get_groups_list.__name__), "List")
def test_descriptions_render_correctly(self):
"""
The ``description`` field should render correctly for each field type.
"""
# help text in fields
self.assertContains(
self.response, "<td>first name - The person's first name</td>"
)
self.assertContains(
self.response, "<td>last name - The person's last name</td>"
)
# method docstrings
self.assertContains(self.response, "<p>Get the full name of the person</p>")
link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
markup = "<p>the related %s object</p>"
company_markup = markup % (link % ("admin_docs.company", "admin_docs.Company"))
# foreign keys
self.assertContains(self.response, company_markup)
# foreign keys with help text
self.assertContains(self.response, "%s\n - place of work" % company_markup)
# many to many fields
self.assertContains(
self.response,
"number of related %s objects"
% (link % ("admin_docs.group", "admin_docs.Group")),
)
self.assertContains(
self.response,
"all related %s objects"
% (link % ("admin_docs.group", "admin_docs.Group")),
)
# "raw" and "include" directives are disabled
self.assertContains(
self.response,
"<p>"raw" directive disabled.</p>",
)
self.assertContains(
self.response, ".. raw:: html\n :file: admin_docs/evilfile.txt"
)
self.assertContains(
self.response,
"<p>"include" directive disabled.</p>",
)
self.assertContains(self.response, ".. include:: admin_docs/evilfile.txt")
out = self.docutils_stderr.getvalue()
self.assertIn('"raw" directive disabled', out)
self.assertIn('"include" directive disabled', out)
def test_model_with_many_to_one(self):
link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
response = self.client.get(
reverse("django-admindocs-models-detail", args=["admin_docs", "company"])
)
self.assertContains(
response,
"number of related %s objects"
% (link % ("admin_docs.person", "admin_docs.Person")),
)
self.assertContains(
response,
"all related %s objects"
% (link % ("admin_docs.person", "admin_docs.Person")),
)
def test_model_with_no_backward_relations_render_only_relevant_fields(self):
"""
A model with ``related_name`` of `+` shouldn't show backward
relationship links.
"""
response = self.client.get(
reverse("django-admindocs-models-detail", args=["admin_docs", "family"])
)
fields = response.context_data.get("fields")
self.assertEqual(len(fields), 2)
def test_model_docstring_renders_correctly(self):
summary = (
'<h2 class="subhead"><p>Stores information about a person, related to '
'<a class="reference external" href="/admindocs/models/myapp.company/">'
"myapp.Company</a>.</p></h2>"
)
subheading = "<p><strong>Notes</strong></p>"
body = (
'<p>Use <tt class="docutils literal">save_changes()</tt> when saving this '
"object.</p>"
)
model_body = (
'<dl class="docutils"><dt><tt class="'
'docutils literal">company</tt></dt><dd>Field storing <a class="'
'reference external" href="/admindocs/models/myapp.company/">'
"myapp.Company</a> where the person works.</dd></dl>"
)
self.assertContains(self.response, "DESCRIPTION")
self.assertContains(self.response, summary, html=True)
self.assertContains(self.response, subheading, html=True)
self.assertContains(self.response, body, html=True)
self.assertContains(self.response, model_body, html=True)
def test_model_detail_title(self):
self.assertContains(self.response, "<h1>admin_docs.Person</h1>", html=True)
def test_app_not_found(self):
response = self.client.get(
reverse("django-admindocs-models-detail", args=["doesnotexist", "Person"])
)
self.assertEqual(response.context["exception"], "App 'doesnotexist' not found")
self.assertEqual(response.status_code, 404)
def test_model_not_found(self):
response = self.client.get(
reverse(
"django-admindocs-models-detail", args=["admin_docs", "doesnotexist"]
)
)
self.assertEqual(
response.context["exception"],
"Model 'doesnotexist' not found in app 'admin_docs'",
)
self.assertEqual(response.status_code, 404)
class CustomField(models.Field):
description = "A custom field type"
class DescriptionLackingField(models.Field):
pass
class TestFieldType(unittest.TestCase):
def test_field_name(self):
with self.assertRaises(AttributeError):
views.get_readable_field_data_type("NotAField")
def test_builtin_fields(self):
self.assertEqual(
views.get_readable_field_data_type(fields.BooleanField()),
"Boolean (Either True or False)",
)
def test_char_fields(self):
self.assertEqual(
views.get_readable_field_data_type(fields.CharField(max_length=255)),
"String (up to 255)",
)
self.assertEqual(
views.get_readable_field_data_type(fields.CharField()),
"String (unlimited)",
)
def test_custom_fields(self):
self.assertEqual(
views.get_readable_field_data_type(CustomField()), "A custom field type"
)
self.assertEqual(
views.get_readable_field_data_type(DescriptionLackingField()),
"Field of type: DescriptionLackingField",
)
class AdminDocViewFunctionsTests(SimpleTestCase):
def test_simplify_regex(self):
tests = (
# Named and unnamed groups.
(r"^(?P<a>\w+)/b/(?P<c>\w+)/$", "/<a>/b/<c>/"),
(r"^(?P<a>\w+)/b/(?P<c>\w+)$", "/<a>/b/<c>"),
(r"^(?P<a>\w+)/b/(?P<c>\w+)", "/<a>/b/<c>"),
(r"^(?P<a>\w+)/b/(\w+)$", "/<a>/b/<var>"),
(r"^(?P<a>\w+)/b/(\w+)", "/<a>/b/<var>"),
(r"^(?P<a>\w+)/b/((x|y)\w+)$", "/<a>/b/<var>"),
(r"^(?P<a>\w+)/b/((x|y)\w+)", "/<a>/b/<var>"),
(r"^(?P<a>(x|y))/b/(?P<c>\w+)$", "/<a>/b/<c>"),
(r"^(?P<a>(x|y))/b/(?P<c>\w+)", "/<a>/b/<c>"),
(r"^(?P<a>(x|y))/b/(?P<c>\w+)ab", "/<a>/b/<c>ab"),
(r"^(?P<a>(x|y)(\(|\)))/b/(?P<c>\w+)ab", "/<a>/b/<c>ab"),
# Non-capturing groups.
(r"^a(?:\w+)b", "/ab"),
(r"^a(?:(x|y))", "/a"),
(r"^(?:\w+(?:\w+))a", "/a"),
(r"^a(?:\w+)/b(?:\w+)", "/a/b"),
(r"(?P<a>\w+)/b/(?:\w+)c(?:\w+)", "/<a>/b/c"),
(r"(?P<a>\w+)/b/(\w+)/(?:\w+)c(?:\w+)", "/<a>/b/<var>/c"),
# Single and repeated metacharacters.
(r"^a", "/a"),
(r"^^a", "/a"),
(r"^^^a", "/a"),
(r"a$", "/a"),
(r"a$$", "/a"),
(r"a$$$", "/a"),
(r"a?", "/a"),
(r"a??", "/a"),
(r"a???", "/a"),
(r"a*", "/a"),
(r"a**", "/a"),
(r"a***", "/a"),
(r"a+", "/a"),
(r"a++", "/a"),
(r"a+++", "/a"),
(r"\Aa", "/a"),
(r"\A\Aa", "/a"),
(r"\A\A\Aa", "/a"),
(r"a\Z", "/a"),
(r"a\Z\Z", "/a"),
(r"a\Z\Z\Z", "/a"),
(r"\ba", "/a"),
(r"\b\ba", "/a"),
(r"\b\b\ba", "/a"),
(r"a\B", "/a"),
(r"a\B\B", "/a"),
(r"a\B\B\B", "/a"),
# Multiple mixed metacharacters.
(r"^a/?$", "/a/"),
(r"\Aa\Z", "/a"),
(r"\ba\B", "/a"),
# Escaped single metacharacters.
(r"\^a", r"/^a"),
(r"\\^a", r"/\\a"),
(r"\\\^a", r"/\\^a"),
(r"\\\\^a", r"/\\\\a"),
(r"\\\\\^a", r"/\\\\^a"),
(r"a\$", r"/a$"),
(r"a\\$", r"/a\\"),
(r"a\\\$", r"/a\\$"),
(r"a\\\\$", r"/a\\\\"),
(r"a\\\\\$", r"/a\\\\$"),
(r"a\?", r"/a?"),
(r"a\\?", r"/a\\"),
(r"a\\\?", r"/a\\?"),
(r"a\\\\?", r"/a\\\\"),
(r"a\\\\\?", r"/a\\\\?"),
(r"a\*", r"/a*"),
(r"a\\*", r"/a\\"),
(r"a\\\*", r"/a\\*"),
(r"a\\\\*", r"/a\\\\"),
(r"a\\\\\*", r"/a\\\\*"),
(r"a\+", r"/a+"),
(r"a\\+", r"/a\\"),
(r"a\\\+", r"/a\\+"),
(r"a\\\\+", r"/a\\\\"),
(r"a\\\\\+", r"/a\\\\+"),
(r"\\Aa", r"/\Aa"),
(r"\\\Aa", r"/\\a"),
(r"\\\\Aa", r"/\\\Aa"),
(r"\\\\\Aa", r"/\\\\a"),
(r"\\\\\\Aa", r"/\\\\\Aa"),
(r"a\\Z", r"/a\Z"),
(r"a\\\Z", r"/a\\"),
(r"a\\\\Z", r"/a\\\Z"),
(r"a\\\\\Z", r"/a\\\\"),
(r"a\\\\\\Z", r"/a\\\\\Z"),
# Escaped mixed metacharacters.
(r"^a\?$", r"/a?"),
(r"^a\\?$", r"/a\\"),
(r"^a\\\?$", r"/a\\?"),
(r"^a\\\\?$", r"/a\\\\"),
(r"^a\\\\\?$", r"/a\\\\?"),
# Adjacent escaped metacharacters.
(r"^a\?\$", r"/a?$"),
(r"^a\\?\\$", r"/a\\\\"),
(r"^a\\\?\\\$", r"/a\\?\\$"),
(r"^a\\\\?\\\\$", r"/a\\\\\\\\"),
(r"^a\\\\\?\\\\\$", r"/a\\\\?\\\\$"),
# Complex examples with metacharacters and (un)named groups.
(r"^\b(?P<slug>\w+)\B/(\w+)?", "/<slug>/<var>"),
(r"^\A(?P<slug>\w+)\Z", "/<slug>"),
)
for pattern, output in tests:
with self.subTest(pattern=pattern):
self.assertEqual(simplify_regex(pattern), output)
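# Illustrative sketch of the convention the table above exercises: named
# groups render as <name>, unnamed groups as <var>, and anchors, escapes,
# and quantifiers are stripped. The pattern below is an example.
def _example_simplify_regex():
    assert simplify_regex(r"^(?P<slug>\w+)/(\d+)/$") == "/<slug>/<var>/"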
|
f6387103e1cc4dcf77bcf94e77607ce359b37613ccc77bdb41039ccae3b8dc76 | from asgiref.sync import iscoroutinefunction, markcoroutinefunction
from django.http import Http404, HttpResponse
from django.template import engines
from django.template.response import TemplateResponse
from django.utils.decorators import (
async_only_middleware,
sync_and_async_middleware,
sync_only_middleware,
)
log = []
class BaseMiddleware:
def __init__(self, get_response):
self.get_response = get_response
if iscoroutinefunction(self.get_response):
markcoroutinefunction(self)
def __call__(self, request):
return self.get_response(request)
class ProcessExceptionMiddleware(BaseMiddleware):
def process_exception(self, request, exception):
return HttpResponse("Exception caught")
@async_only_middleware
class AsyncProcessExceptionMiddleware(BaseMiddleware):
async def process_exception(self, request, exception):
return HttpResponse("Exception caught")
class ProcessExceptionLogMiddleware(BaseMiddleware):
def process_exception(self, request, exception):
log.append("process-exception")
class ProcessExceptionExcMiddleware(BaseMiddleware):
def process_exception(self, request, exception):
raise Exception("from process-exception")
class ProcessViewMiddleware(BaseMiddleware):
def process_view(self, request, view_func, view_args, view_kwargs):
return HttpResponse("Processed view %s" % view_func.__name__)
@async_only_middleware
class AsyncProcessViewMiddleware(BaseMiddleware):
async def process_view(self, request, view_func, view_args, view_kwargs):
return HttpResponse("Processed view %s" % view_func.__name__)
class ProcessViewNoneMiddleware(BaseMiddleware):
def process_view(self, request, view_func, view_args, view_kwargs):
log.append("processed view %s" % view_func.__name__)
return None
class ProcessViewTemplateResponseMiddleware(BaseMiddleware):
def process_view(self, request, view_func, view_args, view_kwargs):
template = engines["django"].from_string(
"Processed view {{ view }}{% for m in mw %}\n{{ m }}{% endfor %}"
)
return TemplateResponse(
request,
template,
{"mw": [self.__class__.__name__], "view": view_func.__name__},
)
class TemplateResponseMiddleware(BaseMiddleware):
def process_template_response(self, request, response):
response.context_data["mw"].append(self.__class__.__name__)
return response
@async_only_middleware
class AsyncTemplateResponseMiddleware(BaseMiddleware):
async def process_template_response(self, request, response):
response.context_data["mw"].append(self.__class__.__name__)
return response
class LogMiddleware(BaseMiddleware):
def __call__(self, request):
response = self.get_response(request)
log.append((response.status_code, response.content))
return response
class NoTemplateResponseMiddleware(BaseMiddleware):
def process_template_response(self, request, response):
return None
@async_only_middleware
class AsyncNoTemplateResponseMiddleware(BaseMiddleware):
async def process_template_response(self, request, response):
return None
class NotFoundMiddleware(BaseMiddleware):
def __call__(self, request):
raise Http404("not found")
class PaymentMiddleware(BaseMiddleware):
def __call__(self, request):
response = self.get_response(request)
response.status_code = 402
return response
@async_only_middleware
def async_payment_middleware(get_response):
async def middleware(request):
response = await get_response(request)
response.status_code = 402
return response
return middleware
@sync_and_async_middleware
class SyncAndAsyncMiddleware(BaseMiddleware):
pass
@sync_only_middleware
class DecoratedPaymentMiddleware(PaymentMiddleware):
pass
class NotSyncOrAsyncMiddleware(BaseMiddleware):
"""Middleware that is deliberately neither sync or async."""
sync_capable = False
async_capable = False
def __call__(self, request):
return self.get_response(request)
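# Illustrative sketch: when loading middleware, Django reads the
# sync_capable/async_capable class attributes (defaulting to True and False
# respectively) to decide whether calls must be adapted between the sync and
# async paths; the decorators used above simply set these flags.
class ExampleDualCapableMiddleware(BaseMiddleware):
    # Equivalent to decorating with @sync_and_async_middleware.
    sync_capable = True
    async_capable = True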
|
6e1e79f7c35bffedef171eab6849467dd00a74d663b2c19d705b8572255d1a9a | import decimal
import enum
import json
import unittest
import uuid
from django import forms
from django.contrib.admin.utils import display_for_field
from django.core import checks, exceptions, serializers, validators
from django.core.exceptions import FieldError
from django.core.management import call_command
from django.db import IntegrityError, connection, models
from django.db.models.expressions import Exists, OuterRef, RawSQL, Value
from django.db.models.functions import Cast, JSONObject, Upper
from django.test import TransactionTestCase, override_settings, skipUnlessDBFeature
from django.test.utils import isolate_apps
from django.utils import timezone
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase, PostgreSQLWidgetTestCase
from .models import (
ArrayEnumModel,
ArrayFieldSubclass,
CharArrayModel,
DateTimeArrayModel,
IntegerArrayModel,
NestedIntegerArrayModel,
NullableIntegerArrayModel,
OtherTypesArrayModel,
PostgreSQLModel,
Tag,
)
try:
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.expressions import ArraySubquery
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.fields.array import IndexTransform, SliceTransform
from django.contrib.postgres.forms import (
SimpleArrayField,
SplitArrayField,
SplitArrayWidget,
)
from django.db.backends.postgresql.psycopg_any import NumericRange
except ImportError:
pass
@isolate_apps("postgres_tests")
class BasicTests(PostgreSQLSimpleTestCase):
def test_get_field_display(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
models.CharField(max_length=16),
choices=[
["Media", [(["vinyl", "cd"], "Audio")]],
(("mp3", "mp4"), "Digital"),
],
)
tests = (
(["vinyl", "cd"], "Audio"),
(("mp3", "mp4"), "Digital"),
(("a", "b"), "('a', 'b')"),
(["c", "d"], "['c', 'd']"),
)
for value, display in tests:
with self.subTest(value=value, display=display):
instance = MyModel(field=value)
self.assertEqual(instance.get_field_display(), display)
def test_get_field_display_nested_array(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
ArrayField(models.CharField(max_length=16)),
choices=[
[
"Media",
[([["vinyl", "cd"], ("x",)], "Audio")],
],
((["mp3"], ("mp4",)), "Digital"),
],
)
tests = (
([["vinyl", "cd"], ("x",)], "Audio"),
((["mp3"], ("mp4",)), "Digital"),
((("a", "b"), ("c",)), "(('a', 'b'), ('c',))"),
([["a", "b"], ["c"]], "[['a', 'b'], ['c']]"),
)
for value, display in tests:
with self.subTest(value=value, display=display):
instance = MyModel(field=value)
self.assertEqual(instance.get_field_display(), display)
class TestSaveLoad(PostgreSQLTestCase):
def test_integer(self):
instance = IntegerArrayModel(field=[1, 2, 3])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_char(self):
instance = CharArrayModel(field=["hello", "goodbye"])
instance.save()
loaded = CharArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_dates(self):
instance = DateTimeArrayModel(
datetimes=[timezone.now()],
dates=[timezone.now().date()],
times=[timezone.now().time()],
)
instance.save()
loaded = DateTimeArrayModel.objects.get()
self.assertEqual(instance.datetimes, loaded.datetimes)
self.assertEqual(instance.dates, loaded.dates)
self.assertEqual(instance.times, loaded.times)
def test_tuples(self):
instance = IntegerArrayModel(field=(1,))
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertSequenceEqual(instance.field, loaded.field)
def test_integers_passed_as_strings(self):
# This checks that get_prep_value is deferred properly
instance = IntegerArrayModel(field=["1"])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(loaded.field, [1])
def test_default_null(self):
instance = NullableIntegerArrayModel()
instance.save()
loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk)
self.assertIsNone(loaded.field)
self.assertEqual(instance.field, loaded.field)
def test_null_handling(self):
instance = NullableIntegerArrayModel(field=None)
instance.save()
loaded = NullableIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
instance = IntegerArrayModel(field=None)
with self.assertRaises(IntegrityError):
instance.save()
def test_nested(self):
instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]])
instance.save()
loaded = NestedIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_other_array_types(self):
instance = OtherTypesArrayModel(
ips=["192.168.0.1", "::1"],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=[Tag(1), Tag(2), Tag(3)],
json=[{"a": 1}, {"b": 2}],
int_ranges=[NumericRange(10, 20), NumericRange(30, 40)],
bigint_ranges=[
NumericRange(7000000000, 10000000000),
NumericRange(50000000000, 70000000000),
],
)
instance.save()
loaded = OtherTypesArrayModel.objects.get()
self.assertEqual(instance.ips, loaded.ips)
self.assertEqual(instance.uuids, loaded.uuids)
self.assertEqual(instance.decimals, loaded.decimals)
self.assertEqual(instance.tags, loaded.tags)
self.assertEqual(instance.json, loaded.json)
self.assertEqual(instance.int_ranges, loaded.int_ranges)
self.assertEqual(instance.bigint_ranges, loaded.bigint_ranges)
def test_null_from_db_value_handling(self):
instance = OtherTypesArrayModel.objects.create(
ips=["192.168.0.1", "::1"],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=None,
)
instance.refresh_from_db()
self.assertIsNone(instance.tags)
self.assertEqual(instance.json, [])
self.assertIsNone(instance.int_ranges)
self.assertIsNone(instance.bigint_ranges)
def test_model_set_on_base_field(self):
instance = IntegerArrayModel()
field = instance._meta.get_field("field")
self.assertEqual(field.model, IntegerArrayModel)
self.assertEqual(field.base_field.model, IntegerArrayModel)
def test_nested_nullable_base_field(self):
instance = NullableIntegerArrayModel.objects.create(
field_nested=[[None, None], [None, None]],
)
self.assertEqual(instance.field_nested, [[None, None], [None, None]])
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = NullableIntegerArrayModel.objects.bulk_create(
[
NullableIntegerArrayModel(order=1, field=[1]),
NullableIntegerArrayModel(order=2, field=[2]),
NullableIntegerArrayModel(order=3, field=[2, 3]),
NullableIntegerArrayModel(order=4, field=[20, 30, 40]),
NullableIntegerArrayModel(order=5, field=None),
]
)
def test_empty_list(self):
NullableIntegerArrayModel.objects.create(field=[])
obj = (
NullableIntegerArrayModel.objects.annotate(
empty_array=models.Value(
[], output_field=ArrayField(models.IntegerField())
),
)
.filter(field=models.F("empty_array"))
.get()
)
self.assertEqual(obj.field, [])
self.assertEqual(obj.empty_array, [])
def test_exact(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[1]), self.objs[:1]
)
def test_exact_null_only_array(self):
obj = NullableIntegerArrayModel.objects.create(
field=[None], field_nested=[None, None]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[None]), [obj]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field_nested__exact=[None, None]),
[obj],
)
def test_exact_null_only_nested_array(self):
obj1 = NullableIntegerArrayModel.objects.create(field_nested=[[None, None]])
obj2 = NullableIntegerArrayModel.objects.create(
field_nested=[[None, None], [None, None]],
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field_nested__exact=[[None, None]],
),
[obj1],
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field_nested__exact=[[None, None], [None, None]],
),
[obj2],
)
def test_exact_with_expression(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[Value(1)]),
self.objs[:1],
)
def test_exact_charfield(self):
instance = CharArrayModel.objects.create(field=["text"])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field=["text"]), [instance]
)
def test_exact_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field=[[1, 2], [3, 4]]), [instance]
)
def test_isnull(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__isnull=True), self.objs[-1:]
)
def test_gt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__gt=[0]), self.objs[:4]
)
def test_lt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__lt=[2]), self.objs[:1]
)
def test_in(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]),
self.objs[:2],
)
def test_in_subquery(self):
IntegerArrayModel.objects.create(field=[2, 3])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__in=IntegerArrayModel.objects.values_list("field", flat=True)
),
self.objs[2:3],
)
@unittest.expectedFailure
def test_in_including_F_object(self):
# This test asserts that Array objects passed to filters can be
# constructed to contain F objects. This currently doesn't work as the
# psycopg mogrify method that generates the ARRAY() syntax is
# expecting literals, not column references (#27095).
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[models.F("id")]]),
self.objs[:2],
)
def test_in_as_F_object(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[models.F("field")]),
self.objs[:4],
)
def test_contained_by(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]),
self.objs[:2],
)
def test_contained_by_including_F_object(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__contained_by=[models.F("order"), 2]
),
self.objs[:3],
)
def test_contains(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contains=[2]),
self.objs[1:3],
)
def test_contains_subquery(self):
IntegerArrayModel.objects.create(field=[2, 3])
inner_qs = IntegerArrayModel.objects.values_list("field", flat=True)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contains=inner_qs[:1]),
self.objs[2:3],
)
inner_qs = IntegerArrayModel.objects.filter(field__contains=OuterRef("field"))
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(Exists(inner_qs)),
self.objs[1:3],
)
def test_contains_including_expression(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__contains=[2, Value(6) / Value(2)],
),
self.objs[2:3],
)
def test_icontains(self):
# Using the __icontains lookup with ArrayField is inefficient.
instance = CharArrayModel.objects.create(field=["FoO"])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__icontains="foo"), [instance]
)
def test_contains_charfield(self):
# Regression for #22907
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contains=["text"]), []
)
def test_contained_by_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contained_by=["text"]), []
)
def test_overlap_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__overlap=["text"]), []
)
def test_overlap_charfield_including_expression(self):
obj_1 = CharArrayModel.objects.create(field=["TEXT", "lower text"])
obj_2 = CharArrayModel.objects.create(field=["lower text", "TEXT"])
CharArrayModel.objects.create(field=["lower text", "text"])
self.assertSequenceEqual(
CharArrayModel.objects.filter(
field__overlap=[
Upper(Value("text")),
"other",
]
),
[obj_1, obj_2],
)
def test_overlap_values(self):
qs = NullableIntegerArrayModel.objects.filter(order__lt=3)
self.assertCountEqual(
NullableIntegerArrayModel.objects.filter(
field__overlap=qs.values_list("field"),
),
self.objs[:3],
)
self.assertCountEqual(
NullableIntegerArrayModel.objects.filter(
field__overlap=qs.values("field"),
),
self.objs[:3],
)
def test_lookups_autofield_array(self):
qs = (
NullableIntegerArrayModel.objects.filter(
field__0__isnull=False,
)
.values("field__0")
.annotate(
arrayagg=ArrayAgg("id"),
)
.order_by("field__0")
)
tests = (
("contained_by", [self.objs[1].pk, self.objs[2].pk, 0], [2]),
("contains", [self.objs[2].pk], [2]),
("exact", [self.objs[3].pk], [20]),
("overlap", [self.objs[1].pk, self.objs[3].pk], [2, 20]),
)
for lookup, value, expected in tests:
with self.subTest(lookup=lookup):
self.assertSequenceEqual(
qs.filter(
**{"arrayagg__" + lookup: value},
).values_list("field__0", flat=True),
expected,
)
@skipUnlessDBFeature("allows_group_by_refs")
def test_group_by_order_by_aliases(self):
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__0__isnull=False,
)
.values("field__0")
.annotate(arrayagg=ArrayAgg("id"))
.order_by("field__0"),
[
{"field__0": 1, "arrayagg": [self.objs[0].pk]},
{"field__0": 2, "arrayagg": [self.objs[1].pk, self.objs[2].pk]},
{"field__0": 20, "arrayagg": [self.objs[3].pk]},
],
)
alias = connection.ops.quote_name("field__0")
sql = ctx[0]["sql"]
self.assertIn(f"GROUP BY {alias}", sql)
self.assertIn(f"ORDER BY {alias}", sql)
def test_index(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0=2), self.objs[1:3]
)
def test_index_chained(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0__lt=3), self.objs[0:3]
)
def test_index_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0=1), [instance]
)
@unittest.expectedFailure
def test_index_used_on_nested_data(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0=[1, 2]), [instance]
)
def test_index_transform_expression(self):
expr = RawSQL("string_to_array(%s, ';')", ["1;2"])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__0=Cast(
IndexTransform(1, models.IntegerField, expr),
output_field=models.IntegerField(),
),
),
self.objs[:1],
)
def test_index_annotation(self):
qs = NullableIntegerArrayModel.objects.annotate(second=models.F("field__1"))
self.assertCountEqual(
qs.values_list("second", flat=True),
[None, None, None, 3, 30],
)
def test_overlap(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]),
self.objs[0:3],
)
def test_len(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len__lte=2), self.objs[0:3]
)
def test_len_empty_array(self):
obj = NullableIntegerArrayModel.objects.create(field=[])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len=0), [obj]
)
def test_slice(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_1=[2]), self.objs[1:3]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]), self.objs[2:3]
)
def test_order_by_slice(self):
more_objs = (
NullableIntegerArrayModel.objects.create(field=[1, 637]),
NullableIntegerArrayModel.objects.create(field=[2, 1]),
NullableIntegerArrayModel.objects.create(field=[3, -98123]),
NullableIntegerArrayModel.objects.create(field=[4, 2]),
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.order_by("field__1"),
[
more_objs[2],
more_objs[1],
more_objs[3],
self.objs[2],
self.objs[3],
more_objs[0],
self.objs[4],
self.objs[1],
self.objs[0],
],
)
@unittest.expectedFailure
def test_slice_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]), [instance]
)
def test_slice_transform_expression(self):
expr = RawSQL("string_to_array(%s, ';')", ["9;2;3"])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__0_2=SliceTransform(2, 3, expr)
),
self.objs[2:3],
)
def test_slice_annotation(self):
qs = NullableIntegerArrayModel.objects.annotate(
first_two=models.F("field__0_2"),
)
self.assertCountEqual(
qs.values_list("first_two", flat=True),
[None, [1], [2], [2, 3], [20, 30]],
)
def test_usage_in_subquery(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
id__in=NullableIntegerArrayModel.objects.filter(field__len=3)
),
[self.objs[3]],
)
def test_enum_lookup(self):
class TestEnum(enum.Enum):
VALUE_1 = "value_1"
instance = ArrayEnumModel.objects.create(array_of_enums=[TestEnum.VALUE_1])
self.assertSequenceEqual(
ArrayEnumModel.objects.filter(array_of_enums__contains=[TestEnum.VALUE_1]),
[instance],
)
def test_unsupported_lookup(self):
msg = (
"Unsupported lookup '0_bar' for ArrayField or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
list(NullableIntegerArrayModel.objects.filter(field__0_bar=[2]))
msg = (
"Unsupported lookup '0bar' for ArrayField or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
list(NullableIntegerArrayModel.objects.filter(field__0bar=[2]))
def test_grouping_by_annotations_with_array_field_param(self):
value = models.Value([1], output_field=ArrayField(models.IntegerField()))
self.assertEqual(
NullableIntegerArrayModel.objects.annotate(
array_length=models.Func(
value,
1,
function="ARRAY_LENGTH",
output_field=models.IntegerField(),
),
)
.values("array_length")
.annotate(
count=models.Count("pk"),
)
.get()["array_length"],
1,
)
def test_filter_by_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.filter(
field__len=models.OuterRef("field__len"),
).values("field")
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.alias(
same_sized_fields=ArraySubquery(inner_qs),
).filter(same_sized_fields__len__gt=1),
self.objs[0:2],
)
def test_annotated_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.exclude(
pk=models.OuterRef("pk")
).values("order")
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.annotate(
sibling_ids=ArraySubquery(inner_qs),
)
.get(order=1)
.sibling_ids,
[2, 3, 4, 5],
)
def test_group_by_with_annotated_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.exclude(
pk=models.OuterRef("pk")
).values("order")
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.annotate(
sibling_ids=ArraySubquery(inner_qs),
sibling_count=models.Max("sibling_ids__len"),
).values_list("sibling_count", flat=True),
[len(self.objs) - 1] * len(self.objs),
)
def test_annotated_ordered_array_subquery(self):
inner_qs = NullableIntegerArrayModel.objects.order_by("-order").values("order")
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.annotate(
ids=ArraySubquery(inner_qs),
)
.first()
.ids,
[5, 4, 3, 2, 1],
)
def test_annotated_array_subquery_with_json_objects(self):
inner_qs = NullableIntegerArrayModel.objects.exclude(
pk=models.OuterRef("pk")
).values(json=JSONObject(order="order", field="field"))
siblings_json = (
NullableIntegerArrayModel.objects.annotate(
siblings_json=ArraySubquery(inner_qs),
)
.values_list("siblings_json", flat=True)
.get(order=1)
)
self.assertSequenceEqual(
siblings_json,
[
{"field": [2], "order": 2},
{"field": [2, 3], "order": 3},
{"field": [20, 30, 40], "order": 4},
{"field": None, "order": 5},
],
)
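# Illustrative summary of the PostgreSQL operators behind the lookups above:
# contains -> @>, contained_by -> <@, overlap -> &&, len ->
# array_length(field, 1), and index transforms map Python's 0-based indexes
# to SQL's 1-based subscripts. The queryset below is an example, not a test.
def _example_array_lookups():
    qs = NullableIntegerArrayModel.objects.filter(field__contains=[2])
    qs = qs.filter(field__len__gte=1)  # array_length(field, 1) >= 1
    return qs.filter(field__0=2)  # SQL: field[1] = 2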
class TestDateTimeExactQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
now = timezone.now()
cls.datetimes = [now]
cls.dates = [now.date()]
cls.times = [now.time()]
cls.objs = [
DateTimeArrayModel.objects.create(
datetimes=cls.datetimes, dates=cls.dates, times=cls.times
),
]
def test_exact_datetimes(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(datetimes=self.datetimes), self.objs
)
def test_exact_dates(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(dates=self.dates), self.objs
)
def test_exact_times(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(times=self.times), self.objs
)
class TestOtherTypesExactQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.ips = ["192.168.0.1", "::1"]
cls.uuids = [uuid.uuid4()]
cls.decimals = [decimal.Decimal(1.25), 1.75]
cls.tags = [Tag(1), Tag(2), Tag(3)]
cls.objs = [
OtherTypesArrayModel.objects.create(
ips=cls.ips,
uuids=cls.uuids,
decimals=cls.decimals,
tags=cls.tags,
)
]
def test_exact_ip_addresses(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(ips=self.ips), self.objs
)
def test_exact_uuids(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(uuids=self.uuids), self.objs
)
def test_exact_decimals(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(decimals=self.decimals), self.objs
)
def test_exact_tags(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(tags=self.tags), self.objs
)
@isolate_apps("postgres_tests")
class TestChecks(PostgreSQLSimpleTestCase):
def test_field_checks(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.CharField(max_length=-1))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField has a non-positive max_length.
self.assertEqual(errors[0].id, "postgres.E001")
self.assertIn("max_length", errors[0].msg)
def test_invalid_base_fields(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
models.ManyToManyField("postgres_tests.IntegerArrayModel")
)
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0].id, "postgres.E002")
def test_invalid_default(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=[])
model = MyModel()
self.assertEqual(
model.check(),
[
checks.Warning(
msg=(
"ArrayField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint="Use a callable instead, e.g., use `list` instead of `[]`.",
obj=MyModel._meta.get_field("field"),
id="fields.E010",
)
],
)
def test_valid_default(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=list)
model = MyModel()
self.assertEqual(model.check(), [])
def test_valid_default_none(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=None)
model = MyModel()
self.assertEqual(model.check(), [])
def test_nested_field_checks(self):
"""
Nested ArrayFields are permitted.
"""
class MyModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.CharField(max_length=-1)))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField has a non-positive max_length.
self.assertEqual(errors[0].id, "postgres.E001")
self.assertIn("max_length", errors[0].msg)
def test_choices_tuple_list(self):
class MyModel(PostgreSQLModel):
field = ArrayField(
models.CharField(max_length=16),
choices=[
[
"Media",
[(["vinyl", "cd"], "Audio"), (("vhs", "dvd"), "Video")],
],
(["mp3", "mp4"], "Digital"),
],
)
self.assertEqual(MyModel._meta.get_field("field").check(), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
class TestMigrations(TransactionTestCase):
available_apps = ["postgres_tests"]
def test_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(type(new.base_field), type(field.base_field))
self.assertIsNot(new.base_field, field.base_field)
def test_deconstruct_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.size, field.size)
def test_deconstruct_args(self):
field = ArrayField(models.CharField(max_length=20))
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.base_field.max_length, field.base_field.max_length)
def test_subclass_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.contrib.postgres.fields.ArrayField")
field = ArrayFieldSubclass()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "postgres_tests.models.ArrayFieldSubclass")
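    # deconstruct() must return enough information for migrations to
    # recreate an equivalent field: base_field is copied rather than
    # shared, size is preserved, and subclasses report their own path.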
@override_settings(
MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_default_migrations",
}
)
def test_adding_field_with_default(self):
# See #22962
table_name = "postgres_tests_integerarraydefaultmodel"
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
call_command("migrate", "postgres_tests", verbosity=0)
with connection.cursor() as cursor:
self.assertIn(table_name, connection.introspection.table_names(cursor))
call_command("migrate", "postgres_tests", "zero", verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
@override_settings(
MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_index_migrations",
}
)
def test_adding_arrayfield_with_index(self):
"""
ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes.
"""
table_name = "postgres_tests_chartextarrayindexmodel"
call_command("migrate", "postgres_tests", verbosity=0)
with connection.cursor() as cursor:
like_constraint_columns_list = [
v["columns"]
for k, v in list(
connection.introspection.get_constraints(cursor, table_name).items()
)
if k.endswith("_like")
]
# Only the CharField should have a LIKE index.
self.assertEqual(like_constraint_columns_list, [["char2"]])
# All fields should have regular indexes.
with connection.cursor() as cursor:
indexes = [
c["columns"][0]
for c in connection.introspection.get_constraints(
cursor, table_name
).values()
if c["index"] and len(c["columns"]) == 1
]
self.assertIn("char", indexes)
self.assertIn("char2", indexes)
self.assertIn("text", indexes)
call_command("migrate", "postgres_tests", "zero", verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
class TestSerialization(PostgreSQLSimpleTestCase):
test_data = (
'[{"fields": {"field": "[\\"1\\", \\"2\\", null]"}, '
'"model": "postgres_tests.integerarraymodel", "pk": null}]'
)
def test_dumping(self):
instance = IntegerArrayModel(field=[1, 2, None])
data = serializers.serialize("json", [instance])
self.assertEqual(json.loads(data), json.loads(self.test_data))
def test_loading(self):
instance = list(serializers.deserialize("json", self.test_data))[0].object
self.assertEqual(instance.field, [1, 2, None])
class TestValidation(PostgreSQLSimpleTestCase):
def test_unbounded(self):
field = ArrayField(models.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, None], None)
self.assertEqual(cm.exception.code, "item_invalid")
self.assertEqual(
cm.exception.message % cm.exception.params,
"Item 2 in the array did not validate: This field cannot be null.",
)
def test_blank_true(self):
field = ArrayField(models.IntegerField(blank=True, null=True))
        # Should not raise a ValidationError since the base field allows null.
field.clean([1, None], None)
def test_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
field.clean([1, 2, 3], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, 2, 3, 4], None)
self.assertEqual(
cm.exception.messages[0],
"List contains 4 items, it should contain no more than 3.",
)
def test_nested_array_mismatch(self):
field = ArrayField(ArrayField(models.IntegerField()))
field.clean([[1, 2], [3, 4]], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([[1, 2], [3, 4, 5]], None)
self.assertEqual(cm.exception.code, "nested_array_mismatch")
self.assertEqual(
cm.exception.messages[0], "Nested arrays must have the same length."
)
def test_with_base_field_error_params(self):
field = ArrayField(models.CharField(max_length=2))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["abc"], None)
self.assertEqual(len(cm.exception.error_list), 1)
exception = cm.exception.error_list[0]
self.assertEqual(
exception.message,
"Item 1 in the array did not validate: Ensure this value has at most 2 "
"characters (it has 3).",
)
self.assertEqual(exception.code, "item_invalid")
self.assertEqual(
exception.params,
{"nth": 1, "value": "abc", "limit_value": 2, "show_value": 3},
)
def test_with_validators(self):
field = ArrayField(
models.IntegerField(validators=[validators.MinValueValidator(1)])
)
field.clean([1, 2], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([0], None)
self.assertEqual(len(cm.exception.error_list), 1)
exception = cm.exception.error_list[0]
self.assertEqual(
exception.message,
"Item 1 in the array did not validate: Ensure this value is greater than "
"or equal to 1.",
)
self.assertEqual(exception.code, "item_invalid")
self.assertEqual(
exception.params, {"nth": 1, "value": 0, "limit_value": 1, "show_value": 0}
)
class TestSimpleFormField(PostgreSQLSimpleTestCase):
def test_valid(self):
field = SimpleArrayField(forms.CharField())
value = field.clean("a,b,c")
self.assertEqual(value, ["a", "b", "c"])
def test_to_python_fail(self):
field = SimpleArrayField(forms.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("a,b,9")
self.assertEqual(
cm.exception.messages[0],
"Item 1 in the array did not validate: Enter a whole number.",
)
def test_validate_fail(self):
field = SimpleArrayField(forms.CharField(required=True))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("a,b,")
self.assertEqual(
cm.exception.messages[0],
"Item 3 in the array did not validate: This field is required.",
)
def test_validate_fail_base_field_error_params(self):
field = SimpleArrayField(forms.CharField(max_length=2))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("abc,c,defg")
errors = cm.exception.error_list
self.assertEqual(len(errors), 2)
first_error = errors[0]
self.assertEqual(
first_error.message,
"Item 1 in the array did not validate: Ensure this value has at most 2 "
"characters (it has 3).",
)
self.assertEqual(first_error.code, "item_invalid")
self.assertEqual(
first_error.params,
{"nth": 1, "value": "abc", "limit_value": 2, "show_value": 3},
)
second_error = errors[1]
self.assertEqual(
second_error.message,
"Item 3 in the array did not validate: Ensure this value has at most 2 "
"characters (it has 4).",
)
self.assertEqual(second_error.code, "item_invalid")
self.assertEqual(
second_error.params,
{"nth": 3, "value": "defg", "limit_value": 2, "show_value": 4},
)
def test_validators_fail(self):
field = SimpleArrayField(forms.RegexField("[a-e]{2}"))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("a,bc,de")
self.assertEqual(
cm.exception.messages[0],
"Item 1 in the array did not validate: Enter a valid value.",
)
def test_delimiter(self):
field = SimpleArrayField(forms.CharField(), delimiter="|")
value = field.clean("a|b|c")
self.assertEqual(value, ["a", "b", "c"])
def test_delimiter_with_nesting(self):
field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter="|")
value = field.clean("a,b|c,d")
self.assertEqual(value, [["a", "b"], ["c", "d"]])
def test_prepare_value(self):
field = SimpleArrayField(forms.CharField())
value = field.prepare_value(["a", "b", "c"])
self.assertEqual(value, "a,b,c")
def test_max_length(self):
field = SimpleArrayField(forms.CharField(), max_length=2)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("a,b,c")
self.assertEqual(
cm.exception.messages[0],
"List contains 3 items, it should contain no more than 2.",
)
def test_min_length(self):
field = SimpleArrayField(forms.CharField(), min_length=4)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("a,b,c")
self.assertEqual(
cm.exception.messages[0],
"List contains 3 items, it should contain no fewer than 4.",
)
def test_required(self):
field = SimpleArrayField(forms.CharField(), required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("")
self.assertEqual(cm.exception.messages[0], "This field is required.")
def test_model_field_formfield(self):
model_field = ArrayField(models.CharField(max_length=27))
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertIsInstance(form_field.base_field, forms.CharField)
self.assertEqual(form_field.base_field.max_length, 27)
def test_model_field_formfield_size(self):
model_field = ArrayField(models.CharField(max_length=27), size=4)
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertEqual(form_field.max_length, 4)
def test_model_field_choices(self):
model_field = ArrayField(models.IntegerField(choices=((1, "A"), (2, "B"))))
form_field = model_field.formfield()
self.assertEqual(form_field.clean("1,2"), [1, 2])
def test_already_converted_value(self):
field = SimpleArrayField(forms.CharField())
vals = ["a", "b", "c"]
self.assertEqual(field.clean(vals), vals)
def test_has_changed(self):
field = SimpleArrayField(forms.IntegerField())
self.assertIs(field.has_changed([1, 2], [1, 2]), False)
self.assertIs(field.has_changed([1, 2], "1,2"), False)
self.assertIs(field.has_changed([1, 2], "1,2,3"), True)
self.assertIs(field.has_changed([1, 2], "a,b"), True)
def test_has_changed_empty(self):
field = SimpleArrayField(forms.CharField())
self.assertIs(field.has_changed(None, None), False)
self.assertIs(field.has_changed(None, ""), False)
self.assertIs(field.has_changed(None, []), False)
self.assertIs(field.has_changed([], None), False)
self.assertIs(field.has_changed([], ""), False)
class TestSplitFormField(PostgreSQLSimpleTestCase):
def test_valid(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {"array_0": "a", "array_1": "b", "array_2": "c"}
form = SplitForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {"array": ["a", "b", "c"]})
def test_required(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), required=True, size=3)
data = {"array_0": "", "array_1": "", "array_2": ""}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {"array": ["This field is required."]})
def test_remove_trailing_nulls(self):
class SplitForm(forms.Form):
array = SplitArrayField(
forms.CharField(required=False), size=5, remove_trailing_nulls=True
)
data = {
"array_0": "a",
"array_1": "",
"array_2": "b",
"array_3": "",
"array_4": "",
}
form = SplitForm(data)
self.assertTrue(form.is_valid(), form.errors)
self.assertEqual(form.cleaned_data, {"array": ["a", "", "b"]})
def test_remove_trailing_nulls_not_required(self):
class SplitForm(forms.Form):
array = SplitArrayField(
forms.CharField(required=False),
size=2,
remove_trailing_nulls=True,
required=False,
)
data = {"array_0": "", "array_1": ""}
form = SplitForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {"array": []})
def test_required_field(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {"array_0": "a", "array_1": "b", "array_2": ""}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors,
{
"array": [
"Item 3 in the array did not validate: This field is required."
]
},
)
def test_invalid_integer(self):
msg = (
"Item 2 in the array did not validate: Ensure this value is less than or "
"equal to 100."
)
with self.assertRaisesMessage(exceptions.ValidationError, msg):
SplitArrayField(forms.IntegerField(max_value=100), size=2).clean([0, 101])
def test_rendering(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
self.assertHTMLEqual(
str(SplitForm()),
"""
<div>
<label for="id_array_0">Array:</label>
<input id="id_array_0" name="array_0" type="text" required>
<input id="id_array_1" name="array_1" type="text" required>
<input id="id_array_2" name="array_2" type="text" required>
</div>
""",
)
def test_invalid_char_length(self):
field = SplitArrayField(forms.CharField(max_length=2), size=3)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["abc", "c", "defg"])
self.assertEqual(
cm.exception.messages,
[
"Item 1 in the array did not validate: Ensure this value has at most 2 "
"characters (it has 3).",
"Item 3 in the array did not validate: Ensure this value has at most 2 "
"characters (it has 4).",
],
)
def test_splitarraywidget_value_omitted_from_data(self):
class Form(forms.ModelForm):
field = SplitArrayField(forms.IntegerField(), required=False, size=2)
class Meta:
model = IntegerArrayModel
fields = ("field",)
form = Form({"field_0": "1", "field_1": "2"})
self.assertEqual(form.errors, {})
obj = form.save(commit=False)
self.assertEqual(obj.field, [1, 2])
def test_splitarrayfield_has_changed(self):
class Form(forms.ModelForm):
field = SplitArrayField(forms.IntegerField(), required=False, size=2)
class Meta:
model = IntegerArrayModel
fields = ("field",)
tests = [
({}, {"field_0": "", "field_1": ""}, True),
({"field": None}, {"field_0": "", "field_1": ""}, True),
({"field": [1]}, {"field_0": "", "field_1": ""}, True),
({"field": [1]}, {"field_0": "1", "field_1": "0"}, True),
({"field": [1, 2]}, {"field_0": "1", "field_1": "2"}, False),
({"field": [1, 2]}, {"field_0": "a", "field_1": "b"}, True),
]
for initial, data, expected_result in tests:
with self.subTest(initial=initial, data=data):
obj = IntegerArrayModel(**initial)
form = Form(data, instance=obj)
self.assertIs(form.has_changed(), expected_result)
def test_splitarrayfield_remove_trailing_nulls_has_changed(self):
class Form(forms.ModelForm):
field = SplitArrayField(
forms.IntegerField(), required=False, size=2, remove_trailing_nulls=True
)
class Meta:
model = IntegerArrayModel
fields = ("field",)
tests = [
({}, {"field_0": "", "field_1": ""}, False),
({"field": None}, {"field_0": "", "field_1": ""}, False),
({"field": []}, {"field_0": "", "field_1": ""}, False),
({"field": [1]}, {"field_0": "1", "field_1": ""}, False),
]
for initial, data, expected_result in tests:
with self.subTest(initial=initial, data=data):
obj = IntegerArrayModel(**initial)
form = Form(data, instance=obj)
self.assertIs(form.has_changed(), expected_result)
class TestSplitFormWidget(PostgreSQLWidgetTestCase):
def test_get_context(self):
self.assertEqual(
SplitArrayWidget(forms.TextInput(), size=2).get_context(
"name", ["val1", "val2"]
),
{
"widget": {
"name": "name",
"is_hidden": False,
"required": False,
"value": "['val1', 'val2']",
"attrs": {},
"template_name": "postgres/widgets/split_array.html",
"subwidgets": [
{
"name": "name_0",
"is_hidden": False,
"required": False,
"value": "val1",
"attrs": {},
"template_name": "django/forms/widgets/text.html",
"type": "text",
},
{
"name": "name_1",
"is_hidden": False,
"required": False,
"value": "val2",
"attrs": {},
"template_name": "django/forms/widgets/text.html",
"type": "text",
},
],
}
},
)
def test_checkbox_get_context_attrs(self):
context = SplitArrayWidget(
forms.CheckboxInput(),
size=2,
).get_context("name", [True, False])
self.assertEqual(context["widget"]["value"], "[True, False]")
self.assertEqual(
[subwidget["attrs"] for subwidget in context["widget"]["subwidgets"]],
[{"checked": True}, {}],
)
def test_render(self):
self.check_html(
SplitArrayWidget(forms.TextInput(), size=2),
"array",
None,
"""
<input name="array_0" type="text">
<input name="array_1" type="text">
""",
)
def test_render_attrs(self):
self.check_html(
SplitArrayWidget(forms.TextInput(), size=2),
"array",
["val1", "val2"],
attrs={"id": "foo"},
html=(
"""
<input id="foo_0" name="array_0" type="text" value="val1">
<input id="foo_1" name="array_1" type="text" value="val2">
"""
),
)
def test_value_omitted_from_data(self):
widget = SplitArrayWidget(forms.TextInput(), size=2)
self.assertIs(widget.value_omitted_from_data({}, {}, "field"), True)
self.assertIs(
widget.value_omitted_from_data({"field_0": "value"}, {}, "field"), False
)
self.assertIs(
widget.value_omitted_from_data({"field_1": "value"}, {}, "field"), False
)
self.assertIs(
widget.value_omitted_from_data(
{"field_0": "value", "field_1": "value"}, {}, "field"
),
False,
)
class TestAdminUtils(PostgreSQLTestCase):
empty_value = "-empty-"
def test_array_display_for_field(self):
array_field = ArrayField(models.IntegerField())
display_value = display_for_field(
[1, 2],
array_field,
self.empty_value,
)
self.assertEqual(display_value, "1, 2")
def test_array_with_choices_display_for_field(self):
array_field = ArrayField(
models.IntegerField(),
choices=[
([1, 2, 3], "1st choice"),
([1, 2], "2nd choice"),
],
)
display_value = display_for_field(
[1, 2],
array_field,
self.empty_value,
)
self.assertEqual(display_value, "2nd choice")
display_value = display_for_field(
[99, 99],
array_field,
self.empty_value,
)
self.assertEqual(display_value, self.empty_value)
|
db4477635bb8c4ecc15b46bd2aed0665e2024ec4e4dff160bbd0bc8355d477c3 | from django.db import connection
from . import PostgreSQLTestCase
try:
from django.contrib.postgres.signals import (
get_citext_oids,
get_hstore_oids,
register_type_handlers,
)
except ImportError:
    pass  # psycopg isn't installed.
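# The OID lookups are cached per database alias (as the assertNumQueries(0)
# checks below demonstrate), so registering type handlers doesn't query
# pg_type on every new connection.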
class OIDTests(PostgreSQLTestCase):
def assertOIDs(self, oids):
self.assertIsInstance(oids, tuple)
self.assertGreater(len(oids), 0)
self.assertTrue(all(isinstance(oid, int) for oid in oids))
def test_hstore_cache(self):
get_hstore_oids(connection.alias)
with self.assertNumQueries(0):
get_hstore_oids(connection.alias)
def test_citext_cache(self):
get_citext_oids(connection.alias)
with self.assertNumQueries(0):
get_citext_oids(connection.alias)
def test_hstore_values(self):
oids, array_oids = get_hstore_oids(connection.alias)
self.assertOIDs(oids)
self.assertOIDs(array_oids)
def test_citext_values(self):
oids, citext_oids = get_citext_oids(connection.alias)
self.assertOIDs(oids)
self.assertOIDs(citext_oids)
def test_register_type_handlers_no_db(self):
"""Registering type handlers for the nodb connection does nothing."""
with connection._nodb_cursor() as cursor:
register_type_handlers(cursor.db)
|
98d81a0714a9b6c67ccf307b7998bb2484d85eb955e3938c14338f4f451ffd75 | from . import PostgreSQLTestCase
from .models import CharFieldModel, TextFieldModel
try:
from django.contrib.postgres.search import (
TrigramDistance,
TrigramSimilarity,
TrigramStrictWordDistance,
TrigramStrictWordSimilarity,
TrigramWordDistance,
TrigramWordSimilarity,
)
except ImportError:
    pass  # psycopg isn't installed.
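# These lookups and expressions require the pg_trgm extension. The
# trigram_similar lookup uses the % operator, which matches against the
# pg_trgm.similarity_threshold setting (0.3 by default).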
class TrigramTest(PostgreSQLTestCase):
Model = CharFieldModel
@classmethod
def setUpTestData(cls):
cls.Model.objects.bulk_create(
[
cls.Model(field="Matthew"),
cls.Model(field="Cat sat on mat."),
cls.Model(field="Dog sat on rug."),
]
)
def test_trigram_search(self):
self.assertQuerySetEqual(
self.Model.objects.filter(field__trigram_similar="Mathew"),
["Matthew"],
transform=lambda instance: instance.field,
)
def test_trigram_word_search(self):
obj = self.Model.objects.create(
field="Gumby rides on the path of Middlesbrough",
)
self.assertSequenceEqual(
self.Model.objects.filter(field__trigram_word_similar="Middlesborough"),
[obj],
)
self.assertSequenceEqual(
self.Model.objects.filter(field__trigram_word_similar="Middle"),
[obj],
)
def test_trigram_strict_word_search_matched(self):
obj = self.Model.objects.create(
field="Gumby rides on the path of Middlesbrough",
)
self.assertSequenceEqual(
self.Model.objects.filter(
field__trigram_strict_word_similar="Middlesborough"
),
[obj],
)
self.assertSequenceEqual(
self.Model.objects.filter(field__trigram_strict_word_similar="Middle"),
[],
)
def test_trigram_similarity(self):
search = "Bat sat on cat."
# Round result of similarity because PostgreSQL uses greater precision.
self.assertQuerySetEqual(
self.Model.objects.filter(
field__trigram_similar=search,
)
.annotate(similarity=TrigramSimilarity("field", search))
.order_by("-similarity"),
[("Cat sat on mat.", 0.625), ("Dog sat on rug.", 0.333333)],
transform=lambda instance: (instance.field, round(instance.similarity, 6)),
ordered=True,
)
def test_trigram_word_similarity(self):
search = "mat"
self.assertSequenceEqual(
self.Model.objects.filter(
field__trigram_word_similar=search,
)
.annotate(
word_similarity=TrigramWordSimilarity(search, "field"),
)
.values("field", "word_similarity")
.order_by("-word_similarity"),
[
{"field": "Cat sat on mat.", "word_similarity": 1.0},
{"field": "Matthew", "word_similarity": 0.75},
],
)
def test_trigram_strict_word_similarity(self):
search = "matt"
self.assertSequenceEqual(
self.Model.objects.filter(field__trigram_word_similar=search)
.annotate(word_similarity=TrigramStrictWordSimilarity(search, "field"))
.values("field", "word_similarity")
.order_by("-word_similarity"),
[
{"field": "Cat sat on mat.", "word_similarity": 0.5},
{"field": "Matthew", "word_similarity": 0.44444445},
],
)
def test_trigram_similarity_alternate(self):
# Round result of distance because PostgreSQL uses greater precision.
self.assertQuerySetEqual(
self.Model.objects.annotate(
distance=TrigramDistance("field", "Bat sat on cat."),
)
.filter(distance__lte=0.7)
.order_by("distance"),
[("Cat sat on mat.", 0.375), ("Dog sat on rug.", 0.666667)],
transform=lambda instance: (instance.field, round(instance.distance, 6)),
ordered=True,
)
def test_trigram_word_similarity_alternate(self):
self.assertSequenceEqual(
self.Model.objects.annotate(
word_distance=TrigramWordDistance("mat", "field"),
)
.filter(
word_distance__lte=0.7,
)
.values("field", "word_distance")
.order_by("word_distance"),
[
{"field": "Cat sat on mat.", "word_distance": 0},
{"field": "Matthew", "word_distance": 0.25},
],
)
def test_trigram_strict_word_distance(self):
self.assertSequenceEqual(
self.Model.objects.annotate(
word_distance=TrigramStrictWordDistance("matt", "field"),
)
.filter(word_distance__lte=0.7)
.values("field", "word_distance")
.order_by("word_distance"),
[
{"field": "Cat sat on mat.", "word_distance": 0.5},
{"field": "Matthew", "word_distance": 0.5555556},
],
)
class TrigramTextFieldTest(TrigramTest):
"""
TextField has the same behavior as CharField regarding trigram lookups.
"""
Model = TextFieldModel
|
3ca9b2bd8d01ccac1c32cfbddac2c987b259086b582a3f557759707c48163c5e | from django.db import connection, transaction
from django.db.models import (
CharField,
F,
Func,
IntegerField,
JSONField,
OuterRef,
Q,
Subquery,
Value,
Window,
)
from django.db.models.fields.json import KeyTextTransform, KeyTransform
from django.db.models.functions import Cast, Concat, Substr
from django.test import skipUnlessDBFeature
from django.test.utils import Approximate, ignore_warnings
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango50Warning, RemovedInDjango51Warning
from . import PostgreSQLTestCase
from .models import AggregateTestModel, HotelReservation, Room, StatTestModel
try:
from django.contrib.postgres.aggregates import (
ArrayAgg,
BitAnd,
BitOr,
BitXor,
BoolAnd,
BoolOr,
Corr,
CovarPop,
JSONBAgg,
RegrAvgX,
RegrAvgY,
RegrCount,
RegrIntercept,
RegrR2,
RegrSlope,
RegrSXX,
RegrSXY,
RegrSYY,
StatAggregate,
StringAgg,
)
from django.contrib.postgres.fields import ArrayField
except ImportError:
    pass  # psycopg2 is not installed.
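# TestGeneralAggregate covers the general-purpose aggregates (ArrayAgg,
# BitAnd/BitOr/BitXor, BoolAnd/BoolOr, JSONBAgg, StringAgg);
# TestStatisticsAggregate further down covers the two-argument statistical
# aggregates built on StatAggregate.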
class TestGeneralAggregate(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.aggs = AggregateTestModel.objects.bulk_create(
[
AggregateTestModel(
boolean_field=True,
char_field="Foo1",
text_field="Text1",
integer_field=0,
),
AggregateTestModel(
boolean_field=False,
char_field="Foo2",
text_field="Text2",
integer_field=1,
json_field={"lang": "pl"},
),
AggregateTestModel(
boolean_field=False,
char_field="Foo4",
text_field="Text4",
integer_field=2,
json_field={"lang": "en"},
),
AggregateTestModel(
boolean_field=True,
char_field="Foo3",
text_field="Text3",
integer_field=0,
json_field={"breed": "collie"},
),
]
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_empty_result_set(self):
AggregateTestModel.objects.all().delete()
tests = [
(ArrayAgg("char_field"), []),
(ArrayAgg("integer_field"), []),
(ArrayAgg("boolean_field"), []),
(BitAnd("integer_field"), None),
(BitOr("integer_field"), None),
(BoolAnd("boolean_field"), None),
(BoolOr("boolean_field"), None),
(JSONBAgg("integer_field"), []),
(StringAgg("char_field", delimiter=";"), ""),
]
if connection.features.has_bit_xor:
tests.append((BitXor("integer_field"), None))
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = AggregateTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = AggregateTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
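    # The default argument is implemented by wrapping the aggregate in
    # COALESCE(aggregate, default), so the fallback applies both to the
    # none() non-execution shortcut and to a query that actually runs.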
def test_default_argument(self):
AggregateTestModel.objects.all().delete()
tests = [
(ArrayAgg("char_field", default=["<empty>"]), ["<empty>"]),
(ArrayAgg("integer_field", default=[0]), [0]),
(ArrayAgg("boolean_field", default=[False]), [False]),
(BitAnd("integer_field", default=0), 0),
(BitOr("integer_field", default=0), 0),
(BoolAnd("boolean_field", default=False), False),
(BoolOr("boolean_field", default=False), False),
(JSONBAgg("integer_field", default=["<empty>"]), ["<empty>"]),
(
JSONBAgg("integer_field", default=Value(["<empty>"], JSONField())),
["<empty>"],
),
(StringAgg("char_field", delimiter=";", default="<empty>"), "<empty>"),
(
StringAgg("char_field", delimiter=";", default=Value("<empty>")),
"<empty>",
),
]
if connection.features.has_bit_xor:
tests.append((BitXor("integer_field", default=0), 0))
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = AggregateTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
# Empty result when query must be executed.
with transaction.atomic(), self.assertNumQueries(1):
values = AggregateTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
def test_convert_value_deprecation(self):
AggregateTestModel.objects.all().delete()
queryset = AggregateTestModel.objects.all()
with self.assertWarnsMessage(
RemovedInDjango50Warning, ArrayAgg.deprecation_msg
):
queryset.aggregate(aggregation=ArrayAgg("boolean_field"))
with self.assertWarnsMessage(
RemovedInDjango50Warning, JSONBAgg.deprecation_msg
):
queryset.aggregate(aggregation=JSONBAgg("integer_field"))
with self.assertWarnsMessage(
RemovedInDjango50Warning, StringAgg.deprecation_msg
):
queryset.aggregate(aggregation=StringAgg("char_field", delimiter=";"))
# No warnings raised if default argument provided.
self.assertEqual(
queryset.aggregate(aggregation=ArrayAgg("boolean_field", default=None)),
{"aggregation": None},
)
self.assertEqual(
queryset.aggregate(aggregation=JSONBAgg("integer_field", default=None)),
{"aggregation": None},
)
self.assertEqual(
queryset.aggregate(
aggregation=StringAgg("char_field", delimiter=";", default=None),
),
{"aggregation": None},
)
self.assertEqual(
queryset.aggregate(
aggregation=ArrayAgg("boolean_field", default=Value([]))
),
{"aggregation": []},
)
self.assertEqual(
queryset.aggregate(aggregation=JSONBAgg("integer_field", default=[])),
{"aggregation": []},
)
self.assertEqual(
queryset.aggregate(
aggregation=StringAgg("char_field", delimiter=";", default=Value("")),
),
{"aggregation": ""},
)
@ignore_warnings(category=RemovedInDjango51Warning)
def test_jsonb_agg_default_str_value(self):
AggregateTestModel.objects.all().delete()
queryset = AggregateTestModel.objects.all()
self.assertEqual(
queryset.aggregate(
aggregation=JSONBAgg("integer_field", default=Value("<empty>"))
),
{"aggregation": "<empty>"},
)
def test_jsonb_agg_default_str_value_deprecation(self):
queryset = AggregateTestModel.objects.all()
msg = (
"Passing a Value() with an output_field that isn't a JSONField as "
"JSONBAgg(default) is deprecated. Pass default=Value('<empty>', "
"output_field=JSONField()) instead."
)
with self.assertWarnsMessage(RemovedInDjango51Warning, msg):
queryset.aggregate(
aggregation=JSONBAgg("integer_field", default=Value("<empty>"))
)
with self.assertWarnsMessage(RemovedInDjango51Warning, msg):
queryset.none().aggregate(
aggregation=JSONBAgg("integer_field", default=Value("<empty>"))
            )
@ignore_warnings(category=RemovedInDjango51Warning)
def test_jsonb_agg_default_encoded_json_string(self):
AggregateTestModel.objects.all().delete()
queryset = AggregateTestModel.objects.all()
self.assertEqual(
queryset.aggregate(
aggregation=JSONBAgg("integer_field", default=Value("[]"))
),
{"aggregation": []},
)
def test_jsonb_agg_default_encoded_json_string_deprecation(self):
queryset = AggregateTestModel.objects.all()
msg = (
"Passing an encoded JSON string as JSONBAgg(default) is deprecated. Pass "
"default=[] instead."
)
with self.assertWarnsMessage(RemovedInDjango51Warning, msg):
queryset.aggregate(
aggregation=JSONBAgg("integer_field", default=Value("[]"))
)
with self.assertWarnsMessage(RemovedInDjango51Warning, msg):
queryset.none().aggregate(
aggregation=JSONBAgg("integer_field", default=Value("[]"))
)
def test_array_agg_charfield(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg("char_field"))
self.assertEqual(values, {"arrayagg": ["Foo1", "Foo2", "Foo4", "Foo3"]})
def test_array_agg_charfield_ordering(self):
ordering_test_cases = (
(F("char_field").desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
(F("char_field").asc(), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(F("char_field"), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(
[F("boolean_field"), F("char_field").desc()],
["Foo4", "Foo2", "Foo3", "Foo1"],
),
(
(F("boolean_field"), F("char_field").desc()),
["Foo4", "Foo2", "Foo3", "Foo1"],
),
("char_field", ["Foo1", "Foo2", "Foo3", "Foo4"]),
("-char_field", ["Foo4", "Foo3", "Foo2", "Foo1"]),
(Concat("char_field", Value("@")), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(Concat("char_field", Value("@")).desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
(
(
Substr("char_field", 1, 1),
F("integer_field"),
Substr("char_field", 4, 1).desc(),
),
["Foo3", "Foo1", "Foo2", "Foo4"],
),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("char_field", ordering=ordering)
)
self.assertEqual(values, {"arrayagg": expected_output})
def test_array_agg_integerfield(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("integer_field")
)
self.assertEqual(values, {"arrayagg": [0, 1, 2, 0]})
def test_array_agg_integerfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("integer_field", ordering=F("integer_field").desc())
)
self.assertEqual(values, {"arrayagg": [2, 1, 0, 0]})
def test_array_agg_booleanfield(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("boolean_field")
)
self.assertEqual(values, {"arrayagg": [True, False, False, True]})
def test_array_agg_booleanfield_ordering(self):
ordering_test_cases = (
(F("boolean_field").asc(), [False, False, True, True]),
(F("boolean_field").desc(), [True, True, False, False]),
(F("boolean_field"), [False, False, True, True]),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("boolean_field", ordering=ordering)
)
self.assertEqual(values, {"arrayagg": expected_output})
def test_array_agg_jsonfield(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg(
KeyTransform("lang", "json_field"),
filter=Q(json_field__lang__isnull=False),
),
)
self.assertEqual(values, {"arrayagg": ["pl", "en"]})
def test_array_agg_jsonfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg(
KeyTransform("lang", "json_field"),
filter=Q(json_field__lang__isnull=False),
ordering=KeyTransform("lang", "json_field"),
),
)
self.assertEqual(values, {"arrayagg": ["en", "pl"]})
def test_array_agg_filter(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("integer_field", filter=Q(integer_field__gt=0)),
)
self.assertEqual(values, {"arrayagg": [1, 2]})
def test_array_agg_lookups(self):
aggr1 = AggregateTestModel.objects.create()
aggr2 = AggregateTestModel.objects.create()
StatTestModel.objects.create(related_field=aggr1, int1=1, int2=0)
StatTestModel.objects.create(related_field=aggr1, int1=2, int2=0)
StatTestModel.objects.create(related_field=aggr2, int1=3, int2=0)
StatTestModel.objects.create(related_field=aggr2, int1=4, int2=0)
qs = (
StatTestModel.objects.values("related_field")
.annotate(array=ArrayAgg("int1"))
.filter(array__overlap=[2])
.values_list("array", flat=True)
)
self.assertCountEqual(qs.get(), [1, 2])
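    # ArrayAgg annotations support ArrayField lookups such as __overlap
    # because the aggregate's output_field is an ArrayField of the input
    # expression's type.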
def test_bit_and_general(self):
values = AggregateTestModel.objects.filter(integer_field__in=[0, 1]).aggregate(
bitand=BitAnd("integer_field")
)
self.assertEqual(values, {"bitand": 0})
def test_bit_and_on_only_true_values(self):
values = AggregateTestModel.objects.filter(integer_field=1).aggregate(
bitand=BitAnd("integer_field")
)
self.assertEqual(values, {"bitand": 1})
def test_bit_and_on_only_false_values(self):
values = AggregateTestModel.objects.filter(integer_field=0).aggregate(
bitand=BitAnd("integer_field")
)
self.assertEqual(values, {"bitand": 0})
def test_bit_or_general(self):
values = AggregateTestModel.objects.filter(integer_field__in=[0, 1]).aggregate(
bitor=BitOr("integer_field")
)
self.assertEqual(values, {"bitor": 1})
def test_bit_or_on_only_true_values(self):
values = AggregateTestModel.objects.filter(integer_field=1).aggregate(
bitor=BitOr("integer_field")
)
self.assertEqual(values, {"bitor": 1})
def test_bit_or_on_only_false_values(self):
values = AggregateTestModel.objects.filter(integer_field=0).aggregate(
bitor=BitOr("integer_field")
)
self.assertEqual(values, {"bitor": 0})
@skipUnlessDBFeature("has_bit_xor")
def test_bit_xor_general(self):
AggregateTestModel.objects.create(integer_field=3)
values = AggregateTestModel.objects.filter(
integer_field__in=[1, 3],
).aggregate(bitxor=BitXor("integer_field"))
self.assertEqual(values, {"bitxor": 2})
@skipUnlessDBFeature("has_bit_xor")
def test_bit_xor_on_only_true_values(self):
values = AggregateTestModel.objects.filter(
integer_field=1,
).aggregate(bitxor=BitXor("integer_field"))
self.assertEqual(values, {"bitxor": 1})
@skipUnlessDBFeature("has_bit_xor")
def test_bit_xor_on_only_false_values(self):
values = AggregateTestModel.objects.filter(
integer_field=0,
).aggregate(bitxor=BitXor("integer_field"))
self.assertEqual(values, {"bitxor": 0})
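    # BIT_XOR is only available on PostgreSQL 14+, which the has_bit_xor
    # database feature reflects; hence the skipUnlessDBFeature guards on
    # the tests above.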
def test_bool_and_general(self):
values = AggregateTestModel.objects.aggregate(booland=BoolAnd("boolean_field"))
self.assertEqual(values, {"booland": False})
def test_bool_and_q_object(self):
values = AggregateTestModel.objects.aggregate(
booland=BoolAnd(Q(integer_field__gt=2)),
)
self.assertEqual(values, {"booland": False})
def test_bool_or_general(self):
values = AggregateTestModel.objects.aggregate(boolor=BoolOr("boolean_field"))
self.assertEqual(values, {"boolor": True})
def test_bool_or_q_object(self):
values = AggregateTestModel.objects.aggregate(
boolor=BoolOr(Q(integer_field__gt=2)),
)
self.assertEqual(values, {"boolor": False})
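    # Unlike the other aggregates, StringAgg requires a delimiter because
    # PostgreSQL's string_agg() takes the delimiter as a mandatory second
    # argument.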
def test_string_agg_requires_delimiter(self):
with self.assertRaises(TypeError):
AggregateTestModel.objects.aggregate(stringagg=StringAgg("char_field"))
def test_string_agg_delimiter_escaping(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter="'")
)
self.assertEqual(values, {"stringagg": "Foo1'Foo2'Foo4'Foo3"})
def test_string_agg_charfield(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter=";")
)
self.assertEqual(values, {"stringagg": "Foo1;Foo2;Foo4;Foo3"})
def test_string_agg_default_output_field(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("text_field", delimiter=";"),
)
self.assertEqual(values, {"stringagg": "Text1;Text2;Text4;Text3"})
def test_string_agg_charfield_ordering(self):
ordering_test_cases = (
(F("char_field").desc(), "Foo4;Foo3;Foo2;Foo1"),
(F("char_field").asc(), "Foo1;Foo2;Foo3;Foo4"),
(F("char_field"), "Foo1;Foo2;Foo3;Foo4"),
("char_field", "Foo1;Foo2;Foo3;Foo4"),
("-char_field", "Foo4;Foo3;Foo2;Foo1"),
(Concat("char_field", Value("@")), "Foo1;Foo2;Foo3;Foo4"),
(Concat("char_field", Value("@")).desc(), "Foo4;Foo3;Foo2;Foo1"),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter=";", ordering=ordering)
)
self.assertEqual(values, {"stringagg": expected_output})
def test_string_agg_jsonfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg(
KeyTextTransform("lang", "json_field"),
delimiter=";",
ordering=KeyTextTransform("lang", "json_field"),
output_field=CharField(),
),
)
self.assertEqual(values, {"stringagg": "en;pl"})
def test_string_agg_filter(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg(
"char_field",
delimiter=";",
filter=Q(char_field__endswith="3") | Q(char_field__endswith="1"),
)
)
self.assertEqual(values, {"stringagg": "Foo1;Foo3"})
def test_orderable_agg_alternative_fields(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("integer_field", ordering=F("char_field").asc())
)
self.assertEqual(values, {"arrayagg": [0, 1, 0, 2]})
def test_jsonb_agg(self):
values = AggregateTestModel.objects.aggregate(jsonbagg=JSONBAgg("char_field"))
self.assertEqual(values, {"jsonbagg": ["Foo1", "Foo2", "Foo4", "Foo3"]})
def test_jsonb_agg_charfield_ordering(self):
ordering_test_cases = (
(F("char_field").desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
(F("char_field").asc(), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(F("char_field"), ["Foo1", "Foo2", "Foo3", "Foo4"]),
("char_field", ["Foo1", "Foo2", "Foo3", "Foo4"]),
("-char_field", ["Foo4", "Foo3", "Foo2", "Foo1"]),
(Concat("char_field", Value("@")), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(Concat("char_field", Value("@")).desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("char_field", ordering=ordering),
)
self.assertEqual(values, {"jsonbagg": expected_output})
def test_jsonb_agg_integerfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("integer_field", ordering=F("integer_field").desc()),
)
self.assertEqual(values, {"jsonbagg": [2, 1, 0, 0]})
def test_jsonb_agg_booleanfield_ordering(self):
ordering_test_cases = (
(F("boolean_field").asc(), [False, False, True, True]),
(F("boolean_field").desc(), [True, True, False, False]),
(F("boolean_field"), [False, False, True, True]),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("boolean_field", ordering=ordering),
)
self.assertEqual(values, {"jsonbagg": expected_output})
def test_jsonb_agg_jsonfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg(
KeyTransform("lang", "json_field"),
filter=Q(json_field__lang__isnull=False),
ordering=KeyTransform("lang", "json_field"),
),
)
self.assertEqual(values, {"jsonbagg": ["en", "pl"]})
def test_jsonb_agg_key_index_transforms(self):
room101 = Room.objects.create(number=101)
room102 = Room.objects.create(number=102)
datetimes = [
timezone.datetime(2018, 6, 20),
timezone.datetime(2018, 6, 24),
timezone.datetime(2018, 6, 28),
]
HotelReservation.objects.create(
datespan=(datetimes[0].date(), datetimes[1].date()),
start=datetimes[0],
end=datetimes[1],
room=room102,
requirements={"double_bed": True, "parking": True},
)
HotelReservation.objects.create(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
requirements={"double_bed": False, "sea_view": True, "parking": False},
)
HotelReservation.objects.create(
datespan=(datetimes[0].date(), datetimes[2].date()),
start=datetimes[0],
end=datetimes[2],
room=room101,
requirements={"sea_view": False},
)
values = (
Room.objects.annotate(
requirements=JSONBAgg(
"hotelreservation__requirements",
ordering="-hotelreservation__start",
)
)
.filter(requirements__0__sea_view=True)
.values("number", "requirements")
)
self.assertSequenceEqual(
values,
[
{
"number": 102,
"requirements": [
{"double_bed": False, "sea_view": True, "parking": False},
{"double_bed": True, "parking": True},
],
},
],
)
def test_string_agg_array_agg_ordering_in_subquery(self):
stats = []
for i, agg in enumerate(AggregateTestModel.objects.order_by("char_field")):
stats.append(StatTestModel(related_field=agg, int1=i, int2=i + 1))
stats.append(StatTestModel(related_field=agg, int1=i + 1, int2=i))
StatTestModel.objects.bulk_create(stats)
for aggregate, expected_result in (
(
ArrayAgg("stattestmodel__int1", ordering="-stattestmodel__int2"),
[
("Foo1", [0, 1]),
("Foo2", [1, 2]),
("Foo3", [2, 3]),
("Foo4", [3, 4]),
],
),
(
StringAgg(
Cast("stattestmodel__int1", CharField()),
delimiter=";",
ordering="-stattestmodel__int2",
),
[("Foo1", "0;1"), ("Foo2", "1;2"), ("Foo3", "2;3"), ("Foo4", "3;4")],
),
):
with self.subTest(aggregate=aggregate.__class__.__name__):
subquery = (
AggregateTestModel.objects.filter(
pk=OuterRef("pk"),
)
.annotate(agg=aggregate)
.values("agg")
)
values = (
AggregateTestModel.objects.annotate(
agg=Subquery(subquery),
)
.order_by("char_field")
.values_list("char_field", "agg")
)
self.assertEqual(list(values), expected_result)
def test_string_agg_array_agg_filter_in_subquery(self):
StatTestModel.objects.bulk_create(
[
StatTestModel(related_field=self.aggs[0], int1=0, int2=5),
StatTestModel(related_field=self.aggs[0], int1=1, int2=4),
StatTestModel(related_field=self.aggs[0], int1=2, int2=3),
]
)
for aggregate, expected_result in (
(
ArrayAgg("stattestmodel__int1", filter=Q(stattestmodel__int2__gt=3)),
[("Foo1", [0, 1]), ("Foo2", None)],
),
(
StringAgg(
Cast("stattestmodel__int2", CharField()),
delimiter=";",
filter=Q(stattestmodel__int1__lt=2),
),
[("Foo1", "5;4"), ("Foo2", None)],
),
):
with self.subTest(aggregate=aggregate.__class__.__name__):
subquery = (
AggregateTestModel.objects.filter(
pk=OuterRef("pk"),
)
.annotate(agg=aggregate)
.values("agg")
)
values = (
AggregateTestModel.objects.annotate(
agg=Subquery(subquery),
)
.filter(
char_field__in=["Foo1", "Foo2"],
)
.order_by("char_field")
.values_list("char_field", "agg")
)
self.assertEqual(list(values), expected_result)
def test_string_agg_filter_in_subquery_with_exclude(self):
subquery = (
AggregateTestModel.objects.annotate(
stringagg=StringAgg(
"char_field",
delimiter=";",
filter=Q(char_field__endswith="1"),
)
)
.exclude(stringagg="")
.values("id")
)
self.assertSequenceEqual(
AggregateTestModel.objects.filter(id__in=Subquery(subquery)),
[self.aggs[0]],
)
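    # Wrapping a Subquery in Func(function="ARRAY") is the hand-rolled
    # equivalent of ArraySubquery; the inner queryset's ordering must
    # survive the subquery pushdown.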
def test_ordering_isnt_cleared_for_array_subquery(self):
inner_qs = AggregateTestModel.objects.order_by("-integer_field")
qs = AggregateTestModel.objects.annotate(
integers=Func(
Subquery(inner_qs.values("integer_field")),
function="ARRAY",
output_field=ArrayField(base_field=IntegerField()),
),
)
self.assertSequenceEqual(
qs.first().integers,
inner_qs.values_list("integer_field", flat=True),
)
def test_window(self):
self.assertCountEqual(
AggregateTestModel.objects.annotate(
integers=Window(
expression=ArrayAgg("char_field"),
partition_by=F("integer_field"),
)
).values("integers", "char_field"),
[
{"integers": ["Foo1", "Foo3"], "char_field": "Foo1"},
{"integers": ["Foo1", "Foo3"], "char_field": "Foo3"},
{"integers": ["Foo2"], "char_field": "Foo2"},
{"integers": ["Foo4"], "char_field": "Foo4"},
],
)
def test_values_list(self):
tests = [ArrayAgg("integer_field"), JSONBAgg("integer_field")]
for aggregation in tests:
with self.subTest(aggregation=aggregation):
self.assertCountEqual(
AggregateTestModel.objects.values_list(aggregation),
[([0],), ([1],), ([2],), ([0],)],
)
class TestAggregateDistinct(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
AggregateTestModel.objects.create(char_field="Foo")
AggregateTestModel.objects.create(char_field="Foo")
AggregateTestModel.objects.create(char_field="Bar")
def test_string_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter=" ", distinct=False)
)
self.assertEqual(values["stringagg"].count("Foo"), 2)
self.assertEqual(values["stringagg"].count("Bar"), 1)
def test_string_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter=" ", distinct=True)
)
self.assertEqual(values["stringagg"].count("Foo"), 1)
self.assertEqual(values["stringagg"].count("Bar"), 1)
def test_array_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("char_field", distinct=False)
)
self.assertEqual(sorted(values["arrayagg"]), ["Bar", "Foo", "Foo"])
def test_array_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("char_field", distinct=True)
)
self.assertEqual(sorted(values["arrayagg"]), ["Bar", "Foo"])
def test_jsonb_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("char_field", distinct=False),
)
self.assertEqual(sorted(values["jsonbagg"]), ["Bar", "Foo", "Foo"])
def test_jsonb_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("char_field", distinct=True),
)
self.assertEqual(sorted(values["jsonbagg"]), ["Bar", "Foo"])
class TestStatisticsAggregate(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
StatTestModel.objects.create(
int1=1,
int2=3,
related_field=AggregateTestModel.objects.create(integer_field=0),
)
StatTestModel.objects.create(
int1=2,
int2=2,
related_field=AggregateTestModel.objects.create(integer_field=1),
)
StatTestModel.objects.create(
int1=3,
int2=1,
related_field=AggregateTestModel.objects.create(integer_field=2),
)
# Tests for base class (StatAggregate)
def test_missing_arguments_raises_exception(self):
with self.assertRaisesMessage(ValueError, "Both y and x must be provided."):
StatAggregate(x=None, y=None)
def test_correct_source_expressions(self):
func = StatAggregate(x="test", y=13)
self.assertIsInstance(func.source_expressions[0], Value)
self.assertIsInstance(func.source_expressions[1], F)
def test_alias_is_required(self):
class SomeFunc(StatAggregate):
function = "TEST"
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
StatTestModel.objects.aggregate(SomeFunc(y="int2", x="int1"))
# Test aggregates
def test_empty_result_set(self):
StatTestModel.objects.all().delete()
tests = [
(Corr(y="int2", x="int1"), None),
(CovarPop(y="int2", x="int1"), None),
(CovarPop(y="int2", x="int1", sample=True), None),
(RegrAvgX(y="int2", x="int1"), None),
(RegrAvgY(y="int2", x="int1"), None),
(RegrCount(y="int2", x="int1"), 0),
(RegrIntercept(y="int2", x="int1"), None),
(RegrR2(y="int2", x="int1"), None),
(RegrSlope(y="int2", x="int1"), None),
(RegrSXX(y="int2", x="int1"), None),
(RegrSXY(y="int2", x="int1"), None),
(RegrSYY(y="int2", x="int1"), None),
]
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = StatTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = StatTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
def test_default_argument(self):
StatTestModel.objects.all().delete()
tests = [
(Corr(y="int2", x="int1", default=0), 0),
(CovarPop(y="int2", x="int1", default=0), 0),
(CovarPop(y="int2", x="int1", sample=True, default=0), 0),
(RegrAvgX(y="int2", x="int1", default=0), 0),
(RegrAvgY(y="int2", x="int1", default=0), 0),
# RegrCount() doesn't support the default argument.
(RegrIntercept(y="int2", x="int1", default=0), 0),
(RegrR2(y="int2", x="int1", default=0), 0),
(RegrSlope(y="int2", x="int1", default=0), 0),
(RegrSXX(y="int2", x="int1", default=0), 0),
(RegrSXY(y="int2", x="int1", default=0), 0),
(RegrSYY(y="int2", x="int1", default=0), 0),
]
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = StatTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = StatTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
def test_corr_general(self):
values = StatTestModel.objects.aggregate(corr=Corr(y="int2", x="int1"))
self.assertEqual(values, {"corr": -1.0})
def test_covar_pop_general(self):
values = StatTestModel.objects.aggregate(covarpop=CovarPop(y="int2", x="int1"))
self.assertEqual(values, {"covarpop": Approximate(-0.66, places=1)})
def test_covar_pop_sample(self):
values = StatTestModel.objects.aggregate(
covarpop=CovarPop(y="int2", x="int1", sample=True)
)
self.assertEqual(values, {"covarpop": -1.0})
def test_regr_avgx_general(self):
values = StatTestModel.objects.aggregate(regravgx=RegrAvgX(y="int2", x="int1"))
self.assertEqual(values, {"regravgx": 2.0})
def test_regr_avgy_general(self):
values = StatTestModel.objects.aggregate(regravgy=RegrAvgY(y="int2", x="int1"))
self.assertEqual(values, {"regravgy": 2.0})
def test_regr_count_general(self):
values = StatTestModel.objects.aggregate(
regrcount=RegrCount(y="int2", x="int1")
)
self.assertEqual(values, {"regrcount": 3})
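    # regr_count() never returns NULL (it returns 0 for an empty set), so
    # RegrCount disallows the default argument.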
def test_regr_count_default(self):
msg = "RegrCount does not allow default."
with self.assertRaisesMessage(TypeError, msg):
RegrCount(y="int2", x="int1", default=0)
def test_regr_intercept_general(self):
values = StatTestModel.objects.aggregate(
regrintercept=RegrIntercept(y="int2", x="int1")
)
self.assertEqual(values, {"regrintercept": 4})
def test_regr_r2_general(self):
values = StatTestModel.objects.aggregate(regrr2=RegrR2(y="int2", x="int1"))
self.assertEqual(values, {"regrr2": 1})
def test_regr_slope_general(self):
values = StatTestModel.objects.aggregate(
regrslope=RegrSlope(y="int2", x="int1")
)
self.assertEqual(values, {"regrslope": -1})
def test_regr_sxx_general(self):
values = StatTestModel.objects.aggregate(regrsxx=RegrSXX(y="int2", x="int1"))
self.assertEqual(values, {"regrsxx": 2.0})
def test_regr_sxy_general(self):
values = StatTestModel.objects.aggregate(regrsxy=RegrSXY(y="int2", x="int1"))
self.assertEqual(values, {"regrsxy": -2.0})
def test_regr_syy_general(self):
values = StatTestModel.objects.aggregate(regrsyy=RegrSYY(y="int2", x="int1"))
self.assertEqual(values, {"regrsyy": 2.0})
def test_regr_avgx_with_related_obj_and_number_as_argument(self):
"""
        A more complex test to check that a JOIN on a field and a number
        passed as an argument work as expected.
"""
values = StatTestModel.objects.aggregate(
complex_regravgx=RegrAvgX(y=5, x="related_field__integer_field")
)
self.assertEqual(values, {"complex_regravgx": 1.0})
|
6811560ebc2298f0813c2b125836c3bbf569a5654c11a9128c21d80a0e32f637 | """
Test PostgreSQL full text search.
These tests use dialogue from the 1975 film Monty Python and the Holy Grail.
All text copyright Python (Monty) Pictures. Thanks to sacred-texts.com for the
transcript.
"""
from django.db import connection
from django.db.models import F, Value
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import Character, Line, LineSavedSearch, Scene
try:
from django.contrib.postgres.search import (
SearchConfig,
SearchHeadline,
SearchQuery,
SearchRank,
SearchVector,
)
except ImportError:
pass
class GrailTestData:
@classmethod
def setUpTestData(cls):
cls.robin = Scene.objects.create(
scene="Scene 10", setting="The dark forest of Ewing"
)
cls.minstrel = Character.objects.create(name="Minstrel")
verses = [
(
"Bravely bold Sir Robin, rode forth from Camelot. "
"He was not afraid to die, o Brave Sir Robin. "
"He was not at all afraid to be killed in nasty ways. "
"Brave, brave, brave, brave Sir Robin"
),
(
"He was not in the least bit scared to be mashed into a pulp, "
"Or to have his eyes gouged out, and his elbows broken. "
"To have his kneecaps split, and his body burned away, "
"And his limbs all hacked and mangled, brave Sir Robin!"
),
(
"His head smashed in and his heart cut out, "
"And his liver removed and his bowels unplugged, "
"And his nostrils ripped and his bottom burned off,"
"And his --"
),
]
cls.verses = [
Line.objects.create(
scene=cls.robin,
character=cls.minstrel,
dialogue=verse,
)
for verse in verses
]
cls.verse0, cls.verse1, cls.verse2 = cls.verses
cls.witch_scene = Scene.objects.create(
scene="Scene 5", setting="Sir Bedemir's Castle"
)
bedemir = Character.objects.create(name="Bedemir")
crowd = Character.objects.create(name="Crowd")
witch = Character.objects.create(name="Witch")
duck = Character.objects.create(name="Duck")
cls.bedemir0 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue="We shall use my larger scales!",
dialogue_config="english",
)
cls.bedemir1 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue="Right, remove the supports!",
dialogue_config="english",
)
cls.duck = Line.objects.create(
scene=cls.witch_scene, character=duck, dialogue=None
)
cls.crowd = Line.objects.create(
scene=cls.witch_scene, character=crowd, dialogue="A witch! A witch!"
)
cls.witch = Line.objects.create(
scene=cls.witch_scene, character=witch, dialogue="It's a fair cop."
)
trojan_rabbit = Scene.objects.create(
scene="Scene 8", setting="The castle of Our Master Ruiz' de lu la Ramper"
)
guards = Character.objects.create(name="French Guards")
cls.french = Line.objects.create(
scene=trojan_rabbit,
character=guards,
dialogue="Oh. Un beau cadeau. Oui oui.",
dialogue_config="french",
)
class SimpleSearchTest(GrailTestData, PostgreSQLTestCase):
def test_simple(self):
searched = Line.objects.filter(dialogue__search="elbows")
self.assertSequenceEqual(searched, [self.verse1])
def test_non_exact_match(self):
self.check_default_text_search_config()
searched = Line.objects.filter(dialogue__search="hearts")
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
self.check_default_text_search_config()
searched = Line.objects.filter(dialogue__search="heart bowel")
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms_with_partial_match(self):
searched = Line.objects.filter(dialogue__search="Robin killed")
self.assertSequenceEqual(searched, [self.verse0])
def test_search_query_config(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery("nostrils", config="simple"),
)
self.assertSequenceEqual(searched, [self.verse2])
def test_search_with_F_expression(self):
# Non-matching query.
LineSavedSearch.objects.create(line=self.verse1, query="hearts")
# Matching query.
match = LineSavedSearch.objects.create(line=self.verse1, query="elbows")
for query_expression in [F("query"), SearchQuery(F("query"))]:
with self.subTest(query_expression):
searched = LineSavedSearch.objects.filter(
line__dialogue__search=query_expression,
)
self.assertSequenceEqual(searched, [match])
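# A rough sketch (not the exact compiled SQL) of what the implicit `__search`
# lookup above expands to on PostgreSQL:
#
#   to_tsvector(COALESCE("dialogue", '')) @@ plainto_tsquery('elbows')
#
# i.e. plain strings are implicitly wrapped in SearchVector/SearchQuery.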
class SearchVectorFieldTest(GrailTestData, PostgreSQLTestCase):
def test_existing_vector(self):
Line.objects.update(dialogue_search_vector=SearchVector("dialogue"))
searched = Line.objects.filter(
dialogue_search_vector=SearchQuery("Robin killed")
)
self.assertSequenceEqual(searched, [self.verse0])
def test_existing_vector_config_explicit(self):
Line.objects.update(dialogue_search_vector=SearchVector("dialogue"))
searched = Line.objects.filter(
dialogue_search_vector=SearchQuery("cadeaux", config="french")
)
self.assertSequenceEqual(searched, [self.french])
def test_single_coalesce_expression(self):
searched = Line.objects.annotate(search=SearchVector("dialogue")).filter(
search="cadeaux"
)
self.assertNotIn("COALESCE(COALESCE", str(searched.query))
class SearchConfigTests(PostgreSQLSimpleTestCase):
def test_from_parameter(self):
self.assertIsNone(SearchConfig.from_parameter(None))
self.assertEqual(SearchConfig.from_parameter("foo"), SearchConfig("foo"))
self.assertEqual(
SearchConfig.from_parameter(SearchConfig("bar")), SearchConfig("bar")
)
class MultipleFieldsTest(GrailTestData, PostgreSQLTestCase):
def test_simple_on_dialogue(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="elbows")
self.assertSequenceEqual(searched, [self.verse1])
def test_simple_on_scene(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="Forest")
self.assertCountEqual(searched, self.verses)
def test_non_exact_match(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="heart")
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="heart forest")
self.assertSequenceEqual(searched, [self.verse2])
def test_terms_adjacent(self):
searched = Line.objects.annotate(
search=SearchVector("character__name", "dialogue"),
).filter(search="minstrel")
self.assertCountEqual(searched, self.verses)
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="minstrelbravely")
self.assertSequenceEqual(searched, [])
def test_search_with_null(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="bedemir")
self.assertCountEqual(
searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck]
)
def test_search_with_non_text(self):
searched = Line.objects.annotate(
search=SearchVector("id"),
).filter(search=str(self.crowd.id))
self.assertSequenceEqual(searched, [self.crowd])
def test_phrase_search(self):
line_qs = Line.objects.annotate(search=SearchVector("dialogue"))
searched = line_qs.filter(
search=SearchQuery("burned body his away", search_type="phrase")
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery("his body burned away", search_type="phrase")
)
self.assertSequenceEqual(searched, [self.verse1])
def test_phrase_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
)
searched = line_qs.filter(
search=SearchQuery("cadeau beau un", search_type="phrase", config="french"),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery("un beau cadeau", search_type="phrase", config="french"),
)
self.assertSequenceEqual(searched, [self.french])
def test_raw_search(self):
line_qs = Line.objects.annotate(search=SearchVector("dialogue"))
searched = line_qs.filter(search=SearchQuery("Robin", search_type="raw"))
self.assertCountEqual(searched, [self.verse0, self.verse1])
searched = line_qs.filter(
search=SearchQuery("Robin & !'Camelot'", search_type="raw")
)
self.assertSequenceEqual(searched, [self.verse1])
def test_raw_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector("dialogue", config="french")
)
searched = line_qs.filter(
search=SearchQuery(
"'cadeaux' & 'beaux'", search_type="raw", config="french"
),
)
self.assertSequenceEqual(searched, [self.french])
def test_web_search(self):
line_qs = Line.objects.annotate(search=SearchVector("dialogue"))
searched = line_qs.filter(
search=SearchQuery(
'"burned body" "split kneecaps"',
search_type="websearch",
),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery(
'"body burned" "kneecaps split" -"nostrils"',
search_type="websearch",
),
)
self.assertSequenceEqual(searched, [self.verse1])
searched = line_qs.filter(
search=SearchQuery(
'"Sir Robin" ("kneecaps" OR "Camelot")',
search_type="websearch",
),
)
self.assertSequenceEqual(searched, [self.verse0, self.verse1])
def test_web_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
)
searched = line_qs.filter(
search=SearchQuery(
"cadeau -beau", search_type="websearch", config="french"
),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery("beau cadeau", search_type="websearch", config="french"),
)
self.assertSequenceEqual(searched, [self.french])
def test_bad_search_type(self):
with self.assertRaisesMessage(
ValueError, "Unknown search_type argument 'foo'."
):
SearchQuery("kneecaps", search_type="foo")
def test_config_query_explicit(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
).filter(search=SearchQuery("cadeaux", config="french"))
self.assertSequenceEqual(searched, [self.french])
def test_config_query_implicit(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
).filter(search="cadeaux")
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_explicit(self):
searched = Line.objects.annotate(
search=SearchVector(
"scene__setting", "dialogue", config=F("dialogue_config")
),
).filter(search=SearchQuery("cadeaux", config=F("dialogue_config")))
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_implicit(self):
searched = Line.objects.annotate(
search=SearchVector(
"scene__setting", "dialogue", config=F("dialogue_config")
),
).filter(search="cadeaux")
self.assertSequenceEqual(searched, [self.french])
class TestCombinations(GrailTestData, PostgreSQLTestCase):
def test_vector_add(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting") + SearchVector("character__name"),
).filter(search="bedemir")
self.assertCountEqual(
searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck]
)
def test_vector_add_multi(self):
searched = Line.objects.annotate(
search=(
SearchVector("scene__setting")
+ SearchVector("character__name")
+ SearchVector("dialogue")
),
).filter(search="bedemir")
self.assertCountEqual(
searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck]
)
def test_vector_combined_mismatch(self):
msg = (
"SearchVector can only be combined with other SearchVector "
"instances, got NoneType."
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None + SearchVector("character__name"))
def test_combine_different_vector_configs(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
search=(
SearchVector("dialogue", config="english")
+ SearchVector("dialogue", config="french")
),
).filter(
search=SearchQuery("cadeaux", config="french") | SearchQuery("nostrils")
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_query_and(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search=SearchQuery("bedemir") & SearchQuery("scales"))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_multiple_and(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(
search=SearchQuery("bedemir")
& SearchQuery("scales")
& SearchQuery("nostrils")
)
self.assertSequenceEqual(searched, [])
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(
search=SearchQuery("shall") & SearchQuery("use") & SearchQuery("larger")
)
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_or(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery("kneecaps") | SearchQuery("nostrils")
)
self.assertCountEqual(searched, [self.verse1, self.verse2])
def test_query_multiple_or(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery("kneecaps")
| SearchQuery("nostrils")
| SearchQuery("Sir Robin")
)
self.assertCountEqual(searched, [self.verse1, self.verse2, self.verse0])
def test_query_invert(self):
searched = Line.objects.filter(
character=self.minstrel, dialogue__search=~SearchQuery("kneecaps")
)
self.assertCountEqual(searched, [self.verse0, self.verse2])
def test_combine_different_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery("cadeau", config="french")
| SearchQuery("nostrils", config="english")
)
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_combined_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery("nostrils", config="simple")
& SearchQuery("bowels", config="simple")
),
)
self.assertSequenceEqual(searched, [self.verse2])
def test_combine_raw_phrase(self):
self.check_default_text_search_config()
searched = Line.objects.filter(
dialogue__search=(
SearchQuery("burn:*", search_type="raw", config="simple")
| SearchQuery("rode forth from Camelot", search_type="phrase")
)
)
self.assertCountEqual(searched, [self.verse0, self.verse1, self.verse2])
def test_query_combined_mismatch(self):
msg = (
"SearchQuery can only be combined with other SearchQuery "
"instances, got NoneType."
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None | SearchQuery("kneecaps"))
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None & SearchQuery("kneecaps"))
class TestRankingAndWeights(GrailTestData, PostgreSQLTestCase):
def test_ranking(self):
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"), SearchQuery("brave sir robin")
),
)
.order_by("rank")
)
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_rank_passing_untyped_args(self):
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank("dialogue", "brave sir robin"),
)
.order_by("rank")
)
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_weights_in_vector(self):
vector = SearchVector("dialogue", weight="A") + SearchVector(
"character__name", weight="D"
)
searched = (
Line.objects.filter(scene=self.witch_scene)
.annotate(
rank=SearchRank(vector, SearchQuery("witch")),
)
.order_by("-rank")[:2]
)
self.assertSequenceEqual(searched, [self.crowd, self.witch])
vector = SearchVector("dialogue", weight="D") + SearchVector(
"character__name", weight="A"
)
searched = (
Line.objects.filter(scene=self.witch_scene)
.annotate(
rank=SearchRank(vector, SearchQuery("witch")),
)
.order_by("-rank")[:2]
)
self.assertSequenceEqual(searched, [self.witch, self.crowd])
def test_ranked_custom_weights(self):
vector = SearchVector("dialogue", weight="D") + SearchVector(
"character__name", weight="A"
)
weights = [1.0, 0.0, 0.0, 0.5]
searched = (
Line.objects.filter(scene=self.witch_scene)
.annotate(
rank=SearchRank(vector, SearchQuery("witch"), weights=weights),
)
.order_by("-rank")[:2]
)
self.assertSequenceEqual(searched, [self.crowd, self.witch])
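    # The weights list is passed through to ts_rank() in {D, C, B, A} order,
    # so [1.0, 0.0, 0.0, 0.5] above gives weight-D matches (dialogue) more
    # influence than weight-A matches (character__name).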
def test_ranking_chaining(self):
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"), SearchQuery("brave sir robin")
),
)
.filter(rank__gt=0.3)
)
self.assertSequenceEqual(searched, [self.verse0])
def test_cover_density_ranking(self):
not_dense_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue=(
"Bravely taking to his feet, he beat a very brave retreat. "
"A brave retreat brave Sir Robin."
),
)
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"),
SearchQuery("brave robin"),
cover_density=True,
),
)
.order_by("rank", "-pk")
)
self.assertSequenceEqual(
searched,
[self.verse2, not_dense_verse, self.verse1, self.verse0],
)
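    # cover_density=True makes SearchRank use ts_rank_cd() instead of
    # ts_rank(); ts_rank_cd() penalizes documents whose query terms are far
    # apart ("cover density" ranking).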
def test_ranking_with_normalization(self):
short_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue="A brave retreat brave Sir Robin.",
)
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"),
SearchQuery("brave sir robin"),
# Divide the rank by the document length.
normalization=2,
),
)
.order_by("rank")
)
self.assertSequenceEqual(
searched,
[self.verse2, self.verse1, self.verse0, short_verse],
)
def test_ranking_with_masked_normalization(self):
short_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue="A brave retreat brave Sir Robin.",
)
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"),
SearchQuery("brave sir robin"),
# Divide the rank by the document length and by the number of
# unique words in document.
normalization=Value(2).bitor(Value(8)),
),
)
.order_by("rank")
)
self.assertSequenceEqual(
searched,
[self.verse2, self.verse1, self.verse0, short_verse],
)
class SearchVectorIndexTests(PostgreSQLTestCase):
def test_search_vector_index(self):
"""SearchVector generates IMMUTABLE SQL in order to be indexable."""
# This test should be moved to test_indexes and use a functional
# index instead once support lands (see #26167).
query = Line.objects.all().query
resolved = SearchVector("id", "dialogue", config="english").resolve_expression(
query
)
compiler = query.get_compiler(connection.alias)
sql, params = resolved.as_sql(compiler, connection)
# Indexed function must be IMMUTABLE.
with connection.cursor() as cursor:
cursor.execute(
"CREATE INDEX search_vector_index ON %s USING GIN (%s)"
% (Line._meta.db_table, sql),
params,
)
class SearchQueryTests(PostgreSQLSimpleTestCase):
def test_str(self):
tests = (
(~SearchQuery("a"), "~SearchQuery(Value('a'))"),
(
(SearchQuery("a") | SearchQuery("b"))
& (SearchQuery("c") | SearchQuery("d")),
"((SearchQuery(Value('a')) || SearchQuery(Value('b'))) && "
"(SearchQuery(Value('c')) || SearchQuery(Value('d'))))",
),
(
SearchQuery("a") & (SearchQuery("b") | SearchQuery("c")),
"(SearchQuery(Value('a')) && (SearchQuery(Value('b')) || "
"SearchQuery(Value('c'))))",
),
(
(SearchQuery("a") | SearchQuery("b")) & SearchQuery("c"),
"((SearchQuery(Value('a')) || SearchQuery(Value('b'))) && "
"SearchQuery(Value('c')))",
),
(
SearchQuery("a")
& (SearchQuery("b") & (SearchQuery("c") | SearchQuery("d"))),
"(SearchQuery(Value('a')) && (SearchQuery(Value('b')) && "
"(SearchQuery(Value('c')) || SearchQuery(Value('d')))))",
),
)
for query, expected_str in tests:
with self.subTest(query=query):
self.assertEqual(str(query), expected_str)
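# These string forms mirror PostgreSQL's tsquery operators: && (AND),
# || (OR), and negation (!! in SQL, rendered as ~ here).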
class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase):
def test_headline(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline(
F("dialogue"),
SearchQuery("brave sir robin"),
config=SearchConfig("english"),
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"<b>Robin</b>. He was not at all afraid to be killed in nasty "
"ways. <b>Brave</b>, <b>brave</b>, <b>brave</b>, <b>brave</b> "
"<b>Sir</b> <b>Robin</b>",
)
def test_headline_untyped_args(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline("dialogue", "killed", config="english"),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"Robin. He was not at all afraid to be <b>killed</b> in nasty "
"ways. Brave, brave, brave, brave Sir Robin",
)
def test_headline_with_config(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("cadeaux", config="french"),
config="french",
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
"Oh. Un beau <b>cadeau</b>. Oui oui.",
)
def test_headline_with_config_from_field(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("cadeaux", config=F("dialogue_config")),
config=F("dialogue_config"),
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
"Oh. Un beau <b>cadeau</b>. Oui oui.",
)
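    # SearchHeadline compiles to PostgreSQL's ts_headline(); keyword
    # arguments such as start_sel, stop_sel, highlight_all, short_word,
    # min_words, max_words, max_fragments, and fragment_delimiter are
    # rendered into its options string, e.g. 'StartSel=<span>, StopSel=</span>'.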
def test_headline_separator_options(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
"brave sir robin",
start_sel="<span>",
stop_sel="</span>",
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"<span>Robin</span>. He was not at all afraid to be killed in "
"nasty ways. <span>Brave</span>, <span>brave</span>, <span>brave"
"</span>, <span>brave</span> <span>Sir</span> <span>Robin</span>",
)
def test_headline_highlight_all_option(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("brave sir robin", config="english"),
highlight_all=True,
),
).get(pk=self.verse0.pk)
self.assertIn(
"<b>Bravely</b> bold <b>Sir</b> <b>Robin</b>, rode forth from "
"Camelot. He was not afraid to die, o ",
searched.headline,
)
def test_headline_short_word_option(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("Camelot", config="english"),
short_word=5,
min_words=8,
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
(
"<b>Camelot</b>. He was not afraid to die, o Brave Sir Robin. He "
"was not at all afraid"
),
)
def test_headline_fragments_words_options(self):
self.check_default_text_search_config()
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("brave sir robin", config="english"),
fragment_delimiter="...<br>",
max_fragments=4,
max_words=3,
min_words=1,
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"<b>Sir</b> <b>Robin</b>, rode...<br>"
"<b>Brave</b> <b>Sir</b> <b>Robin</b>...<br>"
"<b>Brave</b>, <b>brave</b>, <b>brave</b>...<br>"
"<b>brave</b> <b>Sir</b> <b>Robin</b>",
)
|
9a9df49910dd84ed3727d722172b448d761e2f832e005ba87804a9c60a86b435 | import datetime
import json
from decimal import Decimal
from django import forms
from django.core import exceptions, serializers
from django.db.models import DateField, DateTimeField, F, Func, Value
from django.http import QueryDict
from django.test import override_settings
from django.test.utils import isolate_apps
from django.utils import timezone
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import (
BigAutoFieldModel,
PostgreSQLModel,
RangeLookupsModel,
RangesModel,
SmallAutoFieldModel,
)
try:
from django.contrib.postgres import fields as pg_fields
from django.contrib.postgres import forms as pg_forms
from django.contrib.postgres.validators import (
RangeMaxValueValidator,
RangeMinValueValidator,
)
from django.db.backends.postgresql.psycopg_any import (
DateRange,
DateTimeTZRange,
NumericRange,
)
except ImportError:
pass
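# psycopg_any abstracts over psycopg 2 and 3: it provides range types
# (NumericRange, DateRange, DateTimeTZRange) compatible with whichever
# driver is installed, so these tests run unchanged under either.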
@isolate_apps("postgres_tests")
class BasicTests(PostgreSQLSimpleTestCase):
def test_get_field_display(self):
class Model(PostgreSQLModel):
field = pg_fields.IntegerRangeField(
choices=[
["1-50", [((1, 25), "1-25"), ([26, 50], "26-50")]],
((51, 100), "51-100"),
],
)
tests = (
((1, 25), "1-25"),
([26, 50], "26-50"),
((51, 100), "51-100"),
((1, 2), "(1, 2)"),
([1, 2], "[1, 2]"),
)
for value, display in tests:
with self.subTest(value=value, display=display):
instance = Model(field=value)
self.assertEqual(instance.get_field_display(), display)
def test_discrete_range_fields_unsupported_default_bounds(self):
discrete_range_types = [
pg_fields.BigIntegerRangeField,
pg_fields.IntegerRangeField,
pg_fields.DateRangeField,
]
for field_type in discrete_range_types:
msg = f"Cannot use 'default_bounds' with {field_type.__name__}."
with self.assertRaisesMessage(TypeError, msg):
field_type(choices=[((51, 100), "51-100")], default_bounds="[]")
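    # PostgreSQL canonicalizes discrete range types (int4range, int8range,
    # daterange) to the '[)' form, so a custom default_bounds would be
    # silently rewritten; Django forbids the option on those fields.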
def test_continuous_range_fields_default_bounds(self):
continuous_range_types = [
pg_fields.DecimalRangeField,
pg_fields.DateTimeRangeField,
]
for field_type in continuous_range_types:
field = field_type(choices=[((51, 100), "51-100")], default_bounds="[]")
self.assertEqual(field.default_bounds, "[]")
def test_invalid_default_bounds(self):
tests = [")]", ")[", "](", "])", "([", "[(", "x", "", None]
msg = "default_bounds must be one of '[)', '(]', '()', or '[]'."
for invalid_bounds in tests:
with self.assertRaisesMessage(ValueError, msg):
pg_fields.DecimalRangeField(default_bounds=invalid_bounds)
def test_deconstruct(self):
field = pg_fields.DecimalRangeField()
*_, kwargs = field.deconstruct()
self.assertEqual(kwargs, {})
field = pg_fields.DecimalRangeField(default_bounds="[]")
*_, kwargs = field.deconstruct()
self.assertEqual(kwargs, {"default_bounds": "[]"})
class TestSaveLoad(PostgreSQLTestCase):
def test_all_fields(self):
now = timezone.now()
instance = RangesModel(
ints=NumericRange(0, 10),
bigints=NumericRange(10, 20),
decimals=NumericRange(20, 30),
timestamps=DateTimeTZRange(now - datetime.timedelta(hours=1), now),
dates=DateRange(now.date() - datetime.timedelta(days=1), now.date()),
)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(instance.ints, loaded.ints)
self.assertEqual(instance.bigints, loaded.bigints)
self.assertEqual(instance.decimals, loaded.decimals)
self.assertEqual(instance.timestamps, loaded.timestamps)
self.assertEqual(instance.dates, loaded.dates)
def test_range_object(self):
r = NumericRange(0, 10)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_tuple(self):
instance = RangesModel(ints=(0, 10))
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(NumericRange(0, 10), loaded.ints)
def test_tuple_range_with_default_bounds(self):
range_ = (timezone.now(), timezone.now() + datetime.timedelta(hours=1))
RangesModel.objects.create(timestamps_closed_bounds=range_, timestamps=range_)
loaded = RangesModel.objects.get()
self.assertEqual(
loaded.timestamps_closed_bounds,
DateTimeTZRange(range_[0], range_[1], "[]"),
)
self.assertEqual(
loaded.timestamps,
DateTimeTZRange(range_[0], range_[1], "[)"),
)
def test_range_object_boundaries(self):
r = NumericRange(0, 10, "[]")
instance = RangesModel(decimals=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.decimals)
self.assertIn(10, loaded.decimals)
def test_range_object_boundaries_range_with_default_bounds(self):
range_ = DateTimeTZRange(
timezone.now(),
timezone.now() + datetime.timedelta(hours=1),
bounds="()",
)
RangesModel.objects.create(timestamps_closed_bounds=range_)
loaded = RangesModel.objects.get()
self.assertEqual(loaded.timestamps_closed_bounds, range_)
def test_unbounded(self):
r = NumericRange(None, None, "()")
instance = RangesModel(decimals=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.decimals)
def test_empty(self):
r = NumericRange(empty=True)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_null(self):
instance = RangesModel(ints=None)
instance.save()
loaded = RangesModel.objects.get()
self.assertIsNone(loaded.ints)
def test_model_set_on_base_field(self):
instance = RangesModel()
field = instance._meta.get_field("ints")
self.assertEqual(field.model, RangesModel)
self.assertEqual(field.base_field.model, RangesModel)
class TestRangeContainsLookup(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.timestamps = [
datetime.datetime(year=2016, month=1, day=1),
datetime.datetime(year=2016, month=1, day=2, hour=1),
datetime.datetime(year=2016, month=1, day=2, hour=12),
datetime.datetime(year=2016, month=1, day=3),
datetime.datetime(year=2016, month=1, day=3, hour=1),
datetime.datetime(year=2016, month=2, day=2),
]
cls.aware_timestamps = [
timezone.make_aware(timestamp) for timestamp in cls.timestamps
]
cls.dates = [
datetime.date(year=2016, month=1, day=1),
datetime.date(year=2016, month=1, day=2),
datetime.date(year=2016, month=1, day=3),
datetime.date(year=2016, month=1, day=4),
datetime.date(year=2016, month=2, day=2),
datetime.date(year=2016, month=2, day=3),
]
cls.obj = RangesModel.objects.create(
dates=(cls.dates[0], cls.dates[3]),
dates_inner=(cls.dates[1], cls.dates[2]),
timestamps=(cls.timestamps[0], cls.timestamps[3]),
timestamps_inner=(cls.timestamps[1], cls.timestamps[2]),
)
cls.aware_obj = RangesModel.objects.create(
dates=(cls.dates[0], cls.dates[3]),
dates_inner=(cls.dates[1], cls.dates[2]),
timestamps=(cls.aware_timestamps[0], cls.aware_timestamps[3]),
timestamps_inner=(cls.timestamps[1], cls.timestamps[2]),
)
# Objects that don't match any queries.
for i in range(3, 4):
RangesModel.objects.create(
dates=(cls.dates[i], cls.dates[i + 1]),
timestamps=(cls.timestamps[i], cls.timestamps[i + 1]),
)
RangesModel.objects.create(
dates=(cls.dates[i], cls.dates[i + 1]),
timestamps=(cls.aware_timestamps[i], cls.aware_timestamps[i + 1]),
)
def test_datetime_range_contains(self):
filter_args = (
self.timestamps[1],
self.aware_timestamps[1],
(self.timestamps[1], self.timestamps[2]),
(self.aware_timestamps[1], self.aware_timestamps[2]),
Value(self.dates[0]),
Func(F("dates"), function="lower", output_field=DateTimeField()),
F("timestamps_inner"),
)
for filter_arg in filter_args:
with self.subTest(filter_arg=filter_arg):
self.assertCountEqual(
RangesModel.objects.filter(**{"timestamps__contains": filter_arg}),
[self.obj, self.aware_obj],
)
def test_date_range_contains(self):
filter_args = (
self.timestamps[1],
(self.dates[1], self.dates[2]),
Value(self.dates[0], output_field=DateField()),
Func(F("timestamps"), function="lower", output_field=DateField()),
F("dates_inner"),
)
for filter_arg in filter_args:
with self.subTest(filter_arg=filter_arg):
self.assertCountEqual(
RangesModel.objects.filter(**{"dates__contains": filter_arg}),
[self.obj, self.aware_obj],
)
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = RangesModel.objects.bulk_create(
[
RangesModel(ints=NumericRange(0, 10)),
RangesModel(ints=NumericRange(5, 15)),
RangesModel(ints=NumericRange(None, 0)),
RangesModel(ints=NumericRange(empty=True)),
RangesModel(ints=None),
]
)
def test_exact(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__exact=NumericRange(0, 10)),
[self.objs[0]],
)
def test_isnull(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isnull=True),
[self.objs[4]],
)
def test_isempty(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isempty=True),
[self.objs[3]],
)
def test_contains(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=8),
[self.objs[0], self.objs[1]],
)
def test_contains_range(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=NumericRange(3, 8)),
[self.objs[0]],
)
def test_contained_by(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contained_by=NumericRange(0, 20)),
[self.objs[0], self.objs[1], self.objs[3]],
)
def test_overlap(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__overlap=NumericRange(3, 8)),
[self.objs[0], self.objs[1]],
)
def test_fully_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_lt=NumericRange(5, 10)),
[self.objs[2]],
)
def test_fully_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_gt=NumericRange(5, 10)),
[],
)
def test_not_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_lt=NumericRange(5, 10)),
[self.objs[1]],
)
def test_not_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_gt=NumericRange(5, 10)),
[self.objs[0], self.objs[2]],
)
def test_adjacent_to(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__adjacent_to=NumericRange(0, 5)),
[self.objs[1], self.objs[2]],
)
def test_startswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith=0),
[self.objs[0]],
)
def test_endswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__endswith=0),
[self.objs[2]],
)
def test_startswith_chaining(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith__gte=0),
[self.objs[0], self.objs[1]],
)
def test_bound_type(self):
decimals = RangesModel.objects.bulk_create(
[
RangesModel(decimals=NumericRange(None, 10)),
RangesModel(decimals=NumericRange(10, None)),
RangesModel(decimals=NumericRange(5, 15)),
RangesModel(decimals=NumericRange(5, 15, "(]")),
]
)
tests = [
("lower_inc", True, [decimals[1], decimals[2]]),
("lower_inc", False, [decimals[0], decimals[3]]),
("lower_inf", True, [decimals[0]]),
("lower_inf", False, [decimals[1], decimals[2], decimals[3]]),
("upper_inc", True, [decimals[3]]),
("upper_inc", False, [decimals[0], decimals[1], decimals[2]]),
("upper_inf", True, [decimals[1]]),
("upper_inf", False, [decimals[0], decimals[2], decimals[3]]),
]
        for lookup, filter_arg, expected_result in tests:
            with self.subTest(lookup=lookup, filter_arg=filter_arg):
                self.assertSequenceEqual(
                    RangesModel.objects.filter(**{"decimals__%s" % lookup: filter_arg}),
                    expected_result,
)
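# A rough mapping of the range lookups exercised above to PostgreSQL
# operators and functions (a sketch, not the compiled SQL):
#   contains -> @>, contained_by -> <@, overlap -> &&,
#   fully_lt -> <<, fully_gt -> >>, not_lt -> &>, not_gt -> &<,
#   adjacent_to -> -|-, startswith -> lower(), endswith -> upper(),
#   isempty -> isempty().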
class TestQueryingWithRanges(PostgreSQLTestCase):
def test_date_range(self):
objs = [
RangeLookupsModel.objects.create(date="2015-01-01"),
RangeLookupsModel.objects.create(date="2015-05-05"),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
date__contained_by=DateRange("2015-01-01", "2015-05-04")
),
[objs[0]],
)
def test_date_range_datetime_field(self):
objs = [
RangeLookupsModel.objects.create(timestamp="2015-01-01"),
RangeLookupsModel.objects.create(timestamp="2015-05-05"),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
timestamp__date__contained_by=DateRange("2015-01-01", "2015-05-04")
),
[objs[0]],
)
def test_datetime_range(self):
objs = [
RangeLookupsModel.objects.create(timestamp="2015-01-01T09:00:00"),
RangeLookupsModel.objects.create(timestamp="2015-05-05T17:00:00"),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
timestamp__contained_by=DateTimeTZRange(
"2015-01-01T09:00", "2015-05-04T23:55"
)
),
[objs[0]],
)
def test_small_integer_field_contained_by(self):
objs = [
RangeLookupsModel.objects.create(small_integer=8),
RangeLookupsModel.objects.create(small_integer=4),
RangeLookupsModel.objects.create(small_integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
small_integer__contained_by=NumericRange(4, 6)
),
[objs[1]],
)
def test_integer_range(self):
objs = [
RangeLookupsModel.objects.create(integer=5),
RangeLookupsModel.objects.create(integer=99),
RangeLookupsModel.objects.create(integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(integer__contained_by=NumericRange(1, 98)),
[objs[0]],
)
def test_biginteger_range(self):
objs = [
RangeLookupsModel.objects.create(big_integer=5),
RangeLookupsModel.objects.create(big_integer=99),
RangeLookupsModel.objects.create(big_integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
big_integer__contained_by=NumericRange(1, 98)
),
[objs[0]],
)
def test_decimal_field_contained_by(self):
objs = [
RangeLookupsModel.objects.create(decimal_field=Decimal("1.33")),
RangeLookupsModel.objects.create(decimal_field=Decimal("2.88")),
RangeLookupsModel.objects.create(decimal_field=Decimal("99.17")),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
decimal_field__contained_by=NumericRange(
Decimal("1.89"), Decimal("7.91")
),
),
[objs[1]],
)
def test_float_range(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=NumericRange(1, 98)),
[objs[0]],
)
def test_small_auto_field_contained_by(self):
objs = SmallAutoFieldModel.objects.bulk_create(
[SmallAutoFieldModel() for i in range(1, 5)]
)
self.assertSequenceEqual(
SmallAutoFieldModel.objects.filter(
id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
),
objs[1:3],
)
def test_auto_field_contained_by(self):
objs = RangeLookupsModel.objects.bulk_create(
[RangeLookupsModel() for i in range(1, 5)]
)
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
),
objs[1:3],
)
def test_big_auto_field_contained_by(self):
objs = BigAutoFieldModel.objects.bulk_create(
[BigAutoFieldModel() for i in range(1, 5)]
)
self.assertSequenceEqual(
BigAutoFieldModel.objects.filter(
id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
),
objs[1:3],
)
def test_f_ranges(self):
parent = RangesModel.objects.create(decimals=NumericRange(0, 10))
objs = [
RangeLookupsModel.objects.create(float=5, parent=parent),
RangeLookupsModel.objects.create(float=99, parent=parent),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=F("parent__decimals")),
[objs[0]],
)
def test_exclude(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.exclude(float__contained_by=NumericRange(0, 100)),
[objs[2]],
)
class TestSerialization(PostgreSQLSimpleTestCase):
test_data = (
'[{"fields": {"ints": "{\\"upper\\": \\"10\\", \\"lower\\": \\"0\\", '
'\\"bounds\\": \\"[)\\"}", "decimals": "{\\"empty\\": true}", '
'"bigints": null, "timestamps": '
'"{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
'\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"[)\\"}", '
'"timestamps_inner": null, '
'"timestamps_closed_bounds": "{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
'\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"()\\"}", '
'"dates": "{\\"upper\\": \\"2014-02-02\\", \\"lower\\": \\"2014-01-01\\", '
'\\"bounds\\": \\"[)\\"}", "dates_inner": null }, '
'"model": "postgres_tests.rangesmodel", "pk": null}]'
)
lower_date = datetime.date(2014, 1, 1)
upper_date = datetime.date(2014, 2, 2)
lower_dt = datetime.datetime(2014, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc)
upper_dt = datetime.datetime(2014, 2, 2, 12, 12, 12, tzinfo=datetime.timezone.utc)
def test_dumping(self):
instance = RangesModel(
ints=NumericRange(0, 10),
decimals=NumericRange(empty=True),
timestamps=DateTimeTZRange(self.lower_dt, self.upper_dt),
timestamps_closed_bounds=DateTimeTZRange(
self.lower_dt,
self.upper_dt,
bounds="()",
),
dates=DateRange(self.lower_date, self.upper_date),
)
data = serializers.serialize("json", [instance])
dumped = json.loads(data)
for field in ("ints", "dates", "timestamps", "timestamps_closed_bounds"):
dumped[0]["fields"][field] = json.loads(dumped[0]["fields"][field])
check = json.loads(self.test_data)
for field in ("ints", "dates", "timestamps", "timestamps_closed_bounds"):
check[0]["fields"][field] = json.loads(check[0]["fields"][field])
self.assertEqual(dumped, check)
def test_loading(self):
instance = list(serializers.deserialize("json", self.test_data))[0].object
self.assertEqual(instance.ints, NumericRange(0, 10))
self.assertEqual(instance.decimals, NumericRange(empty=True))
self.assertIsNone(instance.bigints)
self.assertEqual(instance.dates, DateRange(self.lower_date, self.upper_date))
self.assertEqual(
instance.timestamps, DateTimeTZRange(self.lower_dt, self.upper_dt)
)
self.assertEqual(
instance.timestamps_closed_bounds,
DateTimeTZRange(self.lower_dt, self.upper_dt, bounds="()"),
)
def test_serialize_range_with_null(self):
instance = RangesModel(ints=NumericRange(None, 10))
data = serializers.serialize("json", [instance])
new_instance = list(serializers.deserialize("json", data))[0].object
self.assertEqual(new_instance.ints, NumericRange(None, 10))
instance = RangesModel(ints=NumericRange(10, None))
data = serializers.serialize("json", [instance])
new_instance = list(serializers.deserialize("json", data))[0].object
self.assertEqual(new_instance.ints, NumericRange(10, None))
class TestChecks(PostgreSQLSimpleTestCase):
def test_choices_tuple_list(self):
class Model(PostgreSQLModel):
field = pg_fields.IntegerRangeField(
choices=[
["1-50", [((1, 25), "1-25"), ([26, 50], "26-50")]],
((51, 100), "51-100"),
],
)
self.assertEqual(Model._meta.get_field("field").check(), [])
class TestValidators(PostgreSQLSimpleTestCase):
def test_max(self):
validator = RangeMaxValueValidator(5)
validator(NumericRange(0, 5))
msg = "Ensure that the upper bound of the range is not greater than 5."
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], msg)
self.assertEqual(cm.exception.code, "max_value")
with self.assertRaisesMessage(exceptions.ValidationError, msg):
            validator(NumericRange(0, None))  # an unbounded range
def test_min(self):
validator = RangeMinValueValidator(5)
validator(NumericRange(10, 15))
msg = "Ensure that the lower bound of the range is not less than 5."
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], msg)
self.assertEqual(cm.exception.code, "min_value")
with self.assertRaisesMessage(exceptions.ValidationError, msg):
            validator(NumericRange(None, 10))  # an unbounded range
class TestFormField(PostgreSQLSimpleTestCase):
def test_valid_integer(self):
field = pg_forms.IntegerRangeField()
value = field.clean(["1", "2"])
self.assertEqual(value, NumericRange(1, 2))
def test_valid_decimal(self):
field = pg_forms.DecimalRangeField()
value = field.clean(["1.12345", "2.001"])
self.assertEqual(value, NumericRange(Decimal("1.12345"), Decimal("2.001")))
def test_valid_timestamps(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(["01/01/2014 00:00:00", "02/02/2014 12:12:12"])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(value, DateTimeTZRange(lower, upper))
def test_valid_dates(self):
field = pg_forms.DateRangeField()
value = field.clean(["01/01/2014", "02/02/2014"])
lower = datetime.date(2014, 1, 1)
upper = datetime.date(2014, 2, 2)
self.assertEqual(value, DateRange(lower, upper))
def test_using_split_datetime_widget(self):
class SplitDateTimeRangeField(pg_forms.DateTimeRangeField):
base_field = forms.SplitDateTimeField
class SplitForm(forms.Form):
field = SplitDateTimeRangeField()
form = SplitForm()
self.assertHTMLEqual(
str(form),
"""
<div>
<fieldset>
<legend>Field:</legend>
<input id="id_field_0_0" name="field_0_0" type="text">
<input id="id_field_0_1" name="field_0_1" type="text">
<input id="id_field_1_0" name="field_1_0" type="text">
<input id="id_field_1_1" name="field_1_1" type="text">
</fieldset>
</div>
""",
)
form = SplitForm(
{
"field_0_0": "01/01/2014",
"field_0_1": "00:00:00",
"field_1_0": "02/02/2014",
"field_1_1": "12:12:12",
}
)
self.assertTrue(form.is_valid())
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(form.cleaned_data["field"], DateTimeTZRange(lower, upper))
def test_none(self):
field = pg_forms.IntegerRangeField(required=False)
value = field.clean(["", ""])
self.assertIsNone(value)
def test_datetime_form_as_table(self):
class DateTimeRangeForm(forms.Form):
datetime_field = pg_forms.DateTimeRangeField(show_hidden_initial=True)
form = DateTimeRangeForm()
self.assertHTMLEqual(
form.as_table(),
"""
<tr><th>
<label>Datetime field:</label>
</th><td>
<input type="text" name="datetime_field_0" id="id_datetime_field_0">
<input type="text" name="datetime_field_1" id="id_datetime_field_1">
<input type="hidden" name="initial-datetime_field_0"
id="initial-id_datetime_field_0">
<input type="hidden" name="initial-datetime_field_1"
id="initial-id_datetime_field_1">
</td></tr>
""",
)
form = DateTimeRangeForm(
{
"datetime_field_0": "2010-01-01 11:13:00",
"datetime_field_1": "2020-12-12 16:59:00",
}
)
self.assertHTMLEqual(
form.as_table(),
"""
<tr><th>
<label>Datetime field:</label>
</th><td>
<input type="text" name="datetime_field_0"
value="2010-01-01 11:13:00" id="id_datetime_field_0">
<input type="text" name="datetime_field_1"
value="2020-12-12 16:59:00" id="id_datetime_field_1">
<input type="hidden" name="initial-datetime_field_0"
value="2010-01-01 11:13:00" id="initial-id_datetime_field_0">
<input type="hidden" name="initial-datetime_field_1"
value="2020-12-12 16:59:00" id="initial-id_datetime_field_1"></td></tr>
""",
)
def test_datetime_form_initial_data(self):
class DateTimeRangeForm(forms.Form):
datetime_field = pg_forms.DateTimeRangeField(show_hidden_initial=True)
data = QueryDict(mutable=True)
data.update(
{
"datetime_field_0": "2010-01-01 11:13:00",
"datetime_field_1": "",
"initial-datetime_field_0": "2010-01-01 10:12:00",
"initial-datetime_field_1": "",
}
)
form = DateTimeRangeForm(data=data)
self.assertTrue(form.has_changed())
data["initial-datetime_field_0"] = "2010-01-01 11:13:00"
form = DateTimeRangeForm(data=data)
self.assertFalse(form.has_changed())
def test_rendering(self):
class RangeForm(forms.Form):
ints = pg_forms.IntegerRangeField()
self.assertHTMLEqual(
str(RangeForm()),
"""
<div>
<fieldset>
<legend>Ints:</legend>
<input id="id_ints_0" name="ints_0" type="number">
<input id="id_ints_1" name="ints_1" type="number">
</fieldset>
</div>
""",
)
def test_integer_lower_bound_higher(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["10", "2"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_integer_open(self):
field = pg_forms.IntegerRangeField()
value = field.clean(["", "0"])
self.assertEqual(value, NumericRange(None, 0))
def test_integer_incorrect_data_type(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1")
self.assertEqual(cm.exception.messages[0], "Enter two whole numbers.")
self.assertEqual(cm.exception.code, "invalid")
def test_integer_invalid_lower(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["a", "2"])
self.assertEqual(cm.exception.messages[0], "Enter a whole number.")
def test_integer_invalid_upper(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["1", "b"])
self.assertEqual(cm.exception.messages[0], "Enter a whole number.")
def test_integer_required(self):
field = pg_forms.IntegerRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean([1, ""])
self.assertEqual(value, NumericRange(1, None))
def test_decimal_lower_bound_higher(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["1.8", "1.6"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_decimal_open(self):
field = pg_forms.DecimalRangeField()
value = field.clean(["", "3.1415926"])
self.assertEqual(value, NumericRange(None, Decimal("3.1415926")))
def test_decimal_incorrect_data_type(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1.6")
self.assertEqual(cm.exception.messages[0], "Enter two numbers.")
self.assertEqual(cm.exception.code, "invalid")
def test_decimal_invalid_lower(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["a", "3.1415926"])
self.assertEqual(cm.exception.messages[0], "Enter a number.")
def test_decimal_invalid_upper(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["1.61803399", "b"])
self.assertEqual(cm.exception.messages[0], "Enter a number.")
def test_decimal_required(self):
field = pg_forms.DecimalRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean(["1.61803399", ""])
self.assertEqual(value, NumericRange(Decimal("1.61803399"), None))
def test_date_lower_bound_higher(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2013-04-09", "1976-04-16"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_date_open(self):
field = pg_forms.DateRangeField()
value = field.clean(["", "2013-04-09"])
self.assertEqual(value, DateRange(None, datetime.date(2013, 4, 9)))
def test_date_incorrect_data_type(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1")
self.assertEqual(cm.exception.messages[0], "Enter two valid dates.")
self.assertEqual(cm.exception.code, "invalid")
def test_date_invalid_lower(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["a", "2013-04-09"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date.")
def test_date_invalid_upper(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2013-04-09", "b"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date.")
def test_date_required(self):
field = pg_forms.DateRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean(["1976-04-16", ""])
self.assertEqual(value, DateRange(datetime.date(1976, 4, 16), None))
def test_date_has_changed_first(self):
self.assertTrue(
pg_forms.DateRangeField().has_changed(
["2010-01-01", "2020-12-12"],
["2010-01-31", "2020-12-12"],
)
)
def test_date_has_changed_last(self):
self.assertTrue(
pg_forms.DateRangeField().has_changed(
["2010-01-01", "2020-12-12"],
["2010-01-01", "2020-12-31"],
)
)
def test_datetime_lower_bound_higher(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2006-10-25 14:59", "2006-10-25 14:58"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_datetime_open(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(["", "2013-04-09 11:45"])
self.assertEqual(
value, DateTimeTZRange(None, datetime.datetime(2013, 4, 9, 11, 45))
)
def test_datetime_incorrect_data_type(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("2013-04-09 11:45")
self.assertEqual(cm.exception.messages[0], "Enter two valid date/times.")
self.assertEqual(cm.exception.code, "invalid")
def test_datetime_invalid_lower(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["45", "2013-04-09 11:45"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date/time.")
def test_datetime_invalid_upper(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2013-04-09 11:45", "sweet pickles"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date/time.")
def test_datetime_required(self):
field = pg_forms.DateTimeRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean(["2013-04-09 11:45", ""])
self.assertEqual(
value, DateTimeTZRange(datetime.datetime(2013, 4, 9, 11, 45), None)
)
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Johannesburg")
def test_datetime_prepare_value(self):
field = pg_forms.DateTimeRangeField()
value = field.prepare_value(
DateTimeTZRange(
datetime.datetime(2015, 5, 22, 16, 6, 33, tzinfo=datetime.timezone.utc),
None,
)
)
self.assertEqual(value, [datetime.datetime(2015, 5, 22, 18, 6, 33), None])
def test_datetime_has_changed_first(self):
self.assertTrue(
pg_forms.DateTimeRangeField().has_changed(
["2010-01-01 00:00", "2020-12-12 00:00"],
["2010-01-31 23:00", "2020-12-12 00:00"],
)
)
def test_datetime_has_changed_last(self):
self.assertTrue(
pg_forms.DateTimeRangeField().has_changed(
["2010-01-01 00:00", "2020-12-12 00:00"],
["2010-01-01 00:00", "2020-12-31 23:00"],
)
)
def test_model_field_formfield_integer(self):
model_field = pg_fields.IntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
self.assertEqual(form_field.range_kwargs, {})
def test_model_field_formfield_biginteger(self):
model_field = pg_fields.BigIntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
self.assertEqual(form_field.range_kwargs, {})
def test_model_field_formfield_float(self):
model_field = pg_fields.DecimalRangeField(default_bounds="()")
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DecimalRangeField)
self.assertEqual(form_field.range_kwargs, {"bounds": "()"})
def test_model_field_formfield_date(self):
model_field = pg_fields.DateRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateRangeField)
self.assertEqual(form_field.range_kwargs, {})
def test_model_field_formfield_datetime(self):
model_field = pg_fields.DateTimeRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateTimeRangeField)
self.assertEqual(
form_field.range_kwargs,
{"bounds": pg_fields.ranges.CANONICAL_RANGE_BOUNDS},
)
def test_model_field_formfield_datetime_default_bounds(self):
model_field = pg_fields.DateTimeRangeField(default_bounds="[]")
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateTimeRangeField)
self.assertEqual(form_field.range_kwargs, {"bounds": "[]"})
def test_model_field_with_default_bounds(self):
field = pg_forms.DateTimeRangeField(default_bounds="[]")
value = field.clean(["2014-01-01 00:00:00", "2014-02-03 12:13:14"])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 3, 12, 13, 14)
self.assertEqual(value, DateTimeTZRange(lower, upper, "[]"))
def test_has_changed(self):
for field, value in (
(pg_forms.DateRangeField(), ["2010-01-01", "2020-12-12"]),
(pg_forms.DateTimeRangeField(), ["2010-01-01 11:13", "2020-12-12 14:52"]),
(pg_forms.IntegerRangeField(), [1, 2]),
(pg_forms.DecimalRangeField(), ["1.12345", "2.001"]),
):
with self.subTest(field=field.__class__.__name__):
self.assertTrue(field.has_changed(None, value))
self.assertTrue(field.has_changed([value[0], ""], value))
self.assertTrue(field.has_changed(["", value[1]], value))
self.assertFalse(field.has_changed(value, value))
class TestWidget(PostgreSQLSimpleTestCase):
def test_range_widget(self):
f = pg_forms.ranges.DateTimeRangeField()
self.assertHTMLEqual(
f.widget.render("datetimerange", ""),
'<input type="text" name="datetimerange_0">'
'<input type="text" name="datetimerange_1">',
)
self.assertHTMLEqual(
f.widget.render("datetimerange", None),
'<input type="text" name="datetimerange_0">'
'<input type="text" name="datetimerange_1">',
)
dt_range = DateTimeTZRange(
datetime.datetime(2006, 1, 10, 7, 30), datetime.datetime(2006, 2, 12, 9, 50)
)
self.assertHTMLEqual(
f.widget.render("datetimerange", dt_range),
'<input type="text" name="datetimerange_0" value="2006-01-10 07:30:00">'
'<input type="text" name="datetimerange_1" value="2006-02-12 09:50:00">',
)
def test_range_widget_render_tuple_value(self):
field = pg_forms.ranges.DateTimeRangeField()
dt_range_tuple = (
datetime.datetime(2022, 4, 22, 10, 24),
datetime.datetime(2022, 5, 12, 9, 25),
)
self.assertHTMLEqual(
field.widget.render("datetimerange", dt_range_tuple),
'<input type="text" name="datetimerange_0" value="2022-04-22 10:24:00">'
'<input type="text" name="datetimerange_1" value="2022-05-12 09:25:00">',
)
|
8268c4014cef7e374bc53c640eec2b37a364629ba37f38a10b7defb28da6b3c2 | import unittest
from forms_tests.widget_tests.base import WidgetTest
from django.db import connection
from django.test import SimpleTestCase, TestCase, modify_settings
from django.utils.functional import cached_property
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
# To register type handlers and locate the widget's template.
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class PostgreSQLSimpleTestCase(SimpleTestCase):
pass
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
# To register type handlers and locate the widget's template.
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class PostgreSQLTestCase(TestCase):
@cached_property
def default_text_search_config(self):
with connection.cursor() as cursor:
cursor.execute("SHOW default_text_search_config")
row = cursor.fetchone()
return row[0] if row else None
def check_default_text_search_config(self):
if self.default_text_search_config != "pg_catalog.english":
self.skipTest("The default text search config is not 'english'.")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
# To locate the widget's template.
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class PostgreSQLWidgetTestCase(WidgetTest, PostgreSQLSimpleTestCase):
pass
|
f4a314f8e40c2765ff72f359b6865a80697f6c02596177b343c628fe23cd651b | from django.db import connection
from . import PostgreSQLTestCase
from .models import CharFieldModel, TextFieldModel
class UnaccentTest(PostgreSQLTestCase):
Model = CharFieldModel
@classmethod
def setUpTestData(cls):
cls.Model.objects.bulk_create(
[
cls.Model(field="àéÖ"),
cls.Model(field="aeO"),
cls.Model(field="aeo"),
]
)
def test_unaccent(self):
self.assertQuerySetEqual(
self.Model.objects.filter(field__unaccent="aeO"),
["àéÖ", "aeO"],
transform=lambda instance: instance.field,
ordered=False,
)
def test_unaccent_chained(self):
"""
Unaccent can be used chained with a lookup (which should be the case
since unaccent implements the Transform API)
"""
self.assertQuerySetEqual(
self.Model.objects.filter(field__unaccent__iexact="aeO"),
["àéÖ", "aeO", "aeo"],
transform=lambda instance: instance.field,
ordered=False,
)
self.assertQuerySetEqual(
self.Model.objects.filter(field__unaccent__endswith="éÖ"),
["àéÖ", "aeO"],
transform=lambda instance: instance.field,
ordered=False,
)
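    # Editor's note (not in the original suite): because unaccent is a
    # Transform, "field__unaccent__iexact" composes roughly into
    # UPPER(UNACCENT("field")) = UPPER(UNACCENT(%s)) on the PostgreSQL side;
    # the exact generated SQL is an approximation here.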
def test_unaccent_with_conforming_strings_off(self):
"""SQL is valid when standard_conforming_strings is off."""
with connection.cursor() as cursor:
cursor.execute("SHOW standard_conforming_strings")
disable_conforming_strings = cursor.fetchall()[0][0] == "on"
if disable_conforming_strings:
cursor.execute("SET standard_conforming_strings TO off")
try:
self.assertQuerySetEqual(
self.Model.objects.filter(field__unaccent__endswith="éÖ"),
["àéÖ", "aeO"],
transform=lambda instance: instance.field,
ordered=False,
)
finally:
if disable_conforming_strings:
cursor.execute("SET standard_conforming_strings TO on")
def test_unaccent_accentuated_needle(self):
self.assertQuerySetEqual(
self.Model.objects.filter(field__unaccent="aéÖ"),
["àéÖ", "aeO"],
transform=lambda instance: instance.field,
ordered=False,
)
class UnaccentTextFieldTest(UnaccentTest):
"""
TextField should have the exact same behavior as CharField
regarding unaccent lookups.
"""
Model = TextFieldModel
|
740f0ec6bc79031bea1b2cafe51c755a5bf6e4b698f50478f13fe5b5d22d05bf | from unittest import mock
from django.contrib.postgres.indexes import (
BloomIndex,
BrinIndex,
BTreeIndex,
GinIndex,
GistIndex,
HashIndex,
OpClass,
PostgresIndex,
SpGistIndex,
)
from django.db import NotSupportedError, connection
from django.db.models import CharField, F, Index, Q
from django.db.models.functions import Cast, Collate, Length, Lower
from django.test import skipUnlessDBFeature
from django.test.utils import register_lookup
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .fields import SearchVector, SearchVectorField
from .models import CharFieldModel, IntegerArrayModel, Scene, TextFieldModel
class IndexTestMixin:
def test_name_auto_generation(self):
index = self.index_class(fields=["field"])
index.set_name_with_model(CharFieldModel)
self.assertRegex(
index.name, r"postgres_te_field_[0-9a-f]{6}_%s" % self.index_class.suffix
)
def test_deconstruction_no_customization(self):
index = self.index_class(
fields=["title"], name="test_title_%s" % self.index_class.suffix
)
path, args, kwargs = index.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.indexes.%s" % self.index_class.__name__
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{"fields": ["title"], "name": "test_title_%s" % self.index_class.suffix},
)
def test_deconstruction_with_expressions_no_customization(self):
name = f"test_title_{self.index_class.suffix}"
index = self.index_class(Lower("title"), name=name)
path, args, kwargs = index.deconstruct()
self.assertEqual(
path,
f"django.contrib.postgres.indexes.{self.index_class.__name__}",
)
self.assertEqual(args, (Lower("title"),))
self.assertEqual(kwargs, {"name": name})
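# Editor's note: an illustrative sketch, not part of the original suite,
# showing the (path, args, kwargs) triple that deconstruct() returns and that
# migrations use to reconstruct an index. The index name is hypothetical.
def _deconstruct_sketch():
    index = BTreeIndex(fields=["title"], name="title_btree")
    path, args, kwargs = index.deconstruct()
    # path == "django.contrib.postgres.indexes.BTreeIndex", args == ()
    return path, args, kwargs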
class BloomIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BloomIndex
def test_suffix(self):
self.assertEqual(BloomIndex.suffix, "bloom")
def test_deconstruction(self):
index = BloomIndex(fields=["title"], name="test_bloom", length=80, columns=[4])
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.BloomIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_bloom",
"length": 80,
"columns": [4],
},
)
def test_invalid_fields(self):
msg = "Bloom indexes support a maximum of 32 fields."
with self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"] * 33, name="test_bloom")
def test_invalid_columns(self):
msg = "BloomIndex.columns must be a list or tuple."
with self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", columns="x")
msg = "BloomIndex.columns cannot have more values than fields."
with self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", columns=[4, 3])
def test_invalid_columns_value(self):
msg = "BloomIndex.columns must contain integers from 1 to 4095."
for length in (0, 4096):
with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", columns=[length])
def test_invalid_length(self):
msg = "BloomIndex.length must be None or an integer from 1 to 4096."
for length in (0, 4097):
with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", length=length)
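# Editor's note: an illustrative sketch (not in the original suite) of a
# BloomIndex that satisfies the limits validated above: at most 32 fields,
# per-column bit counts in 1..4095, and a signature length in 1..4096.
def _valid_bloom_index_sketch():
    return BloomIndex(fields=["title"], name="title_bloom", length=128, columns=[2])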
class BrinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BrinIndex
def test_suffix(self):
self.assertEqual(BrinIndex.suffix, "brin")
def test_deconstruction(self):
index = BrinIndex(
fields=["title"],
name="test_title_brin",
autosummarize=True,
pages_per_range=16,
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.BrinIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_title_brin",
"autosummarize": True,
"pages_per_range": 16,
},
)
def test_invalid_pages_per_range(self):
with self.assertRaisesMessage(
ValueError, "pages_per_range must be None or a positive integer"
):
BrinIndex(fields=["title"], name="test_title_brin", pages_per_range=0)
class BTreeIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BTreeIndex
def test_suffix(self):
self.assertEqual(BTreeIndex.suffix, "btree")
def test_deconstruction(self):
index = BTreeIndex(fields=["title"], name="test_title_btree", fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.BTreeIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"fields": ["title"], "name": "test_title_btree", "fillfactor": 80}
)
class GinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = GinIndex
def test_suffix(self):
self.assertEqual(GinIndex.suffix, "gin")
def test_deconstruction(self):
index = GinIndex(
fields=["title"],
name="test_title_gin",
fastupdate=True,
gin_pending_list_limit=128,
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.GinIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_title_gin",
"fastupdate": True,
"gin_pending_list_limit": 128,
},
)
class GistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = GistIndex
def test_suffix(self):
self.assertEqual(GistIndex.suffix, "gist")
def test_deconstruction(self):
index = GistIndex(
fields=["title"], name="test_title_gist", buffering=False, fillfactor=80
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.GistIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_title_gist",
"buffering": False,
"fillfactor": 80,
},
)
class HashIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = HashIndex
def test_suffix(self):
self.assertEqual(HashIndex.suffix, "hash")
def test_deconstruction(self):
index = HashIndex(fields=["title"], name="test_title_hash", fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.HashIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"fields": ["title"], "name": "test_title_hash", "fillfactor": 80}
)
class SpGistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = SpGistIndex
def test_suffix(self):
self.assertEqual(SpGistIndex.suffix, "spgist")
def test_deconstruction(self):
index = SpGistIndex(fields=["title"], name="test_title_spgist", fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.SpGistIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"fields": ["title"], "name": "test_title_spgist", "fillfactor": 80}
)
class SchemaTests(PostgreSQLTestCase):
get_opclass_query = """
SELECT opcname, c.relname FROM pg_opclass AS oc
JOIN pg_index as i on oc.oid = ANY(i.indclass)
JOIN pg_class as c on c.oid = i.indexrelid
WHERE c.relname = %s
"""
def get_constraints(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_gin_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn(
"field", self.get_constraints(IntegerArrayModel._meta.db_table)
)
# Add the index
index_name = "integer_array_model_field_gin"
index = GinIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
# Check gin index was added
self.assertEqual(constraints[index_name]["type"], GinIndex.suffix)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(
index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
)
def test_gin_fastupdate(self):
index_name = "integer_array_gin_fastupdate"
index = GinIndex(fields=["field"], name=index_name, fastupdate=False)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
self.assertEqual(constraints[index_name]["options"], ["fastupdate=off"])
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(
index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
)
def test_partial_gin_index(self):
with register_lookup(CharField, Length):
index_name = "char_field_gin_partial_idx"
index = GinIndex(
fields=["field"], name=index_name, condition=Q(field__length=40)
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_partial_gin_index_with_tablespace(self):
with register_lookup(CharField, Length):
index_name = "char_field_gin_partial_idx"
index = GinIndex(
fields=["field"],
name=index_name,
condition=Q(field__length=40),
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
self.assertIn(
'TABLESPACE "pg_default" ',
str(index.create_sql(CharFieldModel, editor)),
)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gin_parameters(self):
index_name = "integer_array_gin_params"
index = GinIndex(
fields=["field"],
name=index_name,
fastupdate=True,
gin_pending_list_limit=64,
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
self.assertIn(
") WITH (gin_pending_list_limit = 64, fastupdate = on) TABLESPACE",
str(index.create_sql(IntegerArrayModel, editor)),
)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
self.assertEqual(
constraints[index_name]["options"],
["gin_pending_list_limit=64", "fastupdate=on"],
)
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(
index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
)
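    # Editor's note (not in the original suite): per the assertions above, the
    # generated statement has roughly the shape
    #   CREATE INDEX ... USING gin (...)
    #       WITH (gin_pending_list_limit = 64, fastupdate = on)
    #       TABLESPACE "pg_default"
    # with storage parameters rendered in the WITH clause.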
def test_trigram_op_class_gin_index(self):
index_name = "trigram_op_class_gin"
index = GinIndex(OpClass(F("scene"), name="gin_trgm_ops"), name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("gin_trgm_ops", index_name)])
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]["type"], GinIndex.suffix)
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_cast_search_vector_gin_index(self):
index_name = "cast_search_vector_gin"
index = GinIndex(Cast("field", SearchVectorField()), name=index_name)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
sql = index.create_sql(TextFieldModel, editor)
table = TextFieldModel._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]["type"], GinIndex.suffix)
self.assertIs(sql.references_column(table, "field"), True)
self.assertIn("::tsvector", str(sql))
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(table))
def test_bloom_index(self):
index_name = "char_field_model_field_bloom"
index = BloomIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BloomIndex.suffix)
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_bloom_parameters(self):
index_name = "char_field_model_field_bloom_params"
index = BloomIndex(fields=["field"], name=index_name, length=512, columns=[3])
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BloomIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["length=512", "col1=3"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_brin_index(self):
index_name = "char_field_model_field_brin"
index = BrinIndex(fields=["field"], name=index_name, pages_per_range=4)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BrinIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["pages_per_range=4"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_brin_parameters(self):
index_name = "char_field_brin_params"
index = BrinIndex(fields=["field"], name=index_name, autosummarize=True)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BrinIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["autosummarize=on"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_btree_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = "char_field_model_field_btree"
index = BTreeIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], BTreeIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_btree_parameters(self):
index_name = "integer_array_btree_fillfactor"
index = BTreeIndex(fields=["field"], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BTreeIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gist_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = "char_field_model_field_gist"
index = GistIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gist_parameters(self):
index_name = "integer_array_gist_buffering"
index = GistIndex(
fields=["field"], name=index_name, buffering=True, fillfactor=80
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
self.assertEqual(
constraints[index_name]["options"], ["buffering=on", "fillfactor=80"]
)
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gist_include(self):
index_name = "scene_gist_include_setting"
index = GistIndex(name=index_name, fields=["scene"], include=["setting"])
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
self.assertEqual(constraints[index_name]["columns"], ["scene", "setting"])
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_tsvector_op_class_gist_index(self):
index_name = "tsvector_op_class_gist"
index = GistIndex(
OpClass(
SearchVector("scene", "setting", config="english"),
name="tsvector_ops",
),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
sql = index.create_sql(Scene, editor)
table = Scene._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
self.assertIs(sql.references_column(table, "scene"), True)
self.assertIs(sql.references_column(table, "setting"), True)
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(table))
def test_hash_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = "char_field_model_field_hash"
index = HashIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], HashIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_hash_parameters(self):
index_name = "integer_array_hash_fillfactor"
index = HashIndex(fields=["field"], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], HashIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_spgist_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(TextFieldModel._meta.db_table))
# Add the index.
index_name = "text_field_model_field_spgist"
index = SpGistIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
constraints = self.get_constraints(TextFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(TextFieldModel._meta.db_table)
)
def test_spgist_parameters(self):
index_name = "text_field_model_spgist_fillfactor"
index = SpGistIndex(fields=["field"], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(TextFieldModel._meta.db_table)
)
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_spgist_include(self):
index_name = "scene_spgist_include_setting"
index = SpGistIndex(name=index_name, fields=["scene"], include=["setting"])
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
self.assertEqual(constraints[index_name]["columns"], ["scene", "setting"])
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_spgist_include_not_supported(self):
index_name = "spgist_include_exception"
index = SpGistIndex(fields=["scene"], name=index_name, include=["setting"])
msg = "Covering SP-GiST indexes require PostgreSQL 14+."
with self.assertRaisesMessage(NotSupportedError, msg):
with mock.patch(
"django.db.backends.postgresql.features.DatabaseFeatures."
"supports_covering_spgist_indexes",
False,
):
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_custom_suffix(self):
class CustomSuffixIndex(PostgresIndex):
suffix = "sfx"
def create_sql(self, model, schema_editor, using="gin", **kwargs):
return super().create_sql(model, schema_editor, using=using, **kwargs)
index = CustomSuffixIndex(fields=["field"], name="custom_suffix_idx")
self.assertEqual(index.suffix, "sfx")
with connection.schema_editor() as editor:
self.assertIn(
" USING gin ",
str(index.create_sql(CharFieldModel, editor)),
)
def test_op_class(self):
index_name = "test_op_class"
index = Index(
OpClass(Lower("field"), name="text_pattern_ops"),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
def test_op_class_descending_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
index_name = "test_op_class_descending_collation"
index = Index(
Collate(
OpClass(Lower("field"), name="text_pattern_ops").desc(nulls_last=True),
collation=collation,
),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
self.assertIn(
"COLLATE %s" % editor.quote_name(collation),
str(index.create_sql(TextFieldModel, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
table = TextFieldModel._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(table))
def test_op_class_descending_partial(self):
index_name = "test_op_class_descending_partial"
index = Index(
OpClass(Lower("field"), name="text_pattern_ops").desc(),
name=index_name,
condition=Q(field__contains="China"),
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
def test_op_class_descending_partial_tablespace(self):
index_name = "test_op_class_descending_partial_tablespace"
index = Index(
OpClass(Lower("field").desc(), name="text_pattern_ops"),
name=index_name,
condition=Q(field__contains="China"),
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
self.assertIn(
'TABLESPACE "pg_default" ',
str(index.create_sql(TextFieldModel, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
|
42de7b79802cd704392c05be4b7b49dd704567f77898af5955d3caa165adb167 | from io import StringIO
from django.core.management import call_command
from . import PostgreSQLTestCase
class InspectDBTests(PostgreSQLTestCase):
def assertFieldsInModel(self, model, field_outputs):
out = StringIO()
call_command(
"inspectdb",
table_name_filter=lambda tn: tn.startswith(model),
stdout=out,
)
output = out.getvalue()
for field_output in field_outputs:
self.assertIn(field_output, output)
def test_range_fields(self):
self.assertFieldsInModel(
"postgres_tests_rangesmodel",
[
"ints = django.contrib.postgres.fields.IntegerRangeField(blank=True, "
"null=True)",
"bigints = django.contrib.postgres.fields.BigIntegerRangeField("
"blank=True, null=True)",
"decimals = django.contrib.postgres.fields.DecimalRangeField("
"blank=True, null=True)",
"timestamps = django.contrib.postgres.fields.DateTimeRangeField("
"blank=True, null=True)",
"dates = django.contrib.postgres.fields.DateRangeField(blank=True, "
"null=True)",
],
)
|
982c6d8eb0623461b9a6ac45c48aa64571f89287c26a3cc8baf0ce48a0e5a396 | import unittest
from decimal import Decimal
from django.db import connection
from django.db.backends.signals import connection_created
from django.db.migrations.writer import MigrationWriter
from django.test import TestCase
from django.test.utils import modify_settings
try:
from django.contrib.postgres.fields import (
DateRangeField,
DateTimeRangeField,
DecimalRangeField,
IntegerRangeField,
)
from django.db.backends.postgresql.psycopg_any import (
DateRange,
DateTimeRange,
DateTimeTZRange,
NumericRange,
is_psycopg3,
)
except ImportError:
pass
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
class PostgresConfigTests(TestCase):
def test_register_type_handlers_connection(self):
from django.contrib.postgres.signals import register_type_handlers
self.assertNotIn(
register_type_handlers, connection_created._live_receivers(None)
)
with modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}):
self.assertIn(
register_type_handlers, connection_created._live_receivers(None)
)
self.assertNotIn(
register_type_handlers, connection_created._live_receivers(None)
)
def test_register_serializer_for_migrations(self):
tests = (
(DateRange(empty=True), DateRangeField),
(DateTimeRange(empty=True), DateRangeField),
(DateTimeTZRange(None, None, "[]"), DateTimeRangeField),
(NumericRange(Decimal("1.0"), Decimal("5.0"), "()"), DecimalRangeField),
(NumericRange(1, 10), IntegerRangeField),
)
def assertNotSerializable():
for default, test_field in tests:
with self.subTest(default=default):
field = test_field(default=default)
with self.assertRaisesMessage(
ValueError, "Cannot serialize: %s" % default.__class__.__name__
):
MigrationWriter.serialize(field)
assertNotSerializable()
import_name = "psycopg.types.range" if is_psycopg3 else "psycopg2.extras"
with self.modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}):
for default, test_field in tests:
with self.subTest(default=default):
field = test_field(default=default)
serialized_field, imports = MigrationWriter.serialize(field)
self.assertEqual(
imports,
{
"import django.contrib.postgres.fields.ranges",
f"import {import_name}",
},
)
self.assertIn(
f"{field.__module__}.{field.__class__.__name__}"
f"(default={import_name}.{default!r})",
serialized_field,
)
assertNotSerializable()
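# Editor's note (not in the original suite): MigrationWriter.serialize(field)
# returns a (source_string, imports) pair, which is why the test above unpacks
# two values; range defaults only become serializable once the postgres app's
# migration serializers are registered via INSTALLED_APPS.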
|
69f17adbc7fd3292b0a74e9e635426dc4a5f8607cf6fdd9ea994d47d7585a7d4 | # RemovedInDjango51Warning.
"""
The citext PostgreSQL extension supports indexing of case-insensitive text
strings and thus eliminates the need for operations such as iexact and other
modifiers to enforce use of an index.
"""
from django.db import IntegrityError
from django.utils.deprecation import RemovedInDjango51Warning
from . import PostgreSQLTestCase
from .models import CITestModel
class CITextTestCase(PostgreSQLTestCase):
case_sensitive_lookups = ("contains", "startswith", "endswith", "regex")
@classmethod
def setUpTestData(cls):
cls.john = CITestModel.objects.create(
name="JoHn",
email="[email protected]",
description="Average Joe named JoHn",
array_field=["JoE", "jOhn"],
)
def test_equal_lowercase(self):
"""
citext removes the need for iexact as the index is case-insensitive.
"""
self.assertEqual(
CITestModel.objects.filter(name=self.john.name.lower()).count(), 1
)
self.assertEqual(
CITestModel.objects.filter(email=self.john.email.lower()).count(), 1
)
self.assertEqual(
CITestModel.objects.filter(
description=self.john.description.lower()
).count(),
1,
)
def test_fail_citext_primary_key(self):
"""
        Creating an entry whose citext primary key clashes case-insensitively
        with an existing value isn't allowed.
"""
with self.assertRaises(IntegrityError):
CITestModel.objects.create(name="John")
def test_array_field(self):
instance = CITestModel.objects.get()
self.assertEqual(instance.array_field, self.john.array_field)
self.assertTrue(
CITestModel.objects.filter(array_field__contains=["joe"]).exists()
)
def test_lookups_name_char(self):
for lookup in self.case_sensitive_lookups:
with self.subTest(lookup=lookup):
query = {"name__{}".format(lookup): "john"}
self.assertSequenceEqual(
CITestModel.objects.filter(**query), [self.john]
)
def test_lookups_description_text(self):
for lookup, string in zip(
self.case_sensitive_lookups, ("average", "average joe", "john", "Joe.named")
):
with self.subTest(lookup=lookup, string=string):
query = {"description__{}".format(lookup): string}
self.assertSequenceEqual(
CITestModel.objects.filter(**query), [self.john]
)
def test_lookups_email(self):
for lookup, string in zip(
self.case_sensitive_lookups, ("john", "john", "john.com", "john.com")
):
with self.subTest(lookup=lookup, string=string):
query = {"email__{}".format(lookup): string}
self.assertSequenceEqual(
CITestModel.objects.filter(**query), [self.john]
)
def test_citext_deprecated(self):
from django.contrib.postgres.fields import CIText
msg = "django.contrib.postgres.fields.CIText mixin is deprecated."
with self.assertRaisesMessage(RemovedInDjango51Warning, msg):
CIText()
|
3d9cbaa421b25786cecea4185fd8762c6bdf1d2db808fbadeedd13920d285004 | from datetime import date
from . import PostgreSQLTestCase
from .models import (
HStoreModel,
IntegerArrayModel,
NestedIntegerArrayModel,
NullableIntegerArrayModel,
OtherTypesArrayModel,
RangesModel,
)
try:
from django.db.backends.postgresql.psycopg_any import DateRange, NumericRange
except ImportError:
pass # psycopg isn't installed.
class BulkSaveTests(PostgreSQLTestCase):
def test_bulk_update(self):
test_data = [
(IntegerArrayModel, "field", [], [1, 2, 3]),
(NullableIntegerArrayModel, "field", [1, 2, 3], None),
(NestedIntegerArrayModel, "field", [], [[1, 2, 3]]),
(HStoreModel, "field", {}, {1: 2}),
(RangesModel, "ints", None, NumericRange(lower=1, upper=10)),
(
RangesModel,
"dates",
None,
DateRange(lower=date.today(), upper=date.today()),
),
(OtherTypesArrayModel, "ips", [], ["1.2.3.4"]),
(OtherTypesArrayModel, "json", [], [{"a": "b"}]),
]
for Model, field, initial, new in test_data:
with self.subTest(model=Model, field=field):
instances = Model.objects.bulk_create(
Model(**{field: initial}) for _ in range(20)
)
for instance in instances:
setattr(instance, field, new)
Model.objects.bulk_update(instances, [field])
self.assertSequenceEqual(
Model.objects.filter(**{field: new}), instances
)
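# Editor's note: a minimal standalone sketch (not part of the original suite)
# of the bulk_create/bulk_update round trip exercised above; the counts and
# field values are illustrative.
def _bulk_update_sketch():
    instances = IntegerArrayModel.objects.bulk_create(
        IntegerArrayModel(field=[]) for _ in range(3)
    )
    for instance in instances:
        instance.field = [1, 2, 3]
    # Issues a single UPDATE covering all modified instances.
    IntegerArrayModel.objects.bulk_update(instances, ["field"])
    return instances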
|
63b584b523d0309a702ff6af052b42be6e88ff98fd2f490d12709db738594495 | import datetime
from unittest import mock
from django.contrib.postgres.indexes import OpClass
from django.core.exceptions import ValidationError
from django.db import IntegrityError, NotSupportedError, connection, transaction
from django.db.models import (
CheckConstraint,
Deferrable,
F,
Func,
IntegerField,
Model,
Q,
UniqueConstraint,
)
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Cast, Left, Lower
from django.test import ignore_warnings, skipUnlessDBFeature
from django.test.utils import isolate_apps
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango50Warning
from . import PostgreSQLTestCase
from .models import HotelReservation, IntegerArrayModel, RangesModel, Room, Scene
try:
from django.contrib.postgres.constraints import ExclusionConstraint
from django.contrib.postgres.fields import (
DateTimeRangeField,
RangeBoundary,
RangeOperators,
)
from django.db.backends.postgresql.psycopg_any import DateRange, NumericRange
except ImportError:
pass
class SchemaTests(PostgreSQLTestCase):
get_opclass_query = """
SELECT opcname, c.relname FROM pg_opclass AS oc
JOIN pg_index as i on oc.oid = ANY(i.indclass)
JOIN pg_class as c on c.oid = i.indexrelid
WHERE c.relname = %s
"""
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_check_constraint_range_value(self):
constraint_name = "ints_between"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = CheckConstraint(
check=Q(ints__contained_by=NumericRange(10, 30)),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(20, 50))
RangesModel.objects.create(ints=(10, 30))
def test_check_constraint_array_contains(self):
constraint = CheckConstraint(
check=Q(field__contains=[1]),
name="array_contains",
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(IntegerArrayModel, IntegerArrayModel())
constraint.validate(IntegerArrayModel, IntegerArrayModel(field=[1]))
def test_check_constraint_array_length(self):
constraint = CheckConstraint(
check=Q(field__len=1),
name="array_length",
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(IntegerArrayModel, IntegerArrayModel())
constraint.validate(IntegerArrayModel, IntegerArrayModel(field=[1]))
def test_check_constraint_daterange_contains(self):
constraint_name = "dates_contains"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = CheckConstraint(
check=Q(dates__contains=F("dates_inner")),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
date_1 = datetime.date(2016, 1, 1)
date_2 = datetime.date(2016, 1, 4)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2.replace(day=5)),
)
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2),
)
def test_check_constraint_datetimerange_contains(self):
constraint_name = "timestamps_contains"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = CheckConstraint(
check=Q(timestamps__contains=F("timestamps_inner")),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
datetime_1 = datetime.datetime(2016, 1, 1)
datetime_2 = datetime.datetime(2016, 1, 2, 12)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2.replace(hour=13)),
)
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2),
)
def test_check_constraint_range_contains(self):
constraint = CheckConstraint(
check=Q(ints__contains=(1, 5)),
name="ints_contains",
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(RangesModel, RangesModel(ints=(6, 10)))
def test_check_constraint_range_lower_upper(self):
constraint = CheckConstraint(
check=Q(ints__startswith__gte=0) & Q(ints__endswith__lte=99),
name="ints_range_lower_upper",
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(RangesModel, RangesModel(ints=(-1, 20)))
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(RangesModel, RangesModel(ints=(0, 100)))
constraint.validate(RangesModel, RangesModel(ints=(0, 99)))
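    # Editor's note (not in the original suite): on range fields, the
    # "startswith" and "endswith" lookups compare the range's lower and upper
    # bounds respectively, which is what the constraint above relies on.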
def test_check_constraint_range_lower_with_nulls(self):
constraint = CheckConstraint(
check=Q(ints__isnull=True) | Q(ints__startswith__gte=0),
name="ints_optional_positive_range",
)
constraint.validate(RangesModel, RangesModel())
constraint = CheckConstraint(
check=Q(ints__startswith__gte=0),
name="ints_positive_range",
)
constraint.validate(RangesModel, RangesModel())
def test_opclass(self):
constraint = UniqueConstraint(
name="test_opclass",
fields=["scene"],
opclasses=["varchar_pattern_ops"],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
self.assertIn(constraint.name, self.get_constraints(Scene._meta.db_table))
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[("varchar_pattern_ops", constraint.name)],
)
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Scene, constraint)
self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))
def test_opclass_multiple_columns(self):
constraint = UniqueConstraint(
name="test_opclass_multiple",
fields=["scene", "setting"],
opclasses=["varchar_pattern_ops", "text_pattern_ops"],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
expected_opclasses = (
("varchar_pattern_ops", constraint.name),
("text_pattern_ops", constraint.name),
)
self.assertCountEqual(cursor.fetchall(), expected_opclasses)
def test_opclass_partial(self):
constraint = UniqueConstraint(
name="test_opclass_partial",
fields=["scene"],
opclasses=["varchar_pattern_ops"],
condition=Q(setting__contains="Sir Bedemir's Castle"),
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertCountEqual(
cursor.fetchall(),
[("varchar_pattern_ops", constraint.name)],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_opclass_include(self):
constraint = UniqueConstraint(
name="test_opclass_include",
fields=["scene"],
opclasses=["varchar_pattern_ops"],
include=["setting"],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertCountEqual(
cursor.fetchall(),
[("varchar_pattern_ops", constraint.name)],
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_opclass_func(self):
constraint = UniqueConstraint(
OpClass(Lower("scene"), name="text_pattern_ops"),
name="test_opclass_func",
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
constraints = self.get_constraints(Scene._meta.db_table)
        self.assertIn(constraint.name, constraints)
        self.assertIs(constraints[constraint.name]["unique"], True)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[("text_pattern_ops", constraint.name)],
)
Scene.objects.create(scene="Scene 10", setting="The dark forest of Ewing")
with self.assertRaises(IntegrityError), transaction.atomic():
Scene.objects.create(scene="ScEnE 10", setting="Sir Bedemir's Castle")
Scene.objects.create(scene="Scene 5", setting="Sir Bedemir's Castle")
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Scene, constraint)
self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))
Scene.objects.create(scene="ScEnE 10", setting="Sir Bedemir's Castle")
class ExclusionConstraintTests(PostgreSQLTestCase):
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_invalid_condition(self):
msg = "ExclusionConstraint.condition must be a Q instance."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type="GIST",
name="exclude_invalid_condition",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
condition=F("invalid"),
)
def test_invalid_index_type(self):
msg = "Exclusion constraints only support GiST or SP-GiST indexes."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type="gin",
name="exclude_invalid_index_type",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
)
def test_invalid_expressions(self):
msg = "The expressions must be a list of 2-tuples."
for expressions in (["foo"], [("foo")], [("foo_1", "foo_2", "foo_3")]):
with self.subTest(expressions), self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type="GIST",
name="exclude_invalid_expressions",
expressions=expressions,
)
def test_empty_expressions(self):
msg = "At least one expression is required to define an exclusion constraint."
for empty_expressions in (None, []):
with self.subTest(empty_expressions), self.assertRaisesMessage(
ValueError, msg
):
ExclusionConstraint(
index_type="GIST",
name="exclude_empty_expressions",
expressions=empty_expressions,
)
def test_invalid_deferrable(self):
msg = "ExclusionConstraint.deferrable must be a Deferrable instance."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_deferrable",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
deferrable="invalid",
)
def test_invalid_include_type(self):
msg = "ExclusionConstraint.include must be a list or tuple."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_include",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
include="invalid",
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_invalid_opclasses_type(self):
msg = "ExclusionConstraint.opclasses must be a list or tuple."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_opclasses",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
opclasses="invalid",
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_opclasses_and_expressions_same_length(self):
msg = (
"ExclusionConstraint.expressions and "
"ExclusionConstraint.opclasses must have the same number of "
"elements."
)
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_expressions_opclasses_length",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
opclasses=["foo", "bar"],
)
def test_repr(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '&&'), (F(room), '=')] name='exclude_overlapping'>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
condition=Q(cancelled=False),
index_type="SPGiST",
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='SPGiST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"condition=(AND: ('cancelled', False))>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
deferrable=Deferrable.IMMEDIATE,
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"deferrable=Deferrable.IMMEDIATE>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
include=["cancelled", "room"],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"include=('cancelled', 'room')>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(OpClass("datespan", name="range_ops"), RangeOperators.ADJACENT_TO),
],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(OpClass(F(datespan), name=range_ops), '-|-')] "
"name='exclude_overlapping'>",
)
def test_eq(self):
constraint_1 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
constraint_2 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
)
constraint_3 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
condition=Q(cancelled=False),
)
constraint_4 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
deferrable=Deferrable.DEFERRED,
)
constraint_5 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
deferrable=Deferrable.IMMEDIATE,
)
constraint_6 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
deferrable=Deferrable.IMMEDIATE,
include=["cancelled"],
)
constraint_7 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
include=["cancelled"],
)
with ignore_warnings(category=RemovedInDjango50Warning):
constraint_8 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
include=["cancelled"],
opclasses=["range_ops", "range_ops"],
)
constraint_9 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
opclasses=["range_ops", "range_ops"],
)
self.assertNotEqual(constraint_2, constraint_9)
self.assertNotEqual(constraint_7, constraint_8)
constraint_10 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
violation_error_message="custom error",
)
constraint_11 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
violation_error_message="other custom error",
)
self.assertEqual(constraint_1, constraint_1)
self.assertEqual(constraint_1, mock.ANY)
self.assertNotEqual(constraint_1, constraint_2)
self.assertNotEqual(constraint_1, constraint_3)
self.assertNotEqual(constraint_1, constraint_4)
self.assertNotEqual(constraint_1, constraint_10)
self.assertNotEqual(constraint_2, constraint_3)
self.assertNotEqual(constraint_2, constraint_4)
self.assertNotEqual(constraint_2, constraint_7)
self.assertNotEqual(constraint_4, constraint_5)
self.assertNotEqual(constraint_5, constraint_6)
self.assertNotEqual(constraint_1, object())
self.assertNotEqual(constraint_10, constraint_11)
self.assertEqual(constraint_10, constraint_10)
def test_deconstruct(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
},
)
def test_deconstruct_index_type(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
index_type="SPGIST",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"index_type": "SPGIST",
"expressions": [
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
},
)
def test_deconstruct_condition(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
"condition": Q(cancelled=False),
},
)
def test_deconstruct_deferrable(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
deferrable=Deferrable.DEFERRED,
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [("datespan", RangeOperators.OVERLAPS)],
"deferrable": Deferrable.DEFERRED,
},
)
def test_deconstruct_include(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
include=["cancelled", "room"],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [("datespan", RangeOperators.OVERLAPS)],
"include": ("cancelled", "room"),
},
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_deconstruct_opclasses(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
opclasses=["range_ops"],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [("datespan", RangeOperators.OVERLAPS)],
"opclasses": ["range_ops"],
},
)
def _test_range_overlaps(self, constraint):
# Create exclusion constraint.
self.assertNotIn(
constraint.name, self.get_constraints(HotelReservation._meta.db_table)
)
with connection.schema_editor() as editor:
editor.add_constraint(HotelReservation, constraint)
self.assertIn(
constraint.name, self.get_constraints(HotelReservation._meta.db_table)
)
# Add initial reservations.
room101 = Room.objects.create(number=101)
room102 = Room.objects.create(number=102)
datetimes = [
timezone.datetime(2018, 6, 20),
timezone.datetime(2018, 6, 24),
timezone.datetime(2018, 6, 26),
timezone.datetime(2018, 6, 28),
timezone.datetime(2018, 6, 29),
]
reservation = HotelReservation.objects.create(
datespan=DateRange(datetimes[0].date(), datetimes[1].date()),
start=datetimes[0],
end=datetimes[1],
room=room102,
)
constraint.validate(HotelReservation, reservation)
HotelReservation.objects.create(
datespan=DateRange(datetimes[1].date(), datetimes[3].date()),
start=datetimes[1],
end=datetimes[3],
room=room102,
)
HotelReservation.objects.create(
datespan=DateRange(datetimes[3].date(), datetimes[4].date()),
start=datetimes[3],
end=datetimes[4],
room=room102,
cancelled=True,
)
# Overlap dates.
with self.assertRaises(IntegrityError), transaction.atomic():
reservation = HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
)
msg = f"Constraint “{constraint.name}” is violated."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(HotelReservation, reservation)
reservation.save()
# Valid range.
other_valid_reservations = [
# Other room.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room101,
),
# Cancelled reservation.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[1].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
cancelled=True,
),
# Other adjacent dates.
HotelReservation(
datespan=(datetimes[3].date(), datetimes[4].date()),
start=datetimes[3],
end=datetimes[4],
room=room102,
),
]
for reservation in other_valid_reservations:
constraint.validate(HotelReservation, reservation)
HotelReservation.objects.bulk_create(other_valid_reservations)
# Excluded fields.
constraint.validate(
HotelReservation,
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
),
exclude={"room"},
)
constraint.validate(
HotelReservation,
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
),
exclude={"datespan", "start", "end", "room"},
)
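    # Editor's note (not in the original suite): passing exclude= to
    # Constraint.validate() skips the check when a field referenced by the
    # constraint is in the excluded set, which is why both calls above pass
    # despite describing an overlapping reservation.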
@ignore_warnings(category=RemovedInDjango50Warning)
def test_range_overlaps_custom_opclasses(self):
class TsTzRange(Func):
function = "TSTZRANGE"
output_field = DateTimeRangeField()
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations_custom",
expressions=[
(TsTzRange("start", "end", RangeBoundary()), RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
opclasses=["range_ops", "gist_int4_ops"],
)
self._test_range_overlaps(constraint)
def test_range_overlaps_custom(self):
class TsTzRange(Func):
function = "TSTZRANGE"
output_field = DateTimeRangeField()
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations_custom_opclass",
expressions=[
(
OpClass(TsTzRange("start", "end", RangeBoundary()), "range_ops"),
RangeOperators.OVERLAPS,
),
(OpClass("room", "gist_int4_ops"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_overlaps(self):
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_adjacent(self):
constraint_name = "ints_adjacent"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
def test_validate_range_adjacent(self):
constraint = ExclusionConstraint(
name="ints_adjacent",
expressions=[("ints", RangeOperators.ADJACENT_TO)],
violation_error_message="Custom error message.",
)
range_obj = RangesModel.objects.create(ints=(20, 50))
constraint.validate(RangesModel, range_obj)
msg = "Custom error message."
with self.assertRaisesMessage(ValidationError, msg):
constraint.validate(RangesModel, RangesModel(ints=(10, 20)))
constraint.validate(RangesModel, RangesModel(ints=(10, 19)))
constraint.validate(RangesModel, RangesModel(ints=(51, 60)))
constraint.validate(RangesModel, RangesModel(ints=(10, 20)), exclude={"ints"})
def test_expressions_with_params(self):
constraint_name = "scene_left_equal"
self.assertNotIn(constraint_name, self.get_constraints(Scene._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[(Left("scene", 4), RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
self.assertIn(constraint_name, self.get_constraints(Scene._meta.db_table))
def test_expressions_with_key_transform(self):
constraint_name = "exclude_overlapping_reservations_smoking"
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(KeyTextTransform("smoking", "requirements"), RangeOperators.EQUAL),
],
)
with connection.schema_editor() as editor:
editor.add_constraint(HotelReservation, constraint)
self.assertIn(
constraint_name,
self.get_constraints(HotelReservation._meta.db_table),
)
def test_index_transform(self):
constraint_name = "first_index_equal"
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("field__0", RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(IntegerArrayModel, constraint)
self.assertIn(
constraint_name,
self.get_constraints(IntegerArrayModel._meta.db_table),
)
def test_range_adjacent_initially_deferred(self):
constraint_name = "ints_adjacent_deferred"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
adjacent_range = RangesModel.objects.create(ints=(10, 20))
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(constraint_name)
cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
# Remove the adjacent range before the end of the transaction.
adjacent_range.delete()
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
def test_range_adjacent_initially_deferred_with_condition(self):
constraint_name = "ints_adjacent_deferred_with_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
condition=Q(ints__lt=(100, 200)),
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
adjacent_range = RangesModel.objects.create(ints=(10, 20))
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(constraint_name)
cursor.execute(f"SET CONSTRAINTS {quoted_name} IMMEDIATE")
# Remove the adjacent range before the end of the transaction.
adjacent_range.delete()
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Add adjacent range that doesn't match the condition.
RangesModel.objects.create(ints=(200, 500))
adjacent_range = RangesModel.objects.create(ints=(100, 200))
# Constraint behavior can be changed with SET CONSTRAINTS.
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(constraint_name)
cursor.execute(f"SET CONSTRAINTS {quoted_name} IMMEDIATE")
def test_range_adjacent_gist_include(self):
constraint_name = "ints_adjacent_gist_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["decimals", "ints"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_include(self):
constraint_name = "ints_adjacent_spgist_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["decimals", "ints"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
def test_range_adjacent_gist_include_condition(self):
constraint_name = "ints_adjacent_gist_include_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["decimals"],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_include_condition(self):
constraint_name = "ints_adjacent_spgist_include_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["decimals"],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_gist_include_deferrable(self):
constraint_name = "ints_adjacent_gist_include_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["decimals"],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_include_deferrable(self):
constraint_name = "ints_adjacent_spgist_include_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["decimals"],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_spgist_include_not_supported(self):
constraint_name = "ints_adjacent_spgist_include_not_supported"
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["id"],
)
msg = (
"Covering exclusion constraints using an SP-GiST index require "
"PostgreSQL 14+."
)
with connection.schema_editor() as editor:
with mock.patch(
"django.db.backends.postgresql.features.DatabaseFeatures."
"supports_covering_spgist_indexes",
False,
):
with self.assertRaisesMessage(NotSupportedError, msg):
editor.add_constraint(RangesModel, constraint)
def test_range_adjacent_opclass(self):
constraint_name = "ints_adjacent_opclass"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
constraints = self.get_constraints(RangesModel._meta.db_table)
self.assertIn(constraint_name, constraints)
with editor.connection.cursor() as cursor:
cursor.execute(SchemaTests.get_opclass_query, [constraint_name])
self.assertEqual(
cursor.fetchall(),
[("range_ops", constraint_name)],
)
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
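# OpClass attaches a PostgreSQL operator class to a single expression of the
# constraint; it replaces the table-level opclasses argument whose
# deprecation is exercised in the tests further below.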
def test_range_adjacent_opclass_condition(self):
constraint_name = "ints_adjacent_opclass_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_opclass_deferrable(self):
constraint_name = "ints_adjacent_opclass_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_gist_opclass_include(self):
constraint_name = "ints_adjacent_gist_opclass_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
index_type="gist",
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_opclass_include(self):
constraint_name = "ints_adjacent_spgist_opclass_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
index_type="spgist",
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_equal_cast(self):
constraint_name = "exclusion_equal_room_cast"
self.assertNotIn(constraint_name, self.get_constraints(Room._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[(Cast("number", IntegerField()), RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(Room, constraint)
self.assertIn(constraint_name, self.get_constraints(Room._meta.db_table))
@isolate_apps("postgres_tests")
def test_table_create(self):
constraint_name = "exclusion_equal_number_tc"
class ModelWithExclusionConstraint(Model):
number = IntegerField()
class Meta:
app_label = "postgres_tests"
constraints = [
ExclusionConstraint(
name=constraint_name,
expressions=[("number", RangeOperators.EQUAL)],
)
]
with connection.schema_editor() as editor:
editor.create_model(ModelWithExclusionConstraint)
self.assertIn(
constraint_name,
self.get_constraints(ModelWithExclusionConstraint._meta.db_table),
)
class ExclusionConstraintOpclassesDeprecationTests(PostgreSQLTestCase):
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_warning(self):
msg = (
"The opclasses argument is deprecated in favor of using "
"django.contrib.postgres.indexes.OpClass in "
"ExclusionConstraint.expressions."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_repr(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"opclasses=['range_ops']>",
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_range_adjacent_opclasses(self):
constraint_name = "ints_adjacent_opclasses"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
constraints = self.get_constraints(RangesModel._meta.db_table)
self.assertIn(constraint_name, constraints)
with editor.connection.cursor() as cursor:
cursor.execute(SchemaTests.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[("range_ops", constraint.name)],
)
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_range_adjacent_opclasses_condition(self):
constraint_name = "ints_adjacent_opclasses_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@ignore_warnings(category=RemovedInDjango50Warning)
def test_range_adjacent_opclasses_deferrable(self):
constraint_name = "ints_adjacent_opclasses_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@ignore_warnings(category=RemovedInDjango50Warning)
def test_range_adjacent_gist_opclasses_include(self):
constraint_name = "ints_adjacent_gist_opclasses_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
opclasses=["range_ops"],
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@ignore_warnings(category=RemovedInDjango50Warning)
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_opclasses_include(self):
constraint_name = "ints_adjacent_spgist_opclasses_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
opclasses=["range_ops"],
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
|
2006a15ad419297bb943ad5ff2cafe6f2f0a8acd9c577703fb062322231ea356 | import os
import re
from io import StringIO
from unittest import mock, skipUnless
from django.core.management import call_command
from django.db import connection
from django.db.backends.base.introspection import TableInfo
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import PeopleMoreData, test_collation
def inspectdb_tables_only(table_name):
"""
Limit introspection to tables created for models of this app.
Some databases such as Oracle are extremely slow at introspection.
"""
return table_name.startswith("inspectdb_")
def inspectdb_views_only(table_name):
return table_name.startswith("inspectdb_") and table_name.endswith(
("_materialized", "_view")
)
def special_table_only(table_name):
return table_name.startswith("inspectdb_special")
class InspectDBTestCase(TestCase):
unique_re = re.compile(r".*unique_together = \((.+),\).*")
def test_stealth_table_name_filter_option(self):
out = StringIO()
call_command("inspectdb", table_name_filter=inspectdb_tables_only, stdout=out)
error_message = (
"inspectdb has examined a table that should have been filtered out."
)
# contrib.contenttypes is one of the apps always installed when running
# the Django test suite. Check that one of its tables hasn't been
# inspected.
self.assertNotIn(
"class DjangoContentType(models.Model):", out.getvalue(), msg=error_message
)
def test_table_option(self):
"""
inspectdb can inspect a subset of tables by passing the table names as
arguments.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_people", stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbPeople(models.Model):", output)
self.assertNotIn("InspectdbPeopledata", output)
def make_field_type_asserter(self):
"""
Call inspectdb and return a function to validate a field type in its
output.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
def assertFieldType(name, definition):
out_def = re.search(r"^\s*%s = (models.*)$" % name, output, re.MULTILINE)[1]
self.assertEqual(definition, out_def)
return assertFieldType
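# assertFieldType matches a single generated line such as
#   char_field = models.CharField(max_length=10)
# and compares everything after the "=" against the expected definition,
# including any trailing "# ..." comment that inspectdb appends.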
def test_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
char_field_type = introspected_field_types["CharField"]
# Inspecting Oracle DB doesn't produce correct results (#19884):
# - it reports fields as blank=True when they aren't.
if (
not connection.features.interprets_empty_strings_as_nulls
and char_field_type == "CharField"
):
assertFieldType("char_field", "models.CharField(max_length=10)")
assertFieldType(
"null_char_field",
"models.CharField(max_length=10, blank=True, null=True)",
)
assertFieldType("email_field", "models.CharField(max_length=254)")
assertFieldType("file_field", "models.CharField(max_length=100)")
assertFieldType("file_path_field", "models.CharField(max_length=100)")
assertFieldType("slug_field", "models.CharField(max_length=50)")
assertFieldType("text_field", "models.TextField()")
assertFieldType("url_field", "models.CharField(max_length=200)")
if char_field_type == "TextField":
assertFieldType("char_field", "models.TextField()")
assertFieldType(
"null_char_field", "models.TextField(blank=True, null=True)"
)
assertFieldType("email_field", "models.TextField()")
assertFieldType("file_field", "models.TextField()")
assertFieldType("file_path_field", "models.TextField()")
assertFieldType("slug_field", "models.TextField()")
assertFieldType("text_field", "models.TextField()")
assertFieldType("url_field", "models.TextField()")
assertFieldType("date_field", "models.DateField()")
assertFieldType("date_time_field", "models.DateTimeField()")
if introspected_field_types["GenericIPAddressField"] == "GenericIPAddressField":
assertFieldType("gen_ip_address_field", "models.GenericIPAddressField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType("gen_ip_address_field", "models.CharField(max_length=39)")
assertFieldType(
"time_field", "models.%s()" % introspected_field_types["TimeField"]
)
if connection.features.has_native_uuid_field:
assertFieldType("uuid_field", "models.UUIDField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType("uuid_field", "models.CharField(max_length=32)")
@skipUnlessDBFeature("can_introspect_json_field", "supports_json_field")
def test_json_field(self):
out = StringIO()
call_command("inspectdb", "inspectdb_jsonfieldcolumntype", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn("json_field = models.JSONField()", output)
self.assertIn(
"null_json_field = models.JSONField(blank=True, null=True)", output
)
@skipUnlessDBFeature("supports_comments")
def test_db_comments(self):
out = StringIO()
call_command("inspectdb", "inspectdb_dbcomment", stdout=out)
output = out.getvalue()
integer_field_type = connection.features.introspected_field_types[
"IntegerField"
]
self.assertIn(
f"rank = models.{integer_field_type}("
f"db_comment=\"'Rank' column comment\")",
output,
)
self.assertIn(
" db_table_comment = 'Custom table comment'",
output,
)
@skipUnlessDBFeature("supports_collation_on_charfield")
@skipUnless(test_collation, "Language collations are not supported.")
def test_char_field_db_collation(self):
out = StringIO()
call_command("inspectdb", "inspectdb_charfielddbcollation", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s')" % test_collation,
output,
)
else:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s', blank=True, null=True)" % test_collation,
output,
)
@skipUnlessDBFeature("supports_collation_on_textfield")
@skipUnless(test_collation, "Language collations are not supported.")
def test_text_field_db_collation(self):
out = StringIO()
call_command("inspectdb", "inspectdb_textfielddbcollation", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"text_field = models.TextField(db_collation='%s')" % test_collation,
output,
)
else:
self.assertIn(
"text_field = models.TextField(db_collation='%s, blank=True, "
"null=True)" % test_collation,
output,
)
@skipUnlessDBFeature("supports_unlimited_charfield")
def test_char_field_unlimited(self):
out = StringIO()
call_command("inspectdb", "inspectdb_charfieldunlimited", stdout=out)
output = out.getvalue()
self.assertIn("char_field = models.CharField()", output)
def test_number_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
auto_field_type = connection.features.introspected_field_types["AutoField"]
if auto_field_type != "AutoField":
assertFieldType(
"id", "models.%s(primary_key=True) # AutoField?" % auto_field_type
)
assertFieldType(
"big_int_field", "models.%s()" % introspected_field_types["BigIntegerField"]
)
bool_field_type = introspected_field_types["BooleanField"]
assertFieldType("bool_field", "models.{}()".format(bool_field_type))
assertFieldType(
"null_bool_field",
"models.{}(blank=True, null=True)".format(bool_field_type),
)
if connection.vendor != "sqlite":
assertFieldType(
"decimal_field", "models.DecimalField(max_digits=6, decimal_places=1)"
)
else: # Guessed arguments on SQLite, see #5014
assertFieldType(
"decimal_field",
"models.DecimalField(max_digits=10, decimal_places=5) "
"# max_digits and decimal_places have been guessed, "
"as this database handles decimal fields as float",
)
assertFieldType("float_field", "models.FloatField()")
assertFieldType(
"int_field", "models.%s()" % introspected_field_types["IntegerField"]
)
assertFieldType(
"pos_int_field",
"models.%s()" % introspected_field_types["PositiveIntegerField"],
)
assertFieldType(
"pos_big_int_field",
"models.%s()" % introspected_field_types["PositiveBigIntegerField"],
)
assertFieldType(
"pos_small_int_field",
"models.%s()" % introspected_field_types["PositiveSmallIntegerField"],
)
assertFieldType(
"small_int_field",
"models.%s()" % introspected_field_types["SmallIntegerField"],
)
@skipUnlessDBFeature("can_introspect_foreign_keys")
def test_attribute_name_not_python_keyword(self):
out = StringIO()
call_command("inspectdb", table_name_filter=inspectdb_tables_only, stdout=out)
output = out.getvalue()
error_message = (
"inspectdb generated an attribute name which is a Python keyword"
)
# Recursive foreign keys should be set to 'self'
self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output)
self.assertNotIn(
"from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)",
output,
msg=error_message,
)
# As InspectdbPeople model is defined after InspectdbMessage, it should
# be quoted.
self.assertIn(
"from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, "
"db_column='from_id')",
output,
)
self.assertIn(
"people_pk = models.OneToOneField(InspectdbPeople, models.DO_NOTHING, "
"primary_key=True)",
output,
)
self.assertIn(
"people_unique = models.OneToOneField(InspectdbPeople, models.DO_NOTHING)",
output,
)
@skipUnlessDBFeature("can_introspect_foreign_keys")
def test_foreign_key_to_field(self):
out = StringIO()
call_command("inspectdb", "inspectdb_foreignkeytofield", stdout=out)
self.assertIn(
"to_field_fk = models.ForeignKey('InspectdbPeoplemoredata', "
"models.DO_NOTHING, to_field='people_unique_id')",
out.getvalue(),
)
def test_digits_column_name_introspection(self):
"""Introspection of column names consist/start with digits (#16536/#17676)"""
char_field_type = connection.features.introspected_field_types["CharField"]
out = StringIO()
call_command("inspectdb", "inspectdb_digitsincolumnname", stdout=out)
output = out.getvalue()
error_message = "inspectdb generated a model field name which is a number"
self.assertNotIn(
" 123 = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_123 = models.%s" % char_field_type, output)
error_message = (
"inspectdb generated a model field name which starts with a digit"
)
self.assertNotIn(
" 4extra = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_4extra = models.%s" % char_field_type, output)
self.assertNotIn(
" 45extra = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_45extra = models.%s" % char_field_type, output)
def test_special_column_name_introspection(self):
"""
Introspection of column names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command("inspectdb", table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
base_name = connection.introspection.identifier_converter("Field")
integer_field_type = connection.features.introspected_field_types[
"IntegerField"
]
self.assertIn("field = models.%s()" % integer_field_type, output)
self.assertIn(
"field_field = models.%s(db_column='%s_')"
% (integer_field_type, base_name),
output,
)
self.assertIn(
"field_field_0 = models.%s(db_column='%s__')"
% (integer_field_type, base_name),
output,
)
self.assertIn(
"field_field_1 = models.%s(db_column='__field')" % integer_field_type,
output,
)
self.assertIn(
"prc_x = models.{}(db_column='prc(%) x')".format(integer_field_type), output
)
self.assertIn("tamaño = models.%s()" % integer_field_type, output)
def test_table_name_introspection(self):
"""
Introspection of table names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command("inspectdb", table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbSpecialTableName(models.Model):", output)
@skipUnlessDBFeature("supports_expression_indexes")
def test_table_with_func_unique_constraint(self):
out = StringIO()
call_command("inspectdb", "inspectdb_funcuniqueconstraint", stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbFuncuniqueconstraint(models.Model):", output)
def test_managed_models(self):
"""
By default the command generates models with `Meta.managed = False`.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
self.longMessage = False
self.assertIn(
" managed = False",
output,
msg="inspectdb should generate unmanaged models.",
)
def test_unique_together_meta(self):
out = StringIO()
call_command("inspectdb", "inspectdb_uniquetogether", stdout=out)
output = out.getvalue()
self.assertIn(" unique_together = (('", output)
unique_together_match = self.unique_re.findall(output)
# There should be one unique_together tuple.
self.assertEqual(len(unique_together_match), 1)
fields = unique_together_match[0]
# Fields with db_column = field name.
self.assertIn("('field1', 'field2')", fields)
# Fields from columns whose names are Python keywords.
self.assertIn("('field1', 'field2')", fields)
# Fields whose names normalize to the same Python field name and hence
# are given an integer suffix.
self.assertIn("('non_unique_column', 'non_unique_column_0')", fields)
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_unsupported_unique_together(self):
"""Unsupported index types (COALESCE here) are skipped."""
with connection.cursor() as c:
c.execute(
"CREATE UNIQUE INDEX Findex ON %s "
"(id, people_unique_id, COALESCE(message_id, -1))"
% PeopleMoreData._meta.db_table
)
try:
out = StringIO()
call_command(
"inspectdb",
table_name_filter=lambda tn: tn.startswith(
PeopleMoreData._meta.db_table
),
stdout=out,
)
output = out.getvalue()
self.assertIn("# A unique constraint could not be introspected.", output)
self.assertEqual(
self.unique_re.findall(output), ["('id', 'people_unique')"]
)
finally:
with connection.cursor() as c:
c.execute("DROP INDEX Findex")
@skipUnless(
connection.vendor == "sqlite",
"Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test",
)
def test_custom_fields(self):
"""
Introspection of columns with a custom field (#21090)
"""
out = StringIO()
with mock.patch(
"django.db.connection.introspection.data_types_reverse."
"base_data_types_reverse",
{
"text": "myfields.TextField",
"bigint": "BigIntegerField",
},
):
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
self.assertIn("text_field = myfields.TextField()", output)
self.assertIn("big_int_field = models.BigIntegerField()", output)
def test_introspection_errors(self):
"""
Introspection errors should not crash the command, and the error should
be visible in the output.
"""
out = StringIO()
with mock.patch(
"django.db.connection.introspection.get_table_list",
return_value=[TableInfo(name="nonexistent", type="t")],
):
call_command("inspectdb", stdout=out)
output = out.getvalue()
self.assertIn("# Unable to inspect table 'nonexistent'", output)
# The error message depends on the backend
self.assertIn("# The error was:", output)
def test_same_relations(self):
out = StringIO()
call_command("inspectdb", "inspectdb_message", stdout=out)
self.assertIn(
"author = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, "
"related_name='inspectdbmessage_author_set')",
out.getvalue(),
)
class InspectDBTransactionalTests(TransactionTestCase):
available_apps = ["inspectdb"]
def test_include_views(self):
"""inspectdb --include-views creates models for database views."""
with connection.cursor() as cursor:
cursor.execute(
"CREATE VIEW inspectdb_people_view AS "
"SELECT id, name FROM inspectdb_people"
)
out = StringIO()
view_model = "class InspectdbPeopleView(models.Model):"
view_managed = "managed = False # Created from a view."
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP VIEW inspectdb_people_view")
@skipUnlessDBFeature("can_introspect_materialized_views")
def test_include_materialized_views(self):
"""inspectdb --include-views creates models for materialized views."""
with connection.cursor() as cursor:
cursor.execute(
"CREATE MATERIALIZED VIEW inspectdb_people_materialized AS "
"SELECT id, name FROM inspectdb_people"
)
out = StringIO()
view_model = "class InspectdbPeopleMaterialized(models.Model):"
view_managed = "managed = False # Created from a view."
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP MATERIALIZED VIEW inspectdb_people_materialized")
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_include_partitions(self):
"""inspectdb --include-partitions creates models for partitions."""
with connection.cursor() as cursor:
cursor.execute(
"""\
CREATE TABLE inspectdb_partition_parent (name text not null)
PARTITION BY LIST (left(upper(name), 1))
"""
)
cursor.execute(
"""\
CREATE TABLE inspectdb_partition_child
PARTITION OF inspectdb_partition_parent
FOR VALUES IN ('A', 'B', 'C')
"""
)
out = StringIO()
partition_model_parent = "class InspectdbPartitionParent(models.Model):"
partition_model_child = "class InspectdbPartitionChild(models.Model):"
partition_managed = "managed = False # Created from a partition."
try:
call_command(
"inspectdb", table_name_filter=inspectdb_tables_only, stdout=out
)
no_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, no_partitions_output)
self.assertNotIn(partition_model_child, no_partitions_output)
self.assertNotIn(partition_managed, no_partitions_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_tables_only,
include_partitions=True,
stdout=out,
)
with_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, with_partitions_output)
self.assertIn(partition_model_child, with_partitions_output)
self.assertIn(partition_managed, with_partitions_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP TABLE IF EXISTS inspectdb_partition_child")
cursor.execute("DROP TABLE IF EXISTS inspectdb_partition_parent")
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_foreign_data_wrapper(self):
with connection.cursor() as cursor:
cursor.execute("CREATE EXTENSION IF NOT EXISTS file_fdw")
cursor.execute(
"CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw"
)
cursor.execute(
connection.ops.compose_sql(
"""
CREATE FOREIGN TABLE inspectdb_iris_foreign_table (
petal_length real,
petal_width real,
sepal_length real,
sepal_width real
) SERVER inspectdb_server OPTIONS (
filename %s
)
""",
[os.devnull],
)
)
out = StringIO()
foreign_table_model = "class InspectdbIrisForeignTable(models.Model):"
foreign_table_managed = "managed = False"
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_tables_only,
stdout=out,
)
output = out.getvalue()
self.assertIn(foreign_table_model, output)
self.assertIn(foreign_table_managed, output)
finally:
with connection.cursor() as cursor:
cursor.execute(
"DROP FOREIGN TABLE IF EXISTS inspectdb_iris_foreign_table"
)
cursor.execute("DROP SERVER IF EXISTS inspectdb_server")
cursor.execute("DROP EXTENSION IF EXISTS file_fdw")
@skipUnlessDBFeature("create_test_table_with_composite_primary_key")
def test_composite_primary_key(self):
table_name = "test_table_composite_pk"
with connection.cursor() as cursor:
cursor.execute(
connection.features.create_test_table_with_composite_primary_key
)
out = StringIO()
if connection.vendor == "sqlite":
field_type = connection.features.introspected_field_types["AutoField"]
else:
field_type = connection.features.introspected_field_types["IntegerField"]
try:
call_command("inspectdb", table_name, stdout=out)
output = out.getvalue()
self.assertIn(
f"column_1 = models.{field_type}(primary_key=True) # The composite "
f"primary key (column_1, column_2) found, that is not supported. The "
f"first column is selected.",
output,
)
self.assertIn(
"column_2 = models.%s()"
% connection.features.introspected_field_types["IntegerField"],
output,
)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP TABLE %s" % table_name)
|
c2823240ea3b5cdf056f8334e4cc789c86478917fc1b9a97e13721bc08182cc2 | from django.db import connection, models
from django.db.models.functions import Lower
class People(models.Model):
name = models.CharField(max_length=255)
parent = models.ForeignKey("self", models.CASCADE)
class Message(models.Model):
from_field = models.ForeignKey(People, models.CASCADE, db_column="from_id")
author = models.ForeignKey(People, models.CASCADE, related_name="message_authors")
class PeopleData(models.Model):
people_pk = models.ForeignKey(People, models.CASCADE, primary_key=True)
ssn = models.CharField(max_length=11)
class PeopleMoreData(models.Model):
people_unique = models.ForeignKey(People, models.CASCADE, unique=True)
message = models.ForeignKey(Message, models.CASCADE, blank=True, null=True)
license = models.CharField(max_length=255)
class ForeignKeyToField(models.Model):
to_field_fk = models.ForeignKey(
PeopleMoreData,
models.CASCADE,
to_field="people_unique",
)
class DigitsInColumnName(models.Model):
all_digits = models.CharField(max_length=11, db_column="123")
leading_digit = models.CharField(max_length=11, db_column="4extra")
leading_digits = models.CharField(max_length=11, db_column="45extra")
class SpecialName(models.Model):
field = models.IntegerField(db_column="field")
# Underscores
field_field_0 = models.IntegerField(db_column="Field_")
field_field_1 = models.IntegerField(db_column="Field__")
field_field_2 = models.IntegerField(db_column="__field")
# Other chars
prc_x = models.IntegerField(db_column="prc(%) x")
non_ascii = models.IntegerField(db_column="tamaño")
class Meta:
db_table = "inspectdb_special.table name"
class ColumnTypes(models.Model):
id = models.AutoField(primary_key=True)
big_int_field = models.BigIntegerField()
bool_field = models.BooleanField(default=False)
null_bool_field = models.BooleanField(null=True)
char_field = models.CharField(max_length=10)
null_char_field = models.CharField(max_length=10, blank=True, null=True)
date_field = models.DateField()
date_time_field = models.DateTimeField()
decimal_field = models.DecimalField(max_digits=6, decimal_places=1)
email_field = models.EmailField()
file_field = models.FileField(upload_to="unused")
file_path_field = models.FilePathField()
float_field = models.FloatField()
int_field = models.IntegerField()
gen_ip_address_field = models.GenericIPAddressField(protocol="ipv4")
pos_big_int_field = models.PositiveBigIntegerField()
pos_int_field = models.PositiveIntegerField()
pos_small_int_field = models.PositiveSmallIntegerField()
slug_field = models.SlugField()
small_int_field = models.SmallIntegerField()
text_field = models.TextField()
time_field = models.TimeField()
url_field = models.URLField()
uuid_field = models.UUIDField()
class JSONFieldColumnType(models.Model):
json_field = models.JSONField()
null_json_field = models.JSONField(blank=True, null=True)
class Meta:
required_db_features = {
"can_introspect_json_field",
"supports_json_field",
}
test_collation = connection.features.test_collations.get("non_default")
class CharFieldDbCollation(models.Model):
char_field = models.CharField(max_length=10, db_collation=test_collation)
class Meta:
required_db_features = {"supports_collation_on_charfield"}
class TextFieldDbCollation(models.Model):
text_field = models.TextField(db_collation=test_collation)
class Meta:
required_db_features = {"supports_collation_on_textfield"}
class CharFieldUnlimited(models.Model):
char_field = models.CharField(max_length=None)
class Meta:
required_db_features = {"supports_unlimited_charfield"}
class UniqueTogether(models.Model):
field1 = models.IntegerField()
field2 = models.CharField(max_length=10)
from_field = models.IntegerField(db_column="from")
non_unique = models.IntegerField(db_column="non__unique_column")
non_unique_0 = models.IntegerField(db_column="non_unique__column")
class Meta:
unique_together = [
("field1", "field2"),
("from_field", "field1"),
("non_unique", "non_unique_0"),
]
class FuncUniqueConstraint(models.Model):
name = models.CharField(max_length=255)
rank = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
Lower("name"), models.F("rank"), name="index_lower_name"
)
]
required_db_features = {"supports_expression_indexes"}
class DbComment(models.Model):
rank = models.IntegerField(db_comment="'Rank' column comment")
class Meta:
db_table_comment = "Custom table comment"
required_db_features = {"supports_comments"}
|
6d7d2c5a76c4371e992a9c59aeb136ff32769ccdb646db10a1dc467252afc6b4 | import gzip
import random
import re
import struct
from io import BytesIO
from unittest import mock
from urllib.parse import quote
from django.conf import settings
from django.core import mail
from django.core.exceptions import PermissionDenied
from django.http import (
FileResponse,
HttpRequest,
HttpResponse,
HttpResponseNotFound,
HttpResponsePermanentRedirect,
HttpResponseRedirect,
StreamingHttpResponse,
)
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.middleware.common import BrokenLinkEmailsMiddleware, CommonMiddleware
from django.middleware.gzip import GZipMiddleware
from django.middleware.http import ConditionalGetMiddleware
from django.test import RequestFactory, SimpleTestCase, override_settings
int2byte = struct.Struct(">B").pack
def get_response_empty(request):
return HttpResponse()
def get_response_404(request):
return HttpResponseNotFound()
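# The middleware under test follows the standard contract: instantiate with
# a get_response callable, then call the instance with a request. The tests
# below exercise both the full __call__ path and the individual
# process_request()/process_response() hooks.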
@override_settings(ROOT_URLCONF="middleware.urls")
class CommonMiddlewareTest(SimpleTestCase):
rf = RequestFactory()
@override_settings(APPEND_SLASH=True)
def test_append_slash_have_slash(self):
"""
URLs with slashes should go unmolested.
"""
request = self.rf.get("/slash/")
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_resource(self):
"""
Matches to explicit slashless URLs should go unmolested.
"""
def get_response(req):
return HttpResponse("Here's the text of the web page.")
request = self.rf.get("/noslash")
self.assertIsNone(CommonMiddleware(get_response).process_request(request))
self.assertEqual(
CommonMiddleware(get_response)(request).content,
b"Here's the text of the web page.",
)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_unknown(self):
"""
APPEND_SLASH should not redirect to unknown resources.
"""
request = self.rf.get("/unknown")
response = CommonMiddleware(get_response_404)(request)
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect(self):
"""
APPEND_SLASH should redirect slashless URLs to a valid pattern.
"""
request = self.rf.get("/slash")
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertIsNone(r)
response = HttpResponseNotFound()
r = CommonMiddleware(get_response_empty).process_response(request, response)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/slash/")
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_querystring(self):
"""
APPEND_SLASH should preserve querystrings when redirecting.
"""
request = self.rf.get("/slash?test=1")
resp = CommonMiddleware(get_response_404)(request)
self.assertEqual(resp.url, "/slash/?test=1")
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_querystring_have_slash(self):
"""
APPEND_SLASH should append a slash to the path when redirecting a
request with a querystring ending with a slash.
"""
request = self.rf.get("/slash?test=slash/")
resp = CommonMiddleware(get_response_404)(request)
self.assertIsInstance(resp, HttpResponsePermanentRedirect)
self.assertEqual(resp.url, "/slash/?test=slash/")
@override_settings(APPEND_SLASH=True, DEBUG=True)
def test_append_slash_no_redirect_on_POST_in_DEBUG(self):
"""
While in debug mode, an exception is raised with a warning
when a failed attempt is made to POST, PUT, or PATCH to a URL which
would normally be redirected to a slashed version.
"""
msg = "maintaining %s data. Change your form to point to testserver/slash/"
request = self.rf.get("/slash")
request.method = "POST"
with self.assertRaisesMessage(RuntimeError, msg % request.method):
CommonMiddleware(get_response_404)(request)
request = self.rf.get("/slash")
request.method = "PUT"
with self.assertRaisesMessage(RuntimeError, msg % request.method):
CommonMiddleware(get_response_404)(request)
request = self.rf.get("/slash")
request.method = "PATCH"
with self.assertRaisesMessage(RuntimeError, msg % request.method):
CommonMiddleware(get_response_404)(request)
@override_settings(APPEND_SLASH=False)
def test_append_slash_disabled(self):
"""
Disabling append slash functionality should leave slashless URLs alone.
"""
request = self.rf.get("/slash")
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_opt_out(self):
"""
Views marked with @no_append_slash should be left alone.
"""
request = self.rf.get("/sensitive_fbv")
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
request = self.rf.get("/sensitive_cbv")
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_quoted(self):
"""
URLs which require quoting should be redirected to their slash version.
"""
request = self.rf.get(quote("/needsquoting#"))
r = CommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/needsquoting%23/")
@override_settings(APPEND_SLASH=True)
def test_append_slash_leading_slashes(self):
"""
Paths starting with two slashes are escaped to prevent open redirects.
If there's a URL pattern that allows paths to start with two slashes, a
request with path //evil.com must not redirect to //evil.com/ (appended
slash), which is a schemaless absolute URL. The browser would navigate
to evil.com/.
"""
# Use 4 slashes because of RequestFactory behavior.
request = self.rf.get("////evil.com/security")
r = CommonMiddleware(get_response_404).process_request(request)
self.assertIsNone(r)
response = HttpResponseNotFound()
r = CommonMiddleware(get_response_404).process_response(request, response)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/%2Fevil.com/security/")
r = CommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/%2Fevil.com/security/")
@override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
def test_prepend_www(self):
request = self.rf.get("/path/")
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/path/")
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_have_slash(self):
request = self.rf.get("/slash/")
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/slash/")
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_slashless(self):
request = self.rf.get("/slash")
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/slash/")
# The following tests examine expected behavior given a custom URLconf that
# overrides the default one through the request object.
@override_settings(APPEND_SLASH=True)
def test_append_slash_have_slash_custom_urlconf(self):
"""
URLs with slashes should go unmolested.
"""
request = self.rf.get("/customurlconf/slash/")
request.urlconf = "middleware.extra_urls"
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_resource_custom_urlconf(self):
"""
Matches to explicit slashless URLs should go unmolested.
"""
def get_response(req):
return HttpResponse("web content")
request = self.rf.get("/customurlconf/noslash")
request.urlconf = "middleware.extra_urls"
self.assertIsNone(CommonMiddleware(get_response).process_request(request))
self.assertEqual(
CommonMiddleware(get_response)(request).content, b"web content"
)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_unknown_custom_urlconf(self):
"""
APPEND_SLASH should not redirect to unknown resources.
"""
request = self.rf.get("/customurlconf/unknown")
request.urlconf = "middleware.extra_urls"
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_custom_urlconf(self):
"""
APPEND_SLASH should redirect slashless URLs to a valid pattern.
"""
request = self.rf.get("/customurlconf/slash")
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_404)(request)
self.assertIsNotNone(
r,
"CommonMiddleware failed to return APPEND_SLASH redirect using "
"request.urlconf",
)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/customurlconf/slash/")
@override_settings(APPEND_SLASH=True, DEBUG=True)
def test_append_slash_no_redirect_on_POST_in_DEBUG_custom_urlconf(self):
"""
While in debug mode, an exception is raised with a warning
when a failed attempt is made to POST to a URL which would normally be
redirected to a slashed version.
"""
request = self.rf.get("/customurlconf/slash")
request.urlconf = "middleware.extra_urls"
request.method = "POST"
with self.assertRaisesMessage(RuntimeError, "end in a slash"):
CommonMiddleware(get_response_404)(request)
@override_settings(APPEND_SLASH=False)
def test_append_slash_disabled_custom_urlconf(self):
"""
Disabling append slash functionality should leave slashless URLs alone.
"""
request = self.rf.get("/customurlconf/slash")
request.urlconf = "middleware.extra_urls"
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_quoted_custom_urlconf(self):
"""
URLs which require quoting should be redirected to their slash version.
"""
request = self.rf.get(quote("/customurlconf/needsquoting#"))
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_404)(request)
self.assertIsNotNone(
r,
"CommonMiddleware failed to return APPEND_SLASH redirect using "
"request.urlconf",
)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/customurlconf/needsquoting%23/")
@override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
def test_prepend_www_custom_urlconf(self):
request = self.rf.get("/customurlconf/path/")
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/customurlconf/path/")
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_have_slash_custom_urlconf(self):
request = self.rf.get("/customurlconf/slash/")
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/customurlconf/slash/")
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_slashless_custom_urlconf(self):
request = self.rf.get("/customurlconf/slash")
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/customurlconf/slash/")
# Tests for the Content-Length header
def test_content_length_header_added(self):
def get_response(req):
response = HttpResponse("content")
self.assertNotIn("Content-Length", response)
return response
response = CommonMiddleware(get_response)(self.rf.get("/"))
self.assertEqual(int(response.headers["Content-Length"]), len(response.content))
def test_content_length_header_not_added_for_streaming_response(self):
def get_response(req):
response = StreamingHttpResponse("content")
self.assertNotIn("Content-Length", response)
return response
response = CommonMiddleware(get_response)(self.rf.get("/"))
self.assertNotIn("Content-Length", response)
def test_content_length_header_not_changed(self):
bad_content_length = 500
def get_response(req):
response = HttpResponse()
response.headers["Content-Length"] = bad_content_length
return response
response = CommonMiddleware(get_response)(self.rf.get("/"))
self.assertEqual(int(response.headers["Content-Length"]), bad_content_length)
# Other tests
@override_settings(DISALLOWED_USER_AGENTS=[re.compile(r"foo")])
def test_disallowed_user_agents(self):
request = self.rf.get("/slash")
request.META["HTTP_USER_AGENT"] = "foo"
with self.assertRaisesMessage(PermissionDenied, "Forbidden user agent"):
CommonMiddleware(get_response_empty).process_request(request)
def test_non_ascii_query_string_does_not_crash(self):
"""Regression test for #15152"""
request = self.rf.get("/slash")
request.META["QUERY_STRING"] = "drink=café"
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertIsNone(r)
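# The APPEND_SLASH redirect is issued in process_response for 404s;
# building its Location header must not crash on the non-ASCII query
# string.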
response = HttpResponseNotFound()
r = CommonMiddleware(get_response_empty).process_response(request, response)
self.assertEqual(r.status_code, 301)
def test_response_redirect_class(self):
request = self.rf.get("/slash")
r = CommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/slash/")
self.assertIsInstance(r, HttpResponsePermanentRedirect)
def test_response_redirect_class_subclass(self):
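# response_redirect_class is the hook CommonMiddleware exposes for
# issuing a temporary (302) redirect instead of the default permanent
# one.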
class MyCommonMiddleware(CommonMiddleware):
response_redirect_class = HttpResponseRedirect
request = self.rf.get("/slash")
r = MyCommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, "/slash/")
self.assertIsInstance(r, HttpResponseRedirect)


@override_settings(
IGNORABLE_404_URLS=[re.compile(r"foo")],
MANAGERS=[("PHD", "[email protected]")],
)
class BrokenLinkEmailsMiddlewareTest(SimpleTestCase):
rf = RequestFactory()
def setUp(self):
self.req = self.rf.get("/regular_url/that/does/not/exist")
def get_response(self, req):
return self.client.get(req.path)
def test_404_error_reporting(self):
self.req.META["HTTP_REFERER"] = "/another/url/"
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("Broken", mail.outbox[0].subject)
def test_404_error_reporting_no_referer(self):
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
def test_404_error_reporting_ignored_url(self):
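# The path matches the IGNORABLE_404_URLS pattern r"foo" from the
# class decorator, so no broken-link email should be sent.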
self.req.path = self.req.path_info = "foo_url/that/does/not/exist"
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
def test_custom_request_checker(self):
class SubclassedMiddleware(BrokenLinkEmailsMiddleware):
ignored_user_agent_patterns = (
re.compile(r"Spider.*"),
re.compile(r"Robot.*"),
)
def is_ignorable_request(self, request, uri, domain, referer):
"""Check user-agent in addition to normal checks."""
if super().is_ignorable_request(request, uri, domain, referer):
return True
user_agent = request.META["HTTP_USER_AGENT"]
return any(
pattern.search(user_agent)
for pattern in self.ignored_user_agent_patterns
)
self.req.META["HTTP_REFERER"] = "/another/url/"
self.req.META["HTTP_USER_AGENT"] = "Spider machine 3.4"
SubclassedMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
self.req.META["HTTP_USER_AGENT"] = "My user agent"
SubclassedMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
def test_referer_equal_to_requested_url(self):
"""
Some bots set the referer to the current URL to avoid being blocked
by a referer check (#25302).
"""
self.req.META["HTTP_REFERER"] = self.req.path
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
# URL with scheme and domain should also be ignored
self.req.META["HTTP_REFERER"] = "http://testserver%s" % self.req.path
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
# URL with a different scheme should be ignored as well because bots
# tend to use http:// in referers even when browsing HTTPS websites.
self.req.META["HTTP_X_PROTO"] = "https"
self.req.META["SERVER_PORT"] = 443
with self.settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_PROTO", "https")):
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
def test_referer_equal_to_requested_url_on_another_domain(self):
self.req.META["HTTP_REFERER"] = "http://anotherserver%s" % self.req.path
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
@override_settings(APPEND_SLASH=True)
def test_referer_equal_to_requested_url_without_trailing_slash_with_append_slash(
self,
):
self.req.path = self.req.path_info = "/regular_url/that/does/not/exist/"
self.req.META["HTTP_REFERER"] = self.req.path_info[:-1]
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
@override_settings(APPEND_SLASH=False)
def test_referer_equal_to_requested_url_without_trailing_slash_with_no_append_slash(
self,
):
self.req.path = self.req.path_info = "/regular_url/that/does/not/exist/"
self.req.META["HTTP_REFERER"] = self.req.path_info[:-1]
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)


@override_settings(ROOT_URLCONF="middleware.cond_get_urls")
class ConditionalGetMiddlewareTest(SimpleTestCase):
request_factory = RequestFactory()
def setUp(self):
self.req = self.request_factory.get("/")
self.resp_headers = {}
def get_response(self, req):
resp = self.client.get(req.path_info)
for key, value in self.resp_headers.items():
resp[key] = value
return resp
# Tests for the ETag header
def test_middleware_calculates_etag(self):
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
self.assertNotEqual("", resp["ETag"])
def test_middleware_wont_overwrite_etag(self):
self.resp_headers["ETag"] = "eggs"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
self.assertEqual("eggs", resp["ETag"])
def test_no_etag_streaming_response(self):
def get_response(req):
return StreamingHttpResponse(["content"])
self.assertFalse(
ConditionalGetMiddleware(get_response)(self.req).has_header("ETag")
)
def test_no_etag_response_empty_content(self):
def get_response(req):
return HttpResponse()
self.assertFalse(
ConditionalGetMiddleware(get_response)(self.req).has_header("ETag")
)
def test_no_etag_no_store_cache(self):
self.resp_headers["Cache-Control"] = "No-Cache, No-Store, Max-age=0"
self.assertFalse(
ConditionalGetMiddleware(self.get_response)(self.req).has_header("ETag")
)
def test_etag_extended_cache_control(self):
self.resp_headers["Cache-Control"] = 'my-directive="my-no-store"'
self.assertTrue(
ConditionalGetMiddleware(self.get_response)(self.req).has_header("ETag")
)
def test_if_none_match_and_no_etag(self):
self.req.META["HTTP_IF_NONE_MATCH"] = "spam"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_no_if_none_match_and_etag(self):
self.resp_headers["ETag"] = "eggs"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_none_match_and_same_etag(self):
self.req.META["HTTP_IF_NONE_MATCH"] = '"spam"'
self.resp_headers["ETag"] = '"spam"'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 304)
def test_if_none_match_and_different_etag(self):
self.req.META["HTTP_IF_NONE_MATCH"] = "spam"
self.resp_headers["ETag"] = "eggs"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_none_match_and_redirect(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp["ETag"] = "spam"
resp["Location"] = "/"
resp.status_code = 301
return resp
self.req.META["HTTP_IF_NONE_MATCH"] = "spam"
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 301)
def test_if_none_match_and_client_error(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp["ETag"] = "spam"
resp.status_code = 400
return resp
self.req.META["HTTP_IF_NONE_MATCH"] = "spam"
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 400)
# Tests for the Last-Modified header
def test_if_modified_since_and_no_last_modified(self):
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_no_if_modified_since_and_last_modified(self):
self.resp_headers["Last-Modified"] = "Sat, 12 Feb 2011 17:38:44 GMT"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_modified_since_and_same_last_modified(self):
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
self.resp_headers["Last-Modified"] = "Sat, 12 Feb 2011 17:38:44 GMT"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 304)
def test_if_modified_since_and_last_modified_in_the_past(self):
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
self.resp_headers["Last-Modified"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 304)
def test_if_modified_since_and_last_modified_in_the_future(self):
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
self.resp_headers["Last-Modified"] = "Sat, 12 Feb 2011 17:41:44 GMT"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_modified_since_and_redirect(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp["Last-Modified"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp["Location"] = "/"
resp.status_code = 301
return resp
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 301)
def test_if_modified_since_and_client_error(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp["Last-Modified"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp.status_code = 400
return resp
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 400)
def test_not_modified_headers(self):
"""
The 304 Not Modified response should include only the headers required
by RFC 9110 Section 15.4.5, Last-Modified, and the cookies.
"""
def get_response(req):
resp = self.client.get(req.path_info)
resp["Date"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp["Last-Modified"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp["Expires"] = "Sun, 13 Feb 2011 17:35:44 GMT"
resp["Vary"] = "Cookie"
resp["Cache-Control"] = "public"
resp["Content-Location"] = "/alt"
resp["Content-Language"] = "en" # shouldn't be preserved
resp["ETag"] = '"spam"'
resp.set_cookie("key", "value")
return resp
self.req.META["HTTP_IF_NONE_MATCH"] = '"spam"'
new_response = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(new_response.status_code, 304)
base_response = get_response(self.req)
for header in (
"Cache-Control",
"Content-Location",
"Date",
"ETag",
"Expires",
"Last-Modified",
"Vary",
):
self.assertEqual(
new_response.headers[header], base_response.headers[header]
)
self.assertEqual(new_response.cookies, base_response.cookies)
self.assertNotIn("Content-Language", new_response)
def test_no_unsafe(self):
"""
ConditionalGetMiddleware shouldn't return a conditional response on an
unsafe request. A response has already been generated by the time
ConditionalGetMiddleware is called, so it's too late to return a 412
Precondition Failed.
"""
def get_200_response(req):
return HttpResponse(status=200)
response = ConditionalGetMiddleware(self.get_response)(self.req)
etag = response.headers["ETag"]
put_request = self.request_factory.put("/", headers={"if-match": etag})
conditional_get_response = ConditionalGetMiddleware(get_200_response)(
put_request
)
self.assertEqual(
conditional_get_response.status_code, 200
) # should never be a 412
def test_no_head(self):
"""
ConditionalGetMiddleware shouldn't compute and return an ETag on a
HEAD request since it can't do so accurately without access to the
response body of the corresponding GET.
"""
def get_200_response(req):
return HttpResponse(status=200)
request = self.request_factory.head("/")
conditional_get_response = ConditionalGetMiddleware(get_200_response)(request)
self.assertNotIn("ETag", conditional_get_response)
class XFrameOptionsMiddlewareTest(SimpleTestCase):
"""
Tests for the X-Frame-Options clickjacking prevention middleware.
"""
def test_same_origin(self):
"""
The X_FRAME_OPTIONS setting can be set to SAMEORIGIN to have the
middleware use that value for the HTTP header.
"""
with override_settings(X_FRAME_OPTIONS="SAMEORIGIN"):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
with override_settings(X_FRAME_OPTIONS="sameorigin"):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
def test_deny(self):
"""
The X_FRAME_OPTIONS setting can be set to DENY to have the middleware
use that value for the HTTP header.
"""
with override_settings(X_FRAME_OPTIONS="DENY"):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
with override_settings(X_FRAME_OPTIONS="deny"):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
def test_defaults_deny(self):
"""
If the X_FRAME_OPTIONS setting is not set, it defaults to DENY.
"""
with override_settings(X_FRAME_OPTIONS=None):
del settings.X_FRAME_OPTIONS # restored by override_settings
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
def test_dont_set_if_set(self):
"""
If the X-Frame-Options header is already set then the middleware does
not attempt to override it.
"""
def same_origin_response(request):
response = HttpResponse()
response.headers["X-Frame-Options"] = "SAMEORIGIN"
return response
def deny_response(request):
response = HttpResponse()
response.headers["X-Frame-Options"] = "DENY"
return response
with override_settings(X_FRAME_OPTIONS="DENY"):
r = XFrameOptionsMiddleware(same_origin_response)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
with override_settings(X_FRAME_OPTIONS="SAMEORIGIN"):
r = XFrameOptionsMiddleware(deny_response)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
def test_response_exempt(self):
"""
If the response has an xframe_options_exempt attribute set to False,
the middleware still sets the header; if it's set to True, it doesn't.
"""
def xframe_exempt_response(request):
response = HttpResponse()
response.xframe_options_exempt = True
return response
def xframe_not_exempt_response(request):
response = HttpResponse()
response.xframe_options_exempt = False
return response
with override_settings(X_FRAME_OPTIONS="SAMEORIGIN"):
r = XFrameOptionsMiddleware(xframe_not_exempt_response)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
r = XFrameOptionsMiddleware(xframe_exempt_response)(HttpRequest())
self.assertIsNone(r.headers.get("X-Frame-Options"))
def test_is_extendable(self):
"""
get_xframe_options_value(), the XFrameOptionsMiddleware method that
determines the X-Frame-Options header value, can be overridden based
on something in the request or the response.
"""
class OtherXFrameOptionsMiddleware(XFrameOptionsMiddleware):
# This is just an example for testing purposes...
def get_xframe_options_value(self, request, response):
if getattr(request, "sameorigin", False):
return "SAMEORIGIN"
if getattr(response, "sameorigin", False):
return "SAMEORIGIN"
return "DENY"
def same_origin_response(request):
response = HttpResponse()
response.sameorigin = True
return response
with override_settings(X_FRAME_OPTIONS="DENY"):
r = OtherXFrameOptionsMiddleware(same_origin_response)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
request = HttpRequest()
request.sameorigin = True
r = OtherXFrameOptionsMiddleware(get_response_empty)(request)
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
with override_settings(X_FRAME_OPTIONS="SAMEORIGIN"):
r = OtherXFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
class GZipMiddlewareTest(SimpleTestCase):
"""
Tests the GZipMiddleware.
"""
short_string = b"This string is too short to be worth compressing."
compressible_string = b"a" * 500
incompressible_string = b"".join(
int2byte(random.randint(0, 255)) for _ in range(500)
)
sequence = [b"a" * 500, b"b" * 200, b"a" * 300]
sequence_unicode = ["a" * 500, "é" * 200, "a" * 300]
request_factory = RequestFactory()
def setUp(self):
self.req = self.request_factory.get("/")
self.req.META["HTTP_ACCEPT_ENCODING"] = "gzip, deflate"
self.req.META[
"HTTP_USER_AGENT"
] = "Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1"
self.resp = HttpResponse()
self.resp.status_code = 200
self.resp.content = self.compressible_string
self.resp["Content-Type"] = "text/html; charset=UTF-8"
def get_response(self, request):
return self.resp
@staticmethod
def decompress(gzipped_string):
with gzip.GzipFile(mode="rb", fileobj=BytesIO(gzipped_string)) as f:
return f.read()
@staticmethod
def get_mtime(gzipped_string):
with gzip.GzipFile(mode="rb", fileobj=BytesIO(gzipped_string)) as f:
f.read() # must read the data before accessing the header
return f.mtime
def test_compress_response(self):
"""
Compression is performed on responses with compressible content.
"""
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(self.decompress(r.content), self.compressible_string)
self.assertEqual(r.get("Content-Encoding"), "gzip")
self.assertEqual(r.get("Content-Length"), str(len(r.content)))
def test_compress_streaming_response(self):
"""
Compression is performed on responses with streaming content.
"""
def get_stream_response(request):
resp = StreamingHttpResponse(self.sequence)
resp["Content-Type"] = "text/html; charset=UTF-8"
return resp
r = GZipMiddleware(get_stream_response)(self.req)
self.assertEqual(self.decompress(b"".join(r)), b"".join(self.sequence))
self.assertEqual(r.get("Content-Encoding"), "gzip")
self.assertFalse(r.has_header("Content-Length"))
async def test_compress_async_streaming_response(self):
"""
Compression is performed on responses with async streaming content.
"""
async def get_stream_response(request):
async def iterator():
for chunk in self.sequence:
yield chunk
resp = StreamingHttpResponse(iterator())
resp["Content-Type"] = "text/html; charset=UTF-8"
return resp
r = await GZipMiddleware(get_stream_response)(self.req)
self.assertEqual(
self.decompress(b"".join([chunk async for chunk in r])),
b"".join(self.sequence),
)
self.assertEqual(r.get("Content-Encoding"), "gzip")
self.assertFalse(r.has_header("Content-Length"))
def test_compress_streaming_response_unicode(self):
"""
Compression is performed on responses with streaming Unicode content.
"""
def get_stream_response_unicode(request):
resp = StreamingHttpResponse(self.sequence_unicode)
resp["Content-Type"] = "text/html; charset=UTF-8"
return resp
r = GZipMiddleware(get_stream_response_unicode)(self.req)
self.assertEqual(
self.decompress(b"".join(r)),
b"".join(x.encode() for x in self.sequence_unicode),
)
self.assertEqual(r.get("Content-Encoding"), "gzip")
self.assertFalse(r.has_header("Content-Length"))
def test_compress_file_response(self):
"""
Compression is performed on FileResponse.
"""
with open(__file__, "rb") as file1:
def get_response(req):
file_resp = FileResponse(file1)
file_resp["Content-Type"] = "text/html; charset=UTF-8"
return file_resp
r = GZipMiddleware(get_response)(self.req)
with open(__file__, "rb") as file2:
self.assertEqual(self.decompress(b"".join(r)), file2.read())
self.assertEqual(r.get("Content-Encoding"), "gzip")
self.assertIsNot(r.file_to_stream, file1)
def test_compress_non_200_response(self):
"""
Compression is performed on responses with a status other than 200
(#10762).
"""
self.resp.status_code = 404
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(self.decompress(r.content), self.compressible_string)
self.assertEqual(r.get("Content-Encoding"), "gzip")
def test_no_compress_short_response(self):
"""
Compression isn't performed on responses with short content.
"""
self.resp.content = self.short_string
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r.content, self.short_string)
self.assertIsNone(r.get("Content-Encoding"))
def test_no_compress_compressed_response(self):
"""
Compression isn't performed on responses that are already compressed.
"""
self.resp["Content-Encoding"] = "deflate"
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r.content, self.compressible_string)
self.assertEqual(r.get("Content-Encoding"), "deflate")
def test_no_compress_incompressible_response(self):
"""
Compression isn't performed on responses with incompressible content.
"""
self.resp.content = self.incompressible_string
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r.content, self.incompressible_string)
self.assertIsNone(r.get("Content-Encoding"))
def test_compress_deterministic(self):
"""
Compression results are the same for the same content and don't
include a modification time (since that would make the results
of compression non-deterministic and prevent
ConditionalGetMiddleware from recognizing conditional matches
on gzipped content).
"""
class DeterministicGZipMiddleware(GZipMiddleware):
max_random_bytes = 0
r1 = DeterministicGZipMiddleware(self.get_response)(self.req)
r2 = DeterministicGZipMiddleware(self.get_response)(self.req)
self.assertEqual(r1.content, r2.content)
self.assertEqual(self.get_mtime(r1.content), 0)
self.assertEqual(self.get_mtime(r2.content), 0)
def test_random_bytes(self):
"""A random number of bytes is added to mitigate the BREACH attack."""
with mock.patch(
"django.utils.text.secrets.randbelow", autospec=True, return_value=3
):
r = GZipMiddleware(self.get_response)(self.req)
# The fourth byte of a gzip stream contains flags.
self.assertEqual(r.content[3], gzip.FNAME)
# A 3 byte filename "aaa" and a null byte are added.
self.assertEqual(r.content[10:14], b"aaa\x00")
self.assertEqual(self.decompress(r.content), self.compressible_string)
def test_random_bytes_streaming_response(self):
"""A random number of bytes is added to mitigate the BREACH attack."""
def get_stream_response(request):
resp = StreamingHttpResponse(self.sequence)
resp["Content-Type"] = "text/html; charset=UTF-8"
return resp
with mock.patch(
"django.utils.text.secrets.randbelow", autospec=True, return_value=3
):
r = GZipMiddleware(get_stream_response)(self.req)
content = b"".join(r)
# The fourth byte of a gzip stream contains flags.
self.assertEqual(content[3], gzip.FNAME)
# A 3 byte filename "aaa" and a null byte are added.
self.assertEqual(content[10:14], b"aaa\x00")
self.assertEqual(self.decompress(content), b"".join(self.sequence))


class ETagGZipMiddlewareTest(SimpleTestCase):
"""
ETags are handled properly by GZipMiddleware.
"""
rf = RequestFactory()
compressible_string = b"a" * 500
def test_strong_etag_modified(self):
"""
GZipMiddleware makes a strong ETag weak.
"""
def get_response(req):
response = HttpResponse(self.compressible_string)
response.headers["ETag"] = '"eggs"'
return response
request = self.rf.get("/", headers={"accept-encoding": "gzip, deflate"})
gzip_response = GZipMiddleware(get_response)(request)
self.assertEqual(gzip_response.headers["ETag"], 'W/"eggs"')
def test_weak_etag_not_modified(self):
"""
GZipMiddleware doesn't modify a weak ETag.
"""
def get_response(req):
response = HttpResponse(self.compressible_string)
response.headers["ETag"] = 'W/"eggs"'
return response
request = self.rf.get("/", headers={"accept-encoding": "gzip, deflate"})
gzip_response = GZipMiddleware(get_response)(request)
self.assertEqual(gzip_response.headers["ETag"], 'W/"eggs"')
def test_etag_match(self):
"""
GZipMiddleware allows 304 Not Modified responses.
"""
def get_response(req):
return HttpResponse(self.compressible_string)
def get_cond_response(req):
return ConditionalGetMiddleware(get_response)(req)
request = self.rf.get("/", headers={"accept-encoding": "gzip, deflate"})
response = GZipMiddleware(get_cond_response)(request)
gzip_etag = response.headers["ETag"]
next_request = self.rf.get(
"/",
headers={"accept-encoding": "gzip, deflate", "if-none-match": gzip_etag},
)
next_response = ConditionalGetMiddleware(get_response)(next_request)
self.assertEqual(next_response.status_code, 304)