hash (string, length 64) | content (string, length 0–1.51M)
---|---
d265ac661b149a53f73f39e47ffa6ec26fb9edde598d21e1274144d116c3f39d | from django.forms import CharField, Form, HiddenInput
from .base import WidgetTest
class HiddenInputTest(WidgetTest):
widget = HiddenInput()
def test_render(self):
self.check_html(
self.widget, "email", "", html='<input type="hidden" name="email">'
)
def test_use_required_attribute(self):
# Always False to avoid browser validation on inputs hidden from the
# user.
self.assertIs(self.widget.use_required_attribute(None), False)
self.assertIs(self.widget.use_required_attribute(""), False)
self.assertIs(self.widget.use_required_attribute("foo"), False)
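        # Illustrative sketch (not part of the original test): because
        # use_required_attribute() is always False here, even a required
        # CharField rendered with HiddenInput omits the HTML "required"
        # attribute, e.g. (hypothetical form):
        #
        #   class TokenForm(Form):
        #       token = CharField(widget=HiddenInput)
        #
        #   str(TokenForm()["token"])
        #   # '<input type="hidden" name="token" id="id_token">'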
def test_fieldset(self):
class TestForm(Form):
template_name = "forms_tests/use_fieldset.html"
field = CharField(widget=self.widget)
form = TestForm()
self.assertIs(self.widget.use_fieldset, False)
self.assertHTMLEqual(
'<input type="hidden" name="field" id="id_field">',
form.render(),
)
|
19e15a10dcd1ea0056e44ccaf51de4426929d40dde8b10f079cf72d08d15ec48 | from django.forms import CharField, Form, TextInput
from django.utils.safestring import mark_safe
from .base import WidgetTest
class TextInputTest(WidgetTest):
widget = TextInput()
def test_render(self):
self.check_html(
self.widget, "email", "", html='<input type="text" name="email">'
)
def test_render_none(self):
self.check_html(
self.widget, "email", None, html='<input type="text" name="email">'
)
def test_render_value(self):
self.check_html(
self.widget,
"email",
"[email protected]",
html=('<input type="text" name="email" value="[email protected]">'),
)
def test_render_boolean(self):
"""
Boolean values are rendered to their string forms ("True" and
"False").
"""
self.check_html(
self.widget,
"get_spam",
False,
html=('<input type="text" name="get_spam" value="False">'),
)
self.check_html(
self.widget,
"get_spam",
True,
html=('<input type="text" name="get_spam" value="True">'),
)
def test_render_quoted(self):
self.check_html(
self.widget,
"email",
'some "quoted" & ampersanded value',
html=(
'<input type="text" name="email" '
'value="some "quoted" & ampersanded value">'
),
)
def test_render_custom_attrs(self):
self.check_html(
self.widget,
"email",
"[email protected]",
attrs={"class": "fun"},
html=(
'<input type="text" name="email" value="[email protected]" class="fun">'
),
)
def test_render_unicode(self):
self.check_html(
self.widget,
"email",
"ŠĐĆŽćžšđ",
attrs={"class": "fun"},
html=(
'<input type="text" name="email" '
'value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111" class="fun">'
),
)
def test_constructor_attrs(self):
widget = TextInput(attrs={"class": "fun", "type": "email"})
self.check_html(
widget, "email", "", html='<input type="email" class="fun" name="email">'
)
self.check_html(
widget,
"email",
"[email protected]",
html=(
'<input type="email" class="fun" value="[email protected]" name="email">'
),
)
def test_attrs_precedence(self):
"""
        `attrs` passed to render() take precedence over those passed to the
        constructor.
"""
widget = TextInput(attrs={"class": "pretty"})
self.check_html(
widget,
"email",
"",
attrs={"class": "special"},
html='<input type="text" class="special" name="email">',
)
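    # Sketch of the merge rule exercised above (assuming build_attrs() lays
    # constructor attrs down first and per-render attrs on top): keys given
    # to render() replace same-named constructor keys, while unrelated
    # constructor keys survive, e.g.
    #
    #   TextInput(attrs={"class": "pretty", "size": "10"}).render(
    #       "q", "", attrs={"class": "special"})
    #   # -> <input type="text" name="q" class="special" size="10">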
def test_attrs_safestring(self):
widget = TextInput(attrs={"onBlur": mark_safe("function('foo')")})
self.check_html(
widget,
"email",
"",
html='<input onBlur="function(\'foo\')" type="text" name="email">',
)
def test_use_required_attribute(self):
        # Text inputs can safely trigger browser validation.
self.assertIs(self.widget.use_required_attribute(None), True)
self.assertIs(self.widget.use_required_attribute(""), True)
self.assertIs(self.widget.use_required_attribute("resume.txt"), True)
def test_fieldset(self):
class TestForm(Form):
template_name = "forms_tests/use_fieldset.html"
field = CharField(widget=self.widget)
form = TestForm()
self.assertIs(self.widget.use_fieldset, False)
self.assertHTMLEqual(
'<div><label for="id_field">Field:</label>'
'<input type="text" name="field" required id="id_field"></div>',
form.render(),
)
|
a0f514a592ae45659f5c8270457e103fb9c37e8775c5159437965fa99fbade1f | import copy
import datetime
from django.forms import ChoiceField, Form, Select
from django.test import override_settings
from django.utils.safestring import mark_safe
from .base import WidgetTest
class SelectTest(WidgetTest):
widget = Select
nested_widget = Select(
choices=(
("outer1", "Outer 1"),
('Group "1"', (("inner1", "Inner 1"), ("inner2", "Inner 2"))),
)
)
def test_render(self):
self.check_html(
self.widget(choices=self.beatles),
"beatle",
"J",
html=(
"""<select name="beatle">
<option value="J" selected>John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
),
)
def test_render_none(self):
"""
If the value is None, none of the options are selected.
"""
self.check_html(
self.widget(choices=self.beatles),
"beatle",
None,
html=(
"""<select name="beatle">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
),
)
def test_render_label_value(self):
"""
If the value corresponds to a label (but not to an option value), none
of the options are selected.
"""
self.check_html(
self.widget(choices=self.beatles),
"beatle",
"John",
html=(
"""<select name="beatle">
<option value="J">John</option>
<option value="P">Paul</option>
<option value="G">George</option>
<option value="R">Ringo</option>
</select>"""
),
)
def test_render_selected(self):
"""
Only one option can be selected (#8103).
"""
choices = [("0", "0"), ("1", "1"), ("2", "2"), ("3", "3"), ("0", "extra")]
self.check_html(
self.widget(choices=choices),
"choices",
"0",
html=(
"""<select name="choices">
<option value="0" selected>0</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3">3</option>
<option value="0">extra</option>
</select>"""
),
)
def test_constructor_attrs(self):
"""
Select options shouldn't inherit the parent widget attrs.
"""
widget = Select(
attrs={"class": "super", "id": "super"},
choices=[(1, 1), (2, 2), (3, 3)],
)
self.check_html(
widget,
"num",
2,
html=(
"""<select name="num" class="super" id="super">
<option value="1">1</option>
<option value="2" selected>2</option>
<option value="3">3</option>
</select>"""
),
)
def test_compare_to_str(self):
"""
The value is compared to its str().
"""
self.check_html(
self.widget(choices=[("1", "1"), ("2", "2"), ("3", "3")]),
"num",
2,
html=(
"""<select name="num">
<option value="1">1</option>
<option value="2" selected>2</option>
<option value="3">3</option>
</select>"""
),
)
self.check_html(
self.widget(choices=[(1, 1), (2, 2), (3, 3)]),
"num",
"2",
html=(
"""<select name="num">
<option value="1">1</option>
<option value="2" selected>2</option>
<option value="3">3</option>
</select>"""
),
)
self.check_html(
self.widget(choices=[(1, 1), (2, 2), (3, 3)]),
"num",
2,
html=(
"""<select name="num">
<option value="1">1</option>
<option value="2" selected>2</option>
<option value="3">3</option>
</select>"""
),
)
def test_choices_constructor(self):
widget = Select(choices=[(1, 1), (2, 2), (3, 3)])
self.check_html(
widget,
"num",
2,
html=(
"""<select name="num">
<option value="1">1</option>
<option value="2" selected>2</option>
<option value="3">3</option>
</select>"""
),
)
def test_choices_constructor_generator(self):
"""
If choices is passed to the constructor and is a generator, it can be
iterated over multiple times without getting consumed.
"""
def get_choices():
for i in range(5):
yield (i, i)
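        # Why repeated rendering works (an assumption about the internals):
        # Select materializes the iterator on assignment, roughly
        # `self.choices = list(choices)`, so the generator is consumed once
        # and both check_html() calls below see all five options.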
widget = Select(choices=get_choices())
self.check_html(
widget,
"num",
2,
html=(
"""<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2" selected>2</option>
<option value="3">3</option>
<option value="4">4</option>
</select>"""
),
)
self.check_html(
widget,
"num",
3,
html=(
"""<select name="num">
<option value="0">0</option>
<option value="1">1</option>
<option value="2">2</option>
<option value="3" selected>3</option>
<option value="4">4</option>
</select>"""
),
)
def test_choices_escaping(self):
choices = (("bad", "you & me"), ("good", mark_safe("you > me")))
self.check_html(
self.widget(choices=choices),
"escape",
None,
html=(
"""<select name="escape">
<option value="bad">you & me</option>
<option value="good">you > me</option>
</select>"""
),
)
def test_choices_unicode(self):
self.check_html(
self.widget(choices=[("ŠĐĆŽćžšđ", "ŠĐabcĆŽćžšđ"), ("ćžšđ", "abcćžšđ")]),
"email",
"ŠĐĆŽćžšđ",
html=(
"""
<select name="email">
<option value="\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111"
selected>
\u0160\u0110abc\u0106\u017d\u0107\u017e\u0161\u0111
</option>
<option value="\u0107\u017e\u0161\u0111">abc\u0107\u017e\u0161\u0111
</option>
</select>
"""
),
)
def test_choices_optgroup(self):
"""
Choices can be nested one level in order to create HTML optgroups.
"""
self.check_html(
self.nested_widget,
"nestchoice",
None,
html=(
"""<select name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>"""
),
)
def test_choices_select_outer(self):
self.check_html(
self.nested_widget,
"nestchoice",
"outer1",
html=(
"""<select name="nestchoice">
<option value="outer1" selected>Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1">Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>"""
),
)
def test_choices_select_inner(self):
self.check_html(
self.nested_widget,
"nestchoice",
"inner1",
html=(
"""<select name="nestchoice">
<option value="outer1">Outer 1</option>
<optgroup label="Group "1"">
<option value="inner1" selected>Inner 1</option>
<option value="inner2">Inner 2</option>
</optgroup>
</select>"""
),
)
@override_settings(USE_THOUSAND_SEPARATOR=True)
def test_doesnt_localize_option_value(self):
choices = [
(1, "One"),
(1000, "One thousand"),
(1000000, "One million"),
]
html = """
<select name="number">
<option value="1">One</option>
<option value="1000">One thousand</option>
<option value="1000000">One million</option>
</select>
"""
self.check_html(self.widget(choices=choices), "number", None, html=html)
choices = [
(datetime.time(0, 0), "midnight"),
(datetime.time(12, 0), "noon"),
]
html = """
<select name="time">
<option value="00:00:00">midnight</option>
<option value="12:00:00">noon</option>
</select>
"""
self.check_html(self.widget(choices=choices), "time", None, html=html)
def test_options(self):
options = list(
self.widget(choices=self.beatles).options(
"name",
["J"],
attrs={"class": "super"},
)
)
self.assertEqual(len(options), 4)
self.assertEqual(options[0]["name"], "name")
self.assertEqual(options[0]["value"], "J")
self.assertEqual(options[0]["label"], "John")
self.assertEqual(options[0]["index"], "0")
self.assertIs(options[0]["selected"], True)
# Template-related attributes
self.assertEqual(options[1]["name"], "name")
self.assertEqual(options[1]["value"], "P")
self.assertEqual(options[1]["label"], "Paul")
self.assertEqual(options[1]["index"], "1")
self.assertIs(options[1]["selected"], False)
def test_optgroups(self):
choices = [
(
"Audio",
[
("vinyl", "Vinyl"),
("cd", "CD"),
],
),
(
"Video",
[
("vhs", "VHS Tape"),
("dvd", "DVD"),
],
),
("unknown", "Unknown"),
]
groups = list(
self.widget(choices=choices).optgroups(
"name",
["vhs"],
attrs={"class": "super"},
)
)
audio, video, unknown = groups
label, options, index = audio
self.assertEqual(label, "Audio")
self.assertEqual(
options,
[
{
"value": "vinyl",
"type": "select",
"attrs": {},
"index": "0_0",
"label": "Vinyl",
"template_name": "django/forms/widgets/select_option.html",
"name": "name",
"selected": False,
"wrap_label": True,
},
{
"value": "cd",
"type": "select",
"attrs": {},
"index": "0_1",
"label": "CD",
"template_name": "django/forms/widgets/select_option.html",
"name": "name",
"selected": False,
"wrap_label": True,
},
],
)
self.assertEqual(index, 0)
label, options, index = video
self.assertEqual(label, "Video")
self.assertEqual(
options,
[
{
"value": "vhs",
"template_name": "django/forms/widgets/select_option.html",
"label": "VHS Tape",
"attrs": {"selected": True},
"index": "1_0",
"name": "name",
"selected": True,
"type": "select",
"wrap_label": True,
},
{
"value": "dvd",
"template_name": "django/forms/widgets/select_option.html",
"label": "DVD",
"attrs": {},
"index": "1_1",
"name": "name",
"selected": False,
"type": "select",
"wrap_label": True,
},
],
)
self.assertEqual(index, 1)
label, options, index = unknown
self.assertIsNone(label)
self.assertEqual(
options,
[
{
"value": "unknown",
"selected": False,
"template_name": "django/forms/widgets/select_option.html",
"label": "Unknown",
"attrs": {},
"index": "2",
"name": "name",
"type": "select",
"wrap_label": True,
}
],
)
self.assertEqual(index, 2)
def test_optgroups_integer_choices(self):
"""The option 'value' is the same type as what's in `choices`."""
groups = list(
self.widget(choices=[[0, "choice text"]]).optgroups("name", ["vhs"])
)
label, options, index = groups[0]
self.assertEqual(options[0]["value"], 0)
def test_deepcopy(self):
"""
__deepcopy__() should copy all attributes properly (#25085).
"""
widget = Select()
obj = copy.deepcopy(widget)
self.assertIsNot(widget, obj)
self.assertEqual(widget.choices, obj.choices)
self.assertIsNot(widget.choices, obj.choices)
self.assertEqual(widget.attrs, obj.attrs)
self.assertIsNot(widget.attrs, obj.attrs)
def test_doesnt_render_required_when_impossible_to_select_empty_field(self):
widget = self.widget(choices=[("J", "John"), ("P", "Paul")])
self.assertIs(widget.use_required_attribute(initial=None), False)
def test_renders_required_when_possible_to_select_empty_field_str(self):
widget = self.widget(choices=[("", "select please"), ("P", "Paul")])
self.assertIs(widget.use_required_attribute(initial=None), True)
def test_renders_required_when_possible_to_select_empty_field_list(self):
widget = self.widget(choices=[["", "select please"], ["P", "Paul"]])
self.assertIs(widget.use_required_attribute(initial=None), True)
def test_renders_required_when_possible_to_select_empty_field_none(self):
widget = self.widget(choices=[(None, "select please"), ("P", "Paul")])
self.assertIs(widget.use_required_attribute(initial=None), True)
def test_doesnt_render_required_when_no_choices_are_available(self):
widget = self.widget(choices=[])
self.assertIs(widget.use_required_attribute(initial=None), False)
def test_fieldset(self):
class TestForm(Form):
template_name = "forms_tests/use_fieldset.html"
field = ChoiceField(widget=self.widget, choices=self.beatles)
form = TestForm()
self.assertIs(self.widget.use_fieldset, False)
self.assertHTMLEqual(
'<div><label for="id_field">Field:</label>'
'<select name="field" id="id_field">'
'<option value="J">John</option> '
'<option value="P">Paul</option>'
'<option value="G">George</option>'
'<option value="R">Ringo</option></select></div>',
form.render(),
)
|
55ad20690724081da30b9dd0172291c1c2bc3ab8f6436be682c5724c9f542f42 | from django.forms import Form, NullBooleanField, NullBooleanSelect
from django.utils import translation
from .base import WidgetTest
class NullBooleanSelectTest(WidgetTest):
widget = NullBooleanSelect()
def test_render_true(self):
self.check_html(
self.widget,
"is_cool",
True,
html=(
"""<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true" selected>Yes</option>
<option value="false">No</option>
</select>"""
),
)
def test_render_false(self):
self.check_html(
self.widget,
"is_cool",
False,
html=(
"""<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true">Yes</option>
<option value="false" selected>No</option>
</select>"""
),
)
def test_render_none(self):
self.check_html(
self.widget,
"is_cool",
None,
html=(
"""<select name="is_cool">
<option value="unknown" selected>Unknown</option>
<option value="true">Yes</option>
<option value="false">No</option>
</select>"""
),
)
def test_render_value_unknown(self):
self.check_html(
self.widget,
"is_cool",
"unknown",
html=(
"""<select name="is_cool">
<option value="unknown" selected>Unknown</option>
<option value="true">Yes</option>
<option value="false">No</option>
</select>"""
),
)
def test_render_value_true(self):
self.check_html(
self.widget,
"is_cool",
"true",
html=(
"""<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true" selected>Yes</option>
<option value="false">No</option>
</select>"""
),
)
def test_render_value_false(self):
self.check_html(
self.widget,
"is_cool",
"false",
html=(
"""<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true">Yes</option>
<option value="false" selected>No</option>
</select>"""
),
)
def test_render_value_1(self):
self.check_html(
self.widget,
"is_cool",
"1",
html=(
"""<select name="is_cool">
<option value="unknown" selected>Unknown</option>
<option value="true">Yes</option>
<option value="false">No</option>
</select>"""
),
)
def test_render_value_2(self):
self.check_html(
self.widget,
"is_cool",
"2",
html=(
"""<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true" selected>Yes</option>
<option value="false">No</option>
</select>"""
),
)
def test_render_value_3(self):
self.check_html(
self.widget,
"is_cool",
"3",
html=(
"""<select name="is_cool">
<option value="unknown">Unknown</option>
<option value="true">Yes</option>
<option value="false" selected>No</option>
</select>"""
),
)
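    # Note on the "1"/"2"/"3" values in the three tests above (an
    # observation about the widget's value mapping, inferred from the
    # expected HTML): "2" selects True, "3" selects False, and anything
    # unrecognized -- such as "1" -- falls back to "unknown".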
def test_l10n(self):
"""
The NullBooleanSelect widget's options are lazily localized (#17190).
"""
widget = NullBooleanSelect()
with translation.override("de-at"):
self.check_html(
widget,
"id_bool",
True,
html=(
"""
<select name="id_bool">
<option value="unknown">Unbekannt</option>
<option value="true" selected>Ja</option>
<option value="false">Nein</option>
</select>
"""
),
)
def test_fieldset(self):
class TestForm(Form):
template_name = "forms_tests/use_fieldset.html"
field = NullBooleanField(widget=self.widget)
form = TestForm()
self.assertIs(self.widget.use_fieldset, False)
self.assertHTMLEqual(
'<div><label for="id_field">Field:</label>'
'<select name="field" id="id_field">'
'<option value="unknown" selected>Unknown</option>'
'<option value="true">Yes</option>'
'<option value="false">No</option></select></div>',
form.render(),
)
|
f909df0e446ae5601985e922c7611528d5927d6ab1fa93a1191ab7a7252af1a1 | from datetime import time
from django.forms import CharField, Form, TimeInput
from django.utils import translation
from .base import WidgetTest
class TimeInputTest(WidgetTest):
widget = TimeInput()
def test_render_none(self):
self.check_html(
self.widget, "time", None, html='<input type="text" name="time">'
)
def test_render_value(self):
"""
The microseconds are trimmed on display, by default.
"""
t = time(12, 51, 34, 482548)
self.assertEqual(str(t), "12:51:34.482548")
self.check_html(
self.widget,
"time",
t,
html='<input type="text" name="time" value="12:51:34">',
)
self.check_html(
self.widget,
"time",
time(12, 51, 34),
html=('<input type="text" name="time" value="12:51:34">'),
)
self.check_html(
self.widget,
"time",
time(12, 51),
html=('<input type="text" name="time" value="12:51:00">'),
)
def test_string(self):
"""Initializing from a string value."""
self.check_html(
self.widget,
"time",
"13:12:11",
html=('<input type="text" name="time" value="13:12:11">'),
)
def test_format(self):
"""
Use 'format' to change the way a value is displayed.
"""
t = time(12, 51, 34, 482548)
widget = TimeInput(format="%H:%M", attrs={"type": "time"})
self.check_html(
widget, "time", t, html='<input type="time" name="time" value="12:51">'
)
@translation.override("de-at")
def test_l10n(self):
t = time(12, 51, 34, 482548)
self.check_html(
self.widget,
"time",
t,
html='<input type="text" name="time" value="12:51:34">',
)
def test_fieldset(self):
class TestForm(Form):
template_name = "forms_tests/use_fieldset.html"
field = CharField(widget=self.widget)
form = TestForm()
self.assertIs(self.widget.use_fieldset, False)
self.assertHTMLEqual(
'<div><label for="id_field">Field:</label>'
'<input id="id_field" name="field" required type="text"></div>',
form.render(),
)
|
ac11ea249fd4b5e9df307e6dd384b9ee77808858eb1c7febfc3b43a332c74bde | from django.forms import Form, MultipleChoiceField, MultipleHiddenInput
from django.utils.datastructures import MultiValueDict
from .base import WidgetTest
class MultipleHiddenInputTest(WidgetTest):
widget = MultipleHiddenInput()
def test_render_single(self):
self.check_html(
self.widget,
"email",
["[email protected]"],
html='<input type="hidden" name="email" value="[email protected]">',
)
def test_render_multiple(self):
self.check_html(
self.widget,
"email",
["[email protected]", "[email protected]"],
html=(
'<input type="hidden" name="email" value="[email protected]">\n'
'<input type="hidden" name="email" value="[email protected]">'
),
)
def test_render_attrs(self):
self.check_html(
self.widget,
"email",
["[email protected]"],
attrs={"class": "fun"},
html=(
'<input type="hidden" name="email" value="[email protected]" '
'class="fun">'
),
)
def test_render_attrs_multiple(self):
self.check_html(
self.widget,
"email",
["[email protected]", "[email protected]"],
attrs={"class": "fun"},
html=(
'<input type="hidden" name="email" value="[email protected]" '
'class="fun">\n'
'<input type="hidden" name="email" value="[email protected]" class="fun">'
),
)
def test_render_attrs_constructor(self):
widget = MultipleHiddenInput(attrs={"class": "fun"})
self.check_html(widget, "email", [], "")
self.check_html(
widget,
"email",
["[email protected]"],
html=(
'<input type="hidden" class="fun" value="[email protected]" name="email">'
),
)
self.check_html(
widget,
"email",
["[email protected]", "[email protected]"],
html=(
'<input type="hidden" class="fun" value="[email protected]" '
'name="email">\n'
'<input type="hidden" class="fun" value="[email protected]" '
'name="email">'
),
)
self.check_html(
widget,
"email",
["[email protected]"],
attrs={"class": "special"},
html=(
'<input type="hidden" class="special" value="[email protected]" '
'name="email">'
),
)
def test_render_empty(self):
self.check_html(self.widget, "email", [], "")
def test_render_none(self):
self.check_html(self.widget, "email", None, "")
def test_render_increment_id(self):
"""
Each input should get a separate ID.
"""
self.check_html(
self.widget,
"letters",
["a", "b", "c"],
attrs={"id": "hideme"},
html=(
'<input type="hidden" name="letters" value="a" id="hideme_0">\n'
'<input type="hidden" name="letters" value="b" id="hideme_1">\n'
'<input type="hidden" name="letters" value="c" id="hideme_2">'
),
)
def test_fieldset(self):
class TestForm(Form):
template_name = "forms_tests/use_fieldset.html"
composers = MultipleChoiceField(
choices=[("J", "John Lennon"), ("P", "Paul McCartney")],
widget=MultipleHiddenInput,
)
form = TestForm(MultiValueDict({"composers": ["J", "P"]}))
self.assertIs(self.widget.use_fieldset, False)
self.assertHTMLEqual(
'<input type="hidden" name="composers" value="J" id="id_composers_0">'
'<input type="hidden" name="composers" value="P" id="id_composers_1">',
form.render(),
)
|
17897f0af5bbc1ab73810dd37801bf55f8c9e9a34247019006142f49ad2daa6c | import datetime
from django.forms import ChoiceField, Form, MultiWidget, RadioSelect
from django.test import override_settings
from .base import WidgetTest
class RadioSelectTest(WidgetTest):
widget = RadioSelect
def test_render(self):
choices = (("", "------"),) + self.beatles
self.check_html(
self.widget(choices=choices),
"beatle",
"J",
html="""
<div>
<div><label><input type="radio" name="beatle" value=""> ------</label></div>
<div><label>
<input checked type="radio" name="beatle" value="J"> John</label></div>
<div><label><input type="radio" name="beatle" value="P"> Paul</label></div>
<div><label>
<input type="radio" name="beatle" value="G"> George</label></div>
<div><label><input type="radio" name="beatle" value="R"> Ringo</label></div>
</div>
""",
)
def test_nested_choices(self):
nested_choices = (
("unknown", "Unknown"),
("Audio", (("vinyl", "Vinyl"), ("cd", "CD"))),
("Video", (("vhs", "VHS"), ("dvd", "DVD"))),
)
html = """
<div id="media">
<div>
<label for="media_0">
<input type="radio" name="nestchoice" value="unknown" id="media_0"> Unknown
</label></div>
<div>
<label>Audio</label>
<div>
<label for="media_1_0">
<input type="radio" name="nestchoice" value="vinyl" id="media_1_0"> Vinyl
</label></div>
<div> <label for="media_1_1">
<input type="radio" name="nestchoice" value="cd" id="media_1_1"> CD
</label></div>
</div><div>
<label>Video</label>
<div>
<label for="media_2_0">
<input type="radio" name="nestchoice" value="vhs" id="media_2_0"> VHS
</label></div>
<div>
<label for="media_2_1">
<input type="radio" name="nestchoice" value="dvd" id="media_2_1" checked> DVD
</label></div>
</div>
</div>
"""
self.check_html(
self.widget(choices=nested_choices),
"nestchoice",
"dvd",
attrs={"id": "media"},
html=html,
)
def test_constructor_attrs(self):
"""
Attributes provided at instantiation are passed to the constituent
inputs.
"""
widget = RadioSelect(attrs={"id": "foo"}, choices=self.beatles)
html = """
<div id="foo">
<div>
<label for="foo_0">
<input checked type="radio" id="foo_0" value="J" name="beatle"> John</label>
</div>
<div><label for="foo_1">
<input type="radio" id="foo_1" value="P" name="beatle"> Paul</label></div>
<div><label for="foo_2">
<input type="radio" id="foo_2" value="G" name="beatle"> George</label></div>
<div><label for="foo_3">
<input type="radio" id="foo_3" value="R" name="beatle"> Ringo</label></div>
</div>
"""
self.check_html(widget, "beatle", "J", html=html)
def test_render_attrs(self):
"""
Attributes provided at render-time are passed to the constituent
inputs.
"""
html = """
<div id="bar">
<div>
<label for="bar_0">
<input checked type="radio" id="bar_0" value="J" name="beatle"> John</label>
</div>
<div><label for="bar_1">
<input type="radio" id="bar_1" value="P" name="beatle"> Paul</label></div>
<div><label for="bar_2">
<input type="radio" id="bar_2" value="G" name="beatle"> George</label></div>
<div><label for="bar_3">
<input type="radio" id="bar_3" value="R" name="beatle"> Ringo</label></div>
</div>
"""
self.check_html(
self.widget(choices=self.beatles),
"beatle",
"J",
attrs={"id": "bar"},
html=html,
)
def test_class_attrs(self):
"""
        The <div> in the multiple_input.html widget template includes the
        class attribute.
"""
html = """
<div class="bar">
<div><label>
<input checked type="radio" class="bar" value="J" name="beatle"> John</label>
</div>
<div><label>
<input type="radio" class="bar" value="P" name="beatle"> Paul</label></div>
<div><label>
<input type="radio" class="bar" value="G" name="beatle"> George</label></div>
<div><label>
<input type="radio" class="bar" value="R" name="beatle"> Ringo</label></div>
</div>
"""
self.check_html(
self.widget(choices=self.beatles),
"beatle",
"J",
attrs={"class": "bar"},
html=html,
)
@override_settings(USE_THOUSAND_SEPARATOR=True)
def test_doesnt_localize_input_value(self):
choices = [
(1, "One"),
(1000, "One thousand"),
(1000000, "One million"),
]
html = """
<div>
<div><label><input type="radio" name="number" value="1"> One</label></div>
<div><label>
<input type="radio" name="number" value="1000"> One thousand</label></div>
<div><label>
<input type="radio" name="number" value="1000000"> One million</label></div>
</div>
"""
self.check_html(self.widget(choices=choices), "number", None, html=html)
choices = [
(datetime.time(0, 0), "midnight"),
(datetime.time(12, 0), "noon"),
]
html = """
<div>
<div><label>
<input type="radio" name="time" value="00:00:00"> midnight</label></div>
<div><label>
<input type="radio" name="time" value="12:00:00"> noon</label></div>
</div>
"""
self.check_html(self.widget(choices=choices), "time", None, html=html)
def test_render_as_subwidget(self):
"""A RadioSelect as a subwidget of MultiWidget."""
choices = (("", "------"),) + self.beatles
self.check_html(
MultiWidget([self.widget(choices=choices)]),
"beatle",
["J"],
html="""
<div>
<div><label>
<input type="radio" name="beatle_0" value=""> ------</label></div>
<div><label>
<input checked type="radio" name="beatle_0" value="J"> John</label></div>
<div><label>
<input type="radio" name="beatle_0" value="P"> Paul</label></div>
<div><label>
<input type="radio" name="beatle_0" value="G"> George</label></div>
<div><label>
<input type="radio" name="beatle_0" value="R"> Ringo</label></div>
</div>
""",
)
def test_fieldset(self):
class TestForm(Form):
template_name = "forms_tests/use_fieldset.html"
field = ChoiceField(
widget=self.widget, choices=self.beatles, required=False
)
form = TestForm()
self.assertIs(self.widget.use_fieldset, True)
self.assertHTMLEqual(
'<div><fieldset><legend>Field:</legend><div id="id_field">'
'<div><label for="id_field_0">'
'<input type="radio" name="field" value="J" id="id_field_0"> John'
'</label></div><div><label for="id_field_1">'
'<input type="radio" name="field" value="P" id="id_field_1">Paul'
'</label></div><div><label for="id_field_2"><input type="radio" '
'name="field" value="G" id="id_field_2"> George</label></div>'
'<div><label for="id_field_3"><input type="radio" name="field" '
'value="R" id="id_field_3">Ringo</label></div></div></fieldset>'
"</div>",
form.render(),
)
|
e6d6ae8f322952a12044cf925337bfcedf78cda373465798781d9c2da7429cc3 | import asyncio
import logging
from django.core.exceptions import ImproperlyConfigured
from django.http import (
HttpResponse,
HttpResponseGone,
HttpResponseNotAllowed,
HttpResponsePermanentRedirect,
HttpResponseRedirect,
)
from django.template.response import TemplateResponse
from django.urls import reverse
from django.utils.decorators import classonlymethod
from django.utils.functional import classproperty
logger = logging.getLogger("django.request")
class ContextMixin:
"""
A default context mixin that passes the keyword arguments received by
get_context_data() as the template context.
"""
extra_context = None
def get_context_data(self, **kwargs):
kwargs.setdefault("view", self)
if self.extra_context is not None:
kwargs.update(self.extra_context)
return kwargs
class View:
"""
Intentionally simple parent class for all views. Only implements
dispatch-by-method and simple sanity checking.
"""
http_method_names = [
"get",
"post",
"put",
"patch",
"delete",
"head",
"options",
"trace",
]
def __init__(self, **kwargs):
"""
        Constructor. Called in the URLconf; keyword arguments passed here
        are set as attributes on the instance.
"""
# Go through keyword arguments, and either save their values to our
# instance, or raise an error.
for key, value in kwargs.items():
setattr(self, key, value)
@classproperty
def view_is_async(cls):
handlers = [
getattr(cls, method)
for method in cls.http_method_names
if (method != "options" and hasattr(cls, method))
]
if not handlers:
return False
is_async = asyncio.iscoroutinefunction(handlers[0])
if not all(asyncio.iscoroutinefunction(h) == is_async for h in handlers[1:]):
raise ImproperlyConfigured(
f"{cls.__qualname__} HTTP handlers must either be all sync or all "
"async."
)
return is_async
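    # Usage sketch (hypothetical view, not part of this module): a class
    # whose handlers are all coroutines reports view_is_async == True,
    # while mixing sync and async handlers raises ImproperlyConfigured
    # when the property is evaluated.
    #
    #   class PingView(View):
    #       async def get(self, request):
    #           return HttpResponse("pong")
    #
    #   PingView.view_is_async  # True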
@classonlymethod
def as_view(cls, **initkwargs):
"""Main entry point for a request-response process."""
for key in initkwargs:
if key in cls.http_method_names:
raise TypeError(
"The method name %s is not accepted as a keyword argument "
"to %s()." % (key, cls.__name__)
)
if not hasattr(cls, key):
raise TypeError(
"%s() received an invalid keyword %r. as_view "
"only accepts arguments that are already "
"attributes of the class." % (cls.__name__, key)
)
def view(request, *args, **kwargs):
self = cls(**initkwargs)
self.setup(request, *args, **kwargs)
if not hasattr(self, "request"):
raise AttributeError(
"%s instance has no 'request' attribute. Did you override "
"setup() and forget to call super()?" % cls.__name__
)
return self.dispatch(request, *args, **kwargs)
view.view_class = cls
view.view_initkwargs = initkwargs
# __name__ and __qualname__ are intentionally left unchanged as
# view_class should be used to robustly determine the name of the view
# instead.
view.__doc__ = cls.__doc__
view.__module__ = cls.__module__
view.__annotations__ = cls.dispatch.__annotations__
# Copy possible attributes set by decorators, e.g. @csrf_exempt, from
# the dispatch method.
view.__dict__.update(cls.dispatch.__dict__)
# Mark the callback if the view class is async.
if cls.view_is_async:
view._is_coroutine = asyncio.coroutines._is_coroutine
return view
def setup(self, request, *args, **kwargs):
"""Initialize attributes shared by all view methods."""
if hasattr(self, "get") and not hasattr(self, "head"):
self.head = self.get
self.request = request
self.args = args
self.kwargs = kwargs
def dispatch(self, request, *args, **kwargs):
# Try to dispatch to the right method; if a method doesn't exist,
# defer to the error handler. Also defer to the error handler if the
# request method isn't on the approved list.
if request.method.lower() in self.http_method_names:
handler = getattr(
self, request.method.lower(), self.http_method_not_allowed
)
else:
handler = self.http_method_not_allowed
return handler(request, *args, **kwargs)
def http_method_not_allowed(self, request, *args, **kwargs):
logger.warning(
"Method Not Allowed (%s): %s",
request.method,
request.path,
extra={"status_code": 405, "request": request},
)
return HttpResponseNotAllowed(self._allowed_methods())
def options(self, request, *args, **kwargs):
"""Handle responding to requests for the OPTIONS HTTP verb."""
response = HttpResponse()
response.headers["Allow"] = ", ".join(self._allowed_methods())
response.headers["Content-Length"] = "0"
if self.view_is_async:
async def func():
return response
return func()
else:
return response
def _allowed_methods(self):
return [m.upper() for m in self.http_method_names if hasattr(self, m)]
class TemplateResponseMixin:
"""A mixin that can be used to render a template."""
template_name = None
template_engine = None
response_class = TemplateResponse
content_type = None
def render_to_response(self, context, **response_kwargs):
"""
Return a response, using the `response_class` for this view, with a
template rendered with the given context.
Pass response_kwargs to the constructor of the response class.
"""
response_kwargs.setdefault("content_type", self.content_type)
return self.response_class(
request=self.request,
template=self.get_template_names(),
context=context,
using=self.template_engine,
**response_kwargs,
)
def get_template_names(self):
"""
Return a list of template names to be used for the request. Must return
a list. May not be called if render_to_response() is overridden.
"""
if self.template_name is None:
raise ImproperlyConfigured(
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
else:
return [self.template_name]
class TemplateView(TemplateResponseMixin, ContextMixin, View):
"""
Render a template. Pass keyword arguments from the URLconf to the context.
"""
def get(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
return self.render_to_response(context)
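    # URLconf usage sketch (hypothetical route and template names):
    #
    #   path("about/", TemplateView.as_view(template_name="about.html"))
    #
    # Keyword arguments captured by the URL pattern (e.g. from
    # "pages/<slug:page>/") are forwarded to get_context_data() and so
    # end up in the template context.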
class RedirectView(View):
"""Provide a redirect on any GET request."""
permanent = False
url = None
pattern_name = None
query_string = False
def get_redirect_url(self, *args, **kwargs):
"""
        Return the URL to redirect to. Keyword arguments from the URL pattern
match generating the redirect request are provided as kwargs to this
method.
"""
if self.url:
url = self.url % kwargs
elif self.pattern_name:
url = reverse(self.pattern_name, args=args, kwargs=kwargs)
else:
return None
args = self.request.META.get("QUERY_STRING", "")
if args and self.query_string:
url = "%s?%s" % (url, args)
return url
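    # Usage sketch (hypothetical names): with
    #
    #   RedirectView.as_view(pattern_name="article-detail", permanent=True)
    #
    # the captured args/kwargs are fed to reverse(); with
    # url="/new/%(slug)s/" instead, the kwargs fill the %-placeholders,
    # and query_string=True re-appends the original query string.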
def get(self, request, *args, **kwargs):
url = self.get_redirect_url(*args, **kwargs)
if url:
if self.permanent:
return HttpResponsePermanentRedirect(url)
else:
return HttpResponseRedirect(url)
else:
logger.warning(
"Gone: %s", request.path, extra={"status_code": 410, "request": request}
)
return HttpResponseGone()
def head(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def options(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
def patch(self, request, *args, **kwargs):
return self.get(request, *args, **kwargs)
|
29a26049065e9111d9e6e11b5b2b2146f51c5497bd7937d1845ff51fc56d308f | import collections.abc
import copy
import datetime
import decimal
import math
import operator
import uuid
import warnings
from base64 import b64decode, b64encode
from functools import partialmethod, total_ordering
from django import forms
from django.apps import apps
from django.conf import settings
from django.core import checks, exceptions, validators
from django.db import connection, connections, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin
from django.utils import timezone
from django.utils.datastructures import DictWrapper
from django.utils.dateparse import (
parse_date,
parse_datetime,
parse_duration,
parse_time,
)
from django.utils.duration import duration_microseconds, duration_string
from django.utils.functional import Promise, cached_property
from django.utils.ipv6 import clean_ipv6_address
from django.utils.itercompat import is_iterable
from django.utils.text import capfirst
from django.utils.translation import gettext_lazy as _
__all__ = [
"AutoField",
"BLANK_CHOICE_DASH",
"BigAutoField",
"BigIntegerField",
"BinaryField",
"BooleanField",
"CharField",
"CommaSeparatedIntegerField",
"DateField",
"DateTimeField",
"DecimalField",
"DurationField",
"EmailField",
"Empty",
"Field",
"FilePathField",
"FloatField",
"GenericIPAddressField",
"IPAddressField",
"IntegerField",
"NOT_PROVIDED",
"NullBooleanField",
"PositiveBigIntegerField",
"PositiveIntegerField",
"PositiveSmallIntegerField",
"SlugField",
"SmallAutoField",
"SmallIntegerField",
"TextField",
"TimeField",
"URLField",
"UUIDField",
]
class Empty:
pass
class NOT_PROVIDED:
pass
# The values to use for "blank" in SelectFields. Will be appended to the start
# of most "choices" lists.
BLANK_CHOICE_DASH = [("", "---------")]
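# For example, a field with blank=True typically renders its options as
# BLANK_CHOICE_DASH + list(field.choices), so "---------" appears as the
# first <option>.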
def _load_field(app_label, model_name, field_name):
return apps.get_model(app_label, model_name)._meta.get_field(field_name)
# A guide to Field parameters:
#
# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
# "name", except in the case of ForeignKeys, where "_id" is
# appended.
# * db_column: The db_column specified in the model (or None).
# * column: The database column for this field. This is the same as
# "attname", except if db_column is specified.
#
# Code that introspects values, or does other dynamic things, should use
# attname. For example, this gets the primary key value of object "obj":
#
# getattr(obj, opts.pk.attname)
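#
# Concretely (hypothetical model): for `author = ForeignKey(Author, ...)`,
# name is "author", attname is "author_id", and column follows attname
# unless db_column says otherwise.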
def _empty(of_cls):
new = Empty()
new.__class__ = of_cls
return new
def return_None():
return None
@total_ordering
class Field(RegisterLookupMixin):
"""Base class for all field types"""
    # Designates whether empty strings are fundamentally allowed at the
    # database level.
empty_strings_allowed = True
empty_values = list(validators.EMPTY_VALUES)
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that Django implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
default_validators = [] # Default set of validators
default_error_messages = {
"invalid_choice": _("Value %(value)r is not a valid choice."),
"null": _("This field cannot be null."),
"blank": _("This field cannot be blank."),
"unique": _("%(model_name)s with this %(field_label)s already exists."),
# Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
# Eg: "Title must be unique for pub_date year"
"unique_for_date": _(
"%(field_label)s must be unique for "
"%(date_field_label)s %(lookup_type)s."
),
}
system_check_deprecated_details = None
system_check_removed_details = None
# Attributes that don't affect a column definition.
# These attributes are ignored when altering the field.
non_db_attrs = (
"blank",
"choices",
"db_column",
"editable",
"error_messages",
"help_text",
"limit_choices_to",
# Database-level options are not supported, see #21961.
"on_delete",
"related_name",
"related_query_name",
"validators",
"verbose_name",
)
# Field flags
hidden = False
many_to_many = None
many_to_one = None
one_to_many = None
one_to_one = None
related_model = None
descriptor_class = DeferredAttribute
# Generic field type description, usually overridden by subclasses
def _description(self):
return _("Field of type: %(field_type)s") % {
"field_type": self.__class__.__name__
}
description = property(_description)
def __init__(
self,
verbose_name=None,
name=None,
primary_key=False,
max_length=None,
unique=False,
blank=False,
null=False,
db_index=False,
rel=None,
default=NOT_PROVIDED,
editable=True,
serialize=True,
unique_for_date=None,
unique_for_month=None,
unique_for_year=None,
choices=None,
help_text="",
db_column=None,
db_tablespace=None,
auto_created=False,
validators=(),
error_messages=None,
):
self.name = name
self.verbose_name = verbose_name # May be set by set_attributes_from_name
self._verbose_name = verbose_name # Store original for deconstruction
self.primary_key = primary_key
self.max_length, self._unique = max_length, unique
self.blank, self.null = blank, null
self.remote_field = rel
self.is_relation = self.remote_field is not None
self.default = default
self.editable = editable
self.serialize = serialize
self.unique_for_date = unique_for_date
self.unique_for_month = unique_for_month
self.unique_for_year = unique_for_year
if isinstance(choices, collections.abc.Iterator):
choices = list(choices)
self.choices = choices
self.help_text = help_text
self.db_index = db_index
self.db_column = db_column
self._db_tablespace = db_tablespace
self.auto_created = auto_created
# Adjust the appropriate creation counter, and save our local copy.
if auto_created:
self.creation_counter = Field.auto_creation_counter
Field.auto_creation_counter -= 1
else:
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
self._validators = list(validators) # Store for deconstruction later
self._error_messages = error_messages # Store for deconstruction later
def __str__(self):
"""
Return "app_label.model_label.field_name" for fields attached to
models.
"""
if not hasattr(self, "model"):
return super().__str__()
model = self.model
return "%s.%s" % (model._meta.label, self.name)
def __repr__(self):
"""Display the module, class, and name of the field."""
path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
name = getattr(self, "name", None)
if name is not None:
return "<%s: %s>" % (path, name)
return "<%s>" % path
def check(self, **kwargs):
return [
*self._check_field_name(),
*self._check_choices(),
*self._check_db_index(),
*self._check_null_allowed_for_primary_keys(),
*self._check_backend_specific_checks(**kwargs),
*self._check_validators(),
*self._check_deprecation_details(),
]
def _check_field_name(self):
"""
Check if field name is valid, i.e. 1) does not end with an
underscore, 2) does not contain "__" and 3) is not "pk".
"""
if self.name.endswith("_"):
return [
checks.Error(
"Field names must not end with an underscore.",
obj=self,
id="fields.E001",
)
]
elif LOOKUP_SEP in self.name:
return [
checks.Error(
'Field names must not contain "%s".' % LOOKUP_SEP,
obj=self,
id="fields.E002",
)
]
elif self.name == "pk":
return [
checks.Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=self,
id="fields.E003",
)
]
else:
return []
@classmethod
def _choices_is_value(cls, value):
return isinstance(value, (str, Promise)) or not is_iterable(value)
def _check_choices(self):
if not self.choices:
return []
if not is_iterable(self.choices) or isinstance(self.choices, str):
return [
checks.Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=self,
id="fields.E004",
)
]
choice_max_length = 0
# Expect [group_name, [value, display]]
for choices_group in self.choices:
try:
group_name, group_choices = choices_group
except (TypeError, ValueError):
# Containing non-pairs
break
try:
if not all(
self._choices_is_value(value) and self._choices_is_value(human_name)
for value, human_name in group_choices
):
break
if self.max_length is not None and group_choices:
choice_max_length = max(
[
choice_max_length,
*(
len(value)
for value, _ in group_choices
if isinstance(value, str)
),
]
)
except (TypeError, ValueError):
# No groups, choices in the form [value, display]
value, human_name = group_name, group_choices
if not self._choices_is_value(value) or not self._choices_is_value(
human_name
):
break
if self.max_length is not None and isinstance(value, str):
choice_max_length = max(choice_max_length, len(value))
# Special case: choices=['ab']
if isinstance(choices_group, str):
break
else:
if self.max_length is not None and choice_max_length > self.max_length:
return [
checks.Error(
"'max_length' is too small to fit the longest value "
"in 'choices' (%d characters)." % choice_max_length,
obj=self,
id="fields.E009",
),
]
return []
return [
checks.Error(
"'choices' must be an iterable containing "
"(actual value, human readable name) tuples.",
obj=self,
id="fields.E005",
)
]
def _check_db_index(self):
if self.db_index not in (None, True, False):
return [
checks.Error(
"'db_index' must be None, True or False.",
obj=self,
id="fields.E006",
)
]
else:
return []
def _check_null_allowed_for_primary_keys(self):
if (
self.primary_key
and self.null
and not connection.features.interprets_empty_strings_as_nulls
):
# We cannot reliably check this for backends like Oracle which
# consider NULL and '' to be equal (and thus set up
# character-based fields a little differently).
return [
checks.Error(
"Primary keys must not have null=True.",
hint=(
"Set null=False on the field, or "
"remove primary_key=True argument."
),
obj=self,
id="fields.E007",
)
]
else:
return []
def _check_backend_specific_checks(self, databases=None, **kwargs):
if databases is None:
return []
app_label = self.model._meta.app_label
errors = []
for alias in databases:
if router.allow_migrate(
alias, app_label, model_name=self.model._meta.model_name
):
errors.extend(connections[alias].validation.check_field(self, **kwargs))
return errors
def _check_validators(self):
errors = []
for i, validator in enumerate(self.validators):
if not callable(validator):
errors.append(
checks.Error(
"All 'validators' must be callable.",
hint=(
"validators[{i}] ({repr}) isn't a function or "
"instance of a validator class.".format(
i=i,
repr=repr(validator),
)
),
obj=self,
id="fields.E008",
)
)
return errors
def _check_deprecation_details(self):
if self.system_check_removed_details is not None:
return [
checks.Error(
self.system_check_removed_details.get(
"msg",
"%s has been removed except for support in historical "
"migrations." % self.__class__.__name__,
),
hint=self.system_check_removed_details.get("hint"),
obj=self,
id=self.system_check_removed_details.get("id", "fields.EXXX"),
)
]
elif self.system_check_deprecated_details is not None:
return [
checks.Warning(
self.system_check_deprecated_details.get(
"msg", "%s has been deprecated." % self.__class__.__name__
),
hint=self.system_check_deprecated_details.get("hint"),
obj=self,
id=self.system_check_deprecated_details.get("id", "fields.WXXX"),
)
]
return []
def get_col(self, alias, output_field=None):
if alias == self.model._meta.db_table and (
output_field is None or output_field == self
):
return self.cached_col
from django.db.models.expressions import Col
return Col(alias, self, output_field)
@cached_property
def cached_col(self):
from django.db.models.expressions import Col
return Col(self.model._meta.db_table, self)
def select_format(self, compiler, sql, params):
"""
Custom format for select clauses. For example, GIS columns need to be
selected as AsText(table.col) on MySQL as the table.col data can't be
used by Django.
"""
return sql, params
def deconstruct(self):
"""
Return enough information to recreate the field as a 4-tuple:
* The name of the field on the model, if contribute_to_class() has
been run.
* The import path of the field, including the class, e.g.
django.db.models.IntegerField. This should be the most portable
version, so less specific may be better.
* A list of positional arguments.
* A dict of keyword arguments.
Note that the positional or keyword arguments must contain values of
the following types (including inner values of collection types):
* None, bool, str, int, float, complex, set, frozenset, list, tuple,
dict
* UUID
* datetime.datetime (naive), datetime.date
* top-level classes, top-level functions - will be referenced by their
full import path
* Storage instances - these have their own deconstruct() method
This is because the values here must be serialized into a text format
(possibly new Python code, possibly JSON) and these are the only types
with encoding handlers defined.
        There's no need to return the exact way the field was instantiated this
        time; just ensure that the resulting field is the same - prefer keyword
arguments over positional ones, and omit parameters with their default
values.
"""
# Short-form way of fetching all the default parameters
keywords = {}
possibles = {
"verbose_name": None,
"primary_key": False,
"max_length": None,
"unique": False,
"blank": False,
"null": False,
"db_index": False,
"default": NOT_PROVIDED,
"editable": True,
"serialize": True,
"unique_for_date": None,
"unique_for_month": None,
"unique_for_year": None,
"choices": None,
"help_text": "",
"db_column": None,
"db_tablespace": None,
"auto_created": False,
"validators": [],
"error_messages": None,
}
attr_overrides = {
"unique": "_unique",
"error_messages": "_error_messages",
"validators": "_validators",
"verbose_name": "_verbose_name",
"db_tablespace": "_db_tablespace",
}
equals_comparison = {"choices", "validators"}
for name, default in possibles.items():
value = getattr(self, attr_overrides.get(name, name))
# Unroll anything iterable for choices into a concrete list
if name == "choices" and isinstance(value, collections.abc.Iterable):
value = list(value)
# Do correct kind of comparison
if name in equals_comparison:
if value != default:
keywords[name] = value
else:
if value is not default:
keywords[name] = value
# Work out path - we shorten it for known Django core fields
path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
if path.startswith("django.db.models.fields.related"):
path = path.replace("django.db.models.fields.related", "django.db.models")
elif path.startswith("django.db.models.fields.files"):
path = path.replace("django.db.models.fields.files", "django.db.models")
elif path.startswith("django.db.models.fields.json"):
path = path.replace("django.db.models.fields.json", "django.db.models")
elif path.startswith("django.db.models.fields.proxy"):
path = path.replace("django.db.models.fields.proxy", "django.db.models")
elif path.startswith("django.db.models.fields"):
path = path.replace("django.db.models.fields", "django.db.models")
# Return basic info - other fields should override this.
return (self.name, path, [], keywords)
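    # Deconstruction sketch (hypothetical field): a CharField declared as
    # `title = models.CharField(max_length=100, db_column="name_col")`
    # deconstructs to roughly
    #
    #   ("title", "django.db.models.CharField", [],
    #    {"max_length": 100, "db_column": "name_col"})
    #
    # -- default-valued parameters are omitted and the path is shortened
    # as above.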
def clone(self):
"""
Uses deconstruct() to clone a new copy of this Field.
Will not preserve any class attachments/attribute names.
"""
name, path, args, kwargs = self.deconstruct()
return self.__class__(*args, **kwargs)
def __eq__(self, other):
# Needed for @total_ordering
if isinstance(other, Field):
return self.creation_counter == other.creation_counter and getattr(
self, "model", None
) == getattr(other, "model", None)
return NotImplemented
def __lt__(self, other):
# This is needed because bisect does not take a comparison function.
# Order by creation_counter first for backward compatibility.
if isinstance(other, Field):
if (
self.creation_counter != other.creation_counter
or not hasattr(self, "model")
and not hasattr(other, "model")
):
return self.creation_counter < other.creation_counter
elif hasattr(self, "model") != hasattr(other, "model"):
return not hasattr(self, "model") # Order no-model fields first
else:
# creation_counter's are equal, compare only models.
return (self.model._meta.app_label, self.model._meta.model_name) < (
other.model._meta.app_label,
other.model._meta.model_name,
)
return NotImplemented
def __hash__(self):
return hash(self.creation_counter)
def __deepcopy__(self, memodict):
# We don't have to deepcopy very much here, since most things are not
# intended to be altered after initial creation.
obj = copy.copy(self)
if self.remote_field:
obj.remote_field = copy.copy(self.remote_field)
if hasattr(self.remote_field, "field") and self.remote_field.field is self:
obj.remote_field.field = obj
memodict[id(self)] = obj
return obj
def __copy__(self):
# We need to avoid hitting __reduce__, so define this
# slightly weird copy construct.
obj = Empty()
obj.__class__ = self.__class__
obj.__dict__ = self.__dict__.copy()
return obj
def __reduce__(self):
"""
Pickling should return the model._meta.fields instance of the field,
not a new copy of that field. So, use the app registry to load the
model and then the field back.
"""
if not hasattr(self, "model"):
# Fields are sometimes used without attaching them to models (for
# example in aggregation). In this case give back a plain field
# instance. The code below will create a new empty instance of
# class self.__class__, then update its dict with self.__dict__
# values - so, this is very close to normal pickle.
state = self.__dict__.copy()
# The _get_default cached_property can't be pickled due to lambda
# usage.
state.pop("_get_default", None)
return _empty, (self.__class__,), state
return _load_field, (
self.model._meta.app_label,
self.model._meta.object_name,
self.name,
)
def get_pk_value_on_save(self, instance):
"""
Hook to generate new PK values on save. This method is called when
saving instances with no primary key value set. If this method returns
        something other than None, the returned value is used when saving
the new instance.
"""
if self.default:
return self.get_default()
return None
def to_python(self, value):
"""
Convert the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Return the converted value. Subclasses should override this.
"""
return value
@cached_property
def error_messages(self):
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, "default_error_messages", {}))
messages.update(self._error_messages or {})
return messages
@cached_property
def validators(self):
"""
Some validators can't be created at field initialization time.
This method provides a way to delay their creation until required.
"""
return [*self.default_validators, *self._validators]
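    # For instance (an illustration, not from this class): IntegerField
    # appends Min/MaxValueValidator instances whose bounds depend on the
    # database backend, which isn't known at field definition time --
    # hence the deferred, cached construction here.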
def run_validators(self, value):
if value in self.empty_values:
return
errors = []
for v in self.validators:
try:
v(value)
except exceptions.ValidationError as e:
if hasattr(e, "code") and e.code in self.error_messages:
e.message = self.error_messages[e.code]
errors.extend(e.error_list)
if errors:
raise exceptions.ValidationError(errors)
def validate(self, value, model_instance):
"""
Validate value and raise ValidationError if necessary. Subclasses
should override this to provide validation logic.
"""
if not self.editable:
# Skip validation for non-editable fields.
return
if self.choices is not None and value not in self.empty_values:
for option_key, option_value in self.choices:
if isinstance(option_value, (list, tuple)):
# This is an optgroup, so look inside the group for
# options.
for optgroup_key, optgroup_value in option_value:
if value == optgroup_key:
return
elif value == option_key:
return
raise exceptions.ValidationError(
self.error_messages["invalid_choice"],
code="invalid_choice",
params={"value": value},
)
if value is None and not self.null:
raise exceptions.ValidationError(self.error_messages["null"], code="null")
if not self.blank and value in self.empty_values:
raise exceptions.ValidationError(self.error_messages["blank"], code="blank")
def clean(self, value, model_instance):
"""
Convert the value's type and run validation. Validation errors
from to_python() and validate() are propagated. Return the correct
value if no error is raised.
"""
value = self.to_python(value)
self.validate(value, model_instance)
self.run_validators(value)
return value
def db_type_parameters(self, connection):
return DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
def db_check(self, connection):
"""
Return the database column check constraint for this field, for the
provided connection. Works the same way as db_type() for the case that
get_internal_type() does not map to a preexisting model field.
"""
data = self.db_type_parameters(connection)
try:
return (
connection.data_type_check_constraints[self.get_internal_type()] % data
)
except KeyError:
return None
def db_type(self, connection):
"""
Return the database column data type for this field, for the provided
connection.
"""
# The default implementation of this method looks at the
# backend-specific data_types dictionary, looking up the field by its
# "internal type".
#
# A Field class can implement the get_internal_type() method to specify
# which *preexisting* Django Field class it's most similar to -- i.e.,
# a custom field might be represented by a TEXT column type, which is
# the same as the TextField Django field type, which means the custom
# field's get_internal_type() returns 'TextField'.
#
# But the limitation of the get_internal_type() / data_types approach
# is that it cannot handle database column types that aren't already
# mapped to one of the built-in Django field types. In this case, you
# can implement db_type() instead of get_internal_type() to specify
# exactly which wacky database column type you want to use.
data = self.db_type_parameters(connection)
try:
return connection.data_types[self.get_internal_type()] % data
except KeyError:
return None
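    # Illustrative sketch of the second approach described above (a
    # hypothetical custom field, not part of Django):
    #
    #     class MACAddressField(Field):
    #         def db_type(self, connection):
    #             if connection.vendor == "postgresql":
    #                 return "macaddr"
    #             return "char(17)"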
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. For example, this method is called by ForeignKey and OneToOneField
        to determine their data types.
"""
return self.db_type(connection)
def cast_db_type(self, connection):
"""Return the data type to use in the Cast() function."""
db_type = connection.ops.cast_data_types.get(self.get_internal_type())
if db_type:
return db_type % self.db_type_parameters(connection)
return self.db_type(connection)
def db_parameters(self, connection):
"""
Extension of db_type(), providing a range of different return values
(type, checks). This will look at db_type(), allowing custom model
fields to override it.
"""
type_string = self.db_type(connection)
check_string = self.db_check(connection)
return {
"type": type_string,
"check": check_string,
}
def db_type_suffix(self, connection):
return connection.data_types_suffix.get(self.get_internal_type())
def get_db_converters(self, connection):
if hasattr(self, "from_db_value"):
return [self.from_db_value]
return []
@property
def unique(self):
return self._unique or self.primary_key
@property
def db_tablespace(self):
return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
@property
def db_returning(self):
"""
Private API intended only to be used by Django itself. Currently only
the PostgreSQL backend supports returning multiple fields on a model.
"""
return False
def set_attributes_from_name(self, name):
self.name = self.name or name
self.attname, self.column = self.get_attname_column()
self.concrete = self.column is not None
if self.verbose_name is None and self.name:
self.verbose_name = self.name.replace("_", " ")
def contribute_to_class(self, cls, name, private_only=False):
"""
Register the field with the model class it belongs to.
If private_only is True, create a separate instance of this field
for every subclass of cls, even if cls is not an abstract model.
"""
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self, private=private_only)
if self.column:
setattr(cls, self.attname, self.descriptor_class(self))
if self.choices is not None:
# Don't override a get_FOO_display() method defined explicitly on
# this class, but don't check methods derived from inheritance, to
# allow overriding inherited choices. For more complex inheritance
# structures users should override contribute_to_class().
if "get_%s_display" % self.name not in cls.__dict__:
setattr(
cls,
"get_%s_display" % self.name,
partialmethod(cls._get_FIELD_display, field=self),
)
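    # Illustrative sketch (hypothetical model): given
    #
    #     class Person(models.Model):
    #         gender = models.CharField(max_length=1, choices=[("M", "Male")])
    #
    # contribute_to_class() attaches a method so that
    # Person(gender="M").get_gender_display() returns "Male".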
def get_filter_kwargs_for_object(self, obj):
"""
        Return a dict that, when passed as kwargs to
        self.model._default_manager.filter(), would yield all instances
        having the same value for this field as obj has.
"""
return {self.name: getattr(obj, self.attname)}
def get_attname(self):
return self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
def get_internal_type(self):
return self.__class__.__name__
def pre_save(self, model_instance, add):
"""Return field's value just before saving."""
return getattr(model_instance, self.attname)
def get_prep_value(self, value):
"""Perform preliminary non-db specific value checks and conversions."""
if isinstance(value, Promise):
value = value._proxy____cast()
return value
def get_db_prep_value(self, value, connection, prepared=False):
"""
Return field's value prepared for interacting with the database backend.
Used by the default implementations of get_db_prep_save().
"""
if not prepared:
value = self.get_prep_value(value)
return value
def get_db_prep_save(self, value, connection):
"""Return field's value prepared for saving into a database."""
return self.get_db_prep_value(value, connection=connection, prepared=False)
def has_default(self):
"""Return a boolean of whether this field has a default value."""
return self.default is not NOT_PROVIDED
def get_default(self):
"""Return the default value for this field."""
return self._get_default()
@cached_property
def _get_default(self):
if self.has_default():
if callable(self.default):
return self.default
return lambda: self.default
if (
not self.empty_strings_allowed
or self.null
and not connection.features.interprets_empty_strings_as_nulls
):
return return_None
return str # return empty string
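    # Illustrative sketch: a callable default is invoked anew for each
    # instance, while a plain value is computed once at class definition,
    # which is why time-dependent or mutable defaults should be callables:
    #
    #     DateField(default=datetime.date.today)    # evaluated per instance
    #     DateField(default=datetime.date.today())  # fixed at import time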
def get_choices(
self,
include_blank=True,
blank_choice=BLANK_CHOICE_DASH,
limit_choices_to=None,
ordering=(),
):
"""
        Return choices with a default blank choice included, for use
as <select> choices for this field.
"""
if self.choices is not None:
choices = list(self.choices)
if include_blank:
blank_defined = any(
choice in ("", None) for choice, _ in self.flatchoices
)
if not blank_defined:
choices = blank_choice + choices
return choices
rel_model = self.remote_field.model
limit_choices_to = limit_choices_to or self.get_limit_choices_to()
choice_func = operator.attrgetter(
self.remote_field.get_related_field().attname
if hasattr(self.remote_field, "get_related_field")
else "pk"
)
qs = rel_model._default_manager.complex_filter(limit_choices_to)
if ordering:
qs = qs.order_by(*ordering)
return (blank_choice if include_blank else []) + [
(choice_func(x), str(x)) for x in qs
]
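    # Illustrative sketch: the blank option is prepended unless the declared
    # choices already contain one:
    #
    #     f = CharField(max_length=1, choices=[("M", "Male")], blank=True)
    #     f.get_choices()  # -> [("", "---------"), ("M", "Male")]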
def value_to_string(self, obj):
"""
Return a string value of this field from the passed obj.
This is used by the serialization framework.
"""
return str(self.value_from_object(obj))
def _get_flatchoices(self):
"""Flattened version of choices tuple."""
if self.choices is None:
return []
flat = []
for choice, value in self.choices:
if isinstance(value, (list, tuple)):
flat.extend(value)
else:
flat.append((choice, value))
return flat
flatchoices = property(_get_flatchoices)
def save_form_data(self, instance, data):
setattr(instance, self.name, data)
def formfield(self, form_class=None, choices_form_class=None, **kwargs):
"""Return a django.forms.Field instance for this field."""
defaults = {
"required": not self.blank,
"label": capfirst(self.verbose_name),
"help_text": self.help_text,
}
if self.has_default():
if callable(self.default):
defaults["initial"] = self.default
defaults["show_hidden_initial"] = True
else:
defaults["initial"] = self.get_default()
if self.choices is not None:
# Fields with choices get special treatment.
include_blank = self.blank or not (
self.has_default() or "initial" in kwargs
)
defaults["choices"] = self.get_choices(include_blank=include_blank)
defaults["coerce"] = self.to_python
if self.null:
defaults["empty_value"] = None
if choices_form_class is not None:
form_class = choices_form_class
else:
form_class = forms.TypedChoiceField
# Many of the subclass-specific formfield arguments (min_value,
# max_value) don't apply for choice fields, so be sure to only pass
# the values that TypedChoiceField will understand.
for k in list(kwargs):
if k not in (
"coerce",
"empty_value",
"choices",
"required",
"widget",
"label",
"initial",
"help_text",
"error_messages",
"show_hidden_initial",
"disabled",
):
del kwargs[k]
defaults.update(kwargs)
if form_class is None:
form_class = forms.CharField
return form_class(**defaults)
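    # Illustrative sketch: a model field with choices yields a
    # TypedChoiceField whose coerce callable is to_python(), so cleaning
    # returns model-level Python values:
    #
    #     f = IntegerField(choices=[(1, "One"), (2, "Two")]).formfield()
    #     f.clean("1")  # -> 1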
def value_from_object(self, obj):
"""Return the value of this field in the given model instance."""
return getattr(obj, self.attname)
class BooleanField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be either True or False."),
"invalid_nullable": _("“%(value)s” value must be either True, False, or None."),
}
description = _("Boolean (Either True or False)")
def get_internal_type(self):
return "BooleanField"
def to_python(self, value):
if self.null and value in self.empty_values:
return None
if value in (True, False):
# 1/0 are equal to True/False. bool() converts former to latter.
return bool(value)
if value in ("t", "True", "1"):
return True
if value in ("f", "False", "0"):
return False
raise exceptions.ValidationError(
self.error_messages["invalid_nullable" if self.null else "invalid"],
code="invalid",
params={"value": value},
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return self.to_python(value)
def formfield(self, **kwargs):
if self.choices is not None:
include_blank = not (self.has_default() or "initial" in kwargs)
defaults = {"choices": self.get_choices(include_blank=include_blank)}
else:
form_class = forms.NullBooleanField if self.null else forms.BooleanField
# In HTML checkboxes, 'required' means "must be checked" which is
# different from the choices case ("must select some value").
# required=False allows unchecked checkboxes.
defaults = {"form_class": form_class, "required": False}
return super().formfield(**{**defaults, **kwargs})
def select_format(self, compiler, sql, params):
sql, params = super().select_format(compiler, sql, params)
# Filters that match everything are handled as empty strings in the
# WHERE clause, but in SELECT or GROUP BY list they must use a
# predicate that's always True.
if sql == "":
sql = "1"
return sql, params
class CharField(Field):
description = _("String (up to %(max_length)s)")
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
def check(self, **kwargs):
databases = kwargs.get("databases") or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
*self._check_max_length_attribute(**kwargs),
]
def _check_max_length_attribute(self, **kwargs):
if self.max_length is None:
return [
checks.Error(
"CharFields must define a 'max_length' attribute.",
obj=self,
id="fields.E120",
)
]
elif (
not isinstance(self.max_length, int)
or isinstance(self.max_length, bool)
or self.max_length <= 0
):
return [
checks.Error(
"'max_length' must be a positive integer.",
obj=self,
id="fields.E121",
)
]
else:
return []
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None
or "supports_collation_on_charfield"
in self.model._meta.required_db_features
or connection.features.supports_collation_on_charfield
):
errors.append(
checks.Error(
"%s does not support a database collation on "
"CharFields." % connection.display_name,
obj=self,
id="fields.E190",
),
)
return errors
def cast_db_type(self, connection):
if self.max_length is None:
return connection.ops.cast_char_field_without_max_length
return super().cast_db_type(connection)
def get_internal_type(self):
return "CharField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into the widget, for example).
defaults = {"max_length": self.max_length}
# TODO: Handle multiple backends with different feature flags.
if self.null and not connection.features.interprets_empty_strings_as_nulls:
defaults["empty_value"] = None
defaults.update(kwargs)
return super().formfield(**defaults)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs["db_collation"] = self.db_collation
return name, path, args, kwargs
class CommaSeparatedIntegerField(CharField):
default_validators = [validators.validate_comma_separated_integer_list]
description = _("Comma-separated integers")
system_check_removed_details = {
"msg": (
"CommaSeparatedIntegerField is removed except for support in "
"historical migrations."
),
"hint": (
"Use CharField(validators=[validate_comma_separated_integer_list]) "
"instead."
),
"id": "fields.E901",
}
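# Illustrative sketch of the replacement suggested by the hint above
# (hypothetical model field):
#
#     numbers = CharField(
#         max_length=200,
#         validators=[validators.validate_comma_separated_integer_list],
#     )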
def _to_naive(value):
if timezone.is_aware(value):
value = timezone.make_naive(value, datetime.timezone.utc)
return value
def _get_naive_now():
return _to_naive(timezone.now())
class DateTimeCheckMixin:
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_mutually_exclusive_options(),
*self._check_fix_default_value(),
]
def _check_mutually_exclusive_options(self):
# auto_now, auto_now_add, and default are mutually exclusive
# options. The use of more than one of these options together
# will trigger an Error
mutually_exclusive_options = [
self.auto_now_add,
self.auto_now,
self.has_default(),
]
enabled_options = [
option not in (None, False) for option in mutually_exclusive_options
].count(True)
if enabled_options > 1:
return [
checks.Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=self,
id="fields.E160",
)
]
else:
return []
def _check_fix_default_value(self):
return []
# Concrete subclasses use this in their implementations of
# _check_fix_default_value().
def _check_if_value_fixed(self, value, now=None):
"""
Check if the given value appears to have been provided as a "fixed"
time value, and include a warning in the returned list if it does. The
value argument must be a date object or aware/naive datetime object. If
now is provided, it must be a naive datetime object.
"""
if now is None:
now = _get_naive_now()
offset = datetime.timedelta(seconds=10)
lower = now - offset
upper = now + offset
if isinstance(value, datetime.datetime):
value = _to_naive(value)
else:
assert isinstance(value, datetime.date)
lower = lower.date()
upper = upper.date()
if lower <= value <= upper:
return [
checks.Warning(
"Fixed default value provided.",
hint=(
"It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`"
),
obj=self,
id="fields.W161",
)
]
return []
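# Illustrative sketch of what this mixin flags (hypothetical model fields):
#
#     created = DateTimeField(default=timezone.now())  # fixed value: W161
#     created = DateTimeField(default=timezone.now)    # callable: fine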
class DateField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid date format. It must be "
"in YYYY-MM-DD format."
),
"invalid_date": _(
"“%(value)s” value has the correct format (YYYY-MM-DD) "
"but it is an invalid date."
),
}
description = _("Date (without time)")
def __init__(
self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs
):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs["editable"] = False
kwargs["blank"] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
value = _to_naive(value).date()
elif isinstance(value, datetime.date):
pass
else:
# No explicit date / datetime value -- no checks necessary
return []
# At this point, value is a date object.
return self._check_if_value_fixed(value)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now:
kwargs["auto_now"] = True
if self.auto_now_add:
kwargs["auto_now_add"] = True
if self.auto_now or self.auto_now_add:
del kwargs["editable"]
del kwargs["blank"]
return name, path, args, kwargs
def get_internal_type(self):
return "DateField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
if settings.USE_TZ and timezone.is_aware(value):
# Convert aware datetimes to the default time zone
# before casting them to dates (#17742).
default_timezone = timezone.get_default_timezone()
value = timezone.make_naive(value, default_timezone)
return value.date()
if isinstance(value, datetime.date):
return value
try:
parsed = parse_date(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_date"],
code="invalid_date",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.date.today()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def contribute_to_class(self, cls, name, **kwargs):
super().contribute_to_class(cls, name, **kwargs)
if not self.null:
setattr(
cls,
"get_next_by_%s" % self.name,
partialmethod(
cls._get_next_or_previous_by_FIELD, field=self, is_next=True
),
)
setattr(
cls,
"get_previous_by_%s" % self.name,
partialmethod(
cls._get_next_or_previous_by_FIELD, field=self, is_next=False
),
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts dates into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DateField,
**kwargs,
}
)
class DateTimeField(DateField):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."
),
"invalid_date": _(
"“%(value)s” value has the correct format "
"(YYYY-MM-DD) but it is an invalid date."
),
"invalid_datetime": _(
"“%(value)s” value has the correct format "
"(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
"but it is an invalid date/time."
),
}
description = _("Date (with time)")
# __init__ is inherited from DateField
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, (datetime.datetime, datetime.date)):
return self._check_if_value_fixed(value)
# No explicit date / datetime value -- no checks necessary.
return []
def get_internal_type(self):
return "DateTimeField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
return value
if isinstance(value, datetime.date):
value = datetime.datetime(value.year, value.month, value.day)
if settings.USE_TZ:
# For backwards compatibility, interpret naive datetimes in
# local time. This won't work during DST change, but we can't
# do much about it, so we let the exceptions percolate up the
# call stack.
warnings.warn(
"DateTimeField %s.%s received a naive datetime "
"(%s) while time zone support is active."
% (self.model.__name__, self.name, value),
RuntimeWarning,
)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
try:
parsed = parse_datetime(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_datetime"],
code="invalid_datetime",
params={"value": value},
)
try:
parsed = parse_date(value)
if parsed is not None:
return datetime.datetime(parsed.year, parsed.month, parsed.day)
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_date"],
code="invalid_date",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = timezone.now()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
# contribute_to_class is inherited from DateField, it registers
# get_next_by_FOO and get_prev_by_FOO
def get_prep_value(self, value):
value = super().get_prep_value(value)
value = self.to_python(value)
if value is not None and settings.USE_TZ and timezone.is_naive(value):
# For backwards compatibility, interpret naive datetimes in local
# time. This won't work during DST change, but we can't do much
# about it, so we let the exceptions percolate up the call stack.
try:
name = "%s.%s" % (self.model.__name__, self.name)
except AttributeError:
name = "(unbound)"
warnings.warn(
"DateTimeField %s received a naive datetime (%s)"
" while time zone support is active." % (name, value),
RuntimeWarning,
)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
def get_db_prep_value(self, value, connection, prepared=False):
# Casts datetimes into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datetimefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DateTimeField,
**kwargs,
}
)
class DecimalField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be a decimal number."),
}
description = _("Decimal number")
def __init__(
self,
verbose_name=None,
name=None,
max_digits=None,
decimal_places=None,
**kwargs,
):
self.max_digits, self.decimal_places = max_digits, decimal_places
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super().check(**kwargs)
digits_errors = [
*self._check_decimal_places(),
*self._check_max_digits(),
]
if not digits_errors:
errors.extend(self._check_decimal_places_and_max_digits(**kwargs))
else:
errors.extend(digits_errors)
return errors
def _check_decimal_places(self):
try:
decimal_places = int(self.decimal_places)
if decimal_places < 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=self,
id="fields.E130",
)
]
except ValueError:
return [
checks.Error(
"'decimal_places' must be a non-negative integer.",
obj=self,
id="fields.E131",
)
]
else:
return []
def _check_max_digits(self):
try:
max_digits = int(self.max_digits)
if max_digits <= 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=self,
id="fields.E132",
)
]
except ValueError:
return [
checks.Error(
"'max_digits' must be a positive integer.",
obj=self,
id="fields.E133",
)
]
else:
return []
def _check_decimal_places_and_max_digits(self, **kwargs):
if int(self.decimal_places) > int(self.max_digits):
return [
checks.Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=self,
id="fields.E134",
)
]
return []
@cached_property
def validators(self):
return super().validators + [
validators.DecimalValidator(self.max_digits, self.decimal_places)
]
@cached_property
def context(self):
return decimal.Context(prec=self.max_digits)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.max_digits is not None:
kwargs["max_digits"] = self.max_digits
if self.decimal_places is not None:
kwargs["decimal_places"] = self.decimal_places
return name, path, args, kwargs
def get_internal_type(self):
return "DecimalField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, float):
if math.isnan(value):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
return self.context.create_decimal_from_float(value)
try:
return decimal.Decimal(value)
except (decimal.InvalidOperation, TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def get_db_prep_save(self, value, connection):
return connection.ops.adapt_decimalfield_value(
self.to_python(value), self.max_digits, self.decimal_places
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"max_digits": self.max_digits,
"decimal_places": self.decimal_places,
"form_class": forms.DecimalField,
**kwargs,
}
)
class DurationField(Field):
"""
Store timedelta objects.
Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint
of microseconds on other databases.
"""
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"[DD] [[HH:]MM:]ss[.uuuuuu] format."
)
}
description = _("Duration")
def get_internal_type(self):
return "DurationField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.timedelta):
return value
try:
parsed = parse_duration(value)
except ValueError:
pass
else:
if parsed is not None:
return parsed
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def get_db_prep_value(self, value, connection, prepared=False):
if connection.features.has_native_duration_field:
return value
if value is None:
return None
return duration_microseconds(value)
def get_db_converters(self, connection):
converters = []
if not connection.features.has_native_duration_field:
converters.append(connection.ops.convert_durationfield_value)
return converters + super().get_db_converters(connection)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else duration_string(val)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DurationField,
**kwargs,
}
)
class EmailField(CharField):
default_validators = [validators.validate_email]
description = _("Email address")
def __init__(self, *args, **kwargs):
# max_length=254 to be compliant with RFCs 3696 and 5321
kwargs.setdefault("max_length", 254)
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
# We do not exclude max_length if it matches default as we want to change
        # the default in the future.
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause email validation to be performed
# twice.
return super().formfield(
**{
"form_class": forms.EmailField,
**kwargs,
}
)
class FilePathField(Field):
description = _("File path")
def __init__(
self,
verbose_name=None,
name=None,
path="",
match=None,
recursive=False,
allow_files=True,
allow_folders=False,
**kwargs,
):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
kwargs.setdefault("max_length", 100)
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_allowing_files_or_folders(**kwargs),
]
def _check_allowing_files_or_folders(self, **kwargs):
if not self.allow_files and not self.allow_folders:
return [
checks.Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' "
"set to True.",
obj=self,
id="fields.E140",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.path != "":
kwargs["path"] = self.path
if self.match is not None:
kwargs["match"] = self.match
if self.recursive is not False:
kwargs["recursive"] = self.recursive
if self.allow_files is not True:
kwargs["allow_files"] = self.allow_files
if self.allow_folders is not False:
kwargs["allow_folders"] = self.allow_folders
if kwargs.get("max_length") == 100:
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"path": self.path() if callable(self.path) else self.path,
"match": self.match,
"recursive": self.recursive,
"form_class": forms.FilePathField,
"allow_files": self.allow_files,
"allow_folders": self.allow_folders,
**kwargs,
}
)
def get_internal_type(self):
return "FilePathField"
class FloatField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be a float."),
}
description = _("Floating point number")
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return float(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_internal_type(self):
return "FloatField"
def to_python(self, value):
if value is None:
return value
try:
return float(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.FloatField,
**kwargs,
}
)
class IntegerField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be an integer."),
}
description = _("Integer")
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_max_length_warning(),
]
def _check_max_length_warning(self):
if self.max_length is not None:
return [
checks.Warning(
"'max_length' is ignored when used with %s."
% self.__class__.__name__,
hint="Remove 'max_length' from field",
obj=self,
id="fields.W122",
)
]
return []
@cached_property
def validators(self):
# These validators can't be added at field initialization time since
# they're based on values retrieved from `connection`.
validators_ = super().validators
internal_type = self.get_internal_type()
min_value, max_value = connection.ops.integer_field_range(internal_type)
if min_value is not None and not any(
(
isinstance(validator, validators.MinValueValidator)
and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
)
>= min_value
)
for validator in validators_
):
validators_.append(validators.MinValueValidator(min_value))
if max_value is not None and not any(
(
isinstance(validator, validators.MaxValueValidator)
and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
)
<= max_value
)
for validator in validators_
):
validators_.append(validators.MaxValueValidator(max_value))
return validators_
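    # Illustrative sketch: on a backend whose integer_field_range() for
    # "IntegerField" is (-2147483648, 2147483647), both bounds are added
    # automatically:
    #
    #     IntegerField().validators
    #     # [MinValueValidator(-2147483648), MaxValueValidator(2147483647)]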
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return int(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_internal_type(self):
return "IntegerField"
def to_python(self, value):
if value is None:
return value
try:
return int(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.IntegerField,
**kwargs,
}
)
class BigIntegerField(IntegerField):
description = _("Big (8 byte) integer")
MAX_BIGINT = 9223372036854775807
def get_internal_type(self):
return "BigIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": -BigIntegerField.MAX_BIGINT - 1,
"max_value": BigIntegerField.MAX_BIGINT,
**kwargs,
}
)
class SmallIntegerField(IntegerField):
description = _("Small integer")
def get_internal_type(self):
return "SmallIntegerField"
class IPAddressField(Field):
empty_strings_allowed = False
description = _("IPv4 address")
system_check_removed_details = {
"msg": (
"IPAddressField has been removed except for support in "
"historical migrations."
),
"hint": "Use GenericIPAddressField instead.",
"id": "fields.E900",
}
def __init__(self, *args, **kwargs):
kwargs["max_length"] = 15
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def get_internal_type(self):
return "IPAddressField"
class GenericIPAddressField(Field):
empty_strings_allowed = False
description = _("IP address")
default_error_messages = {}
def __init__(
self,
verbose_name=None,
name=None,
protocol="both",
unpack_ipv4=False,
*args,
**kwargs,
):
self.unpack_ipv4 = unpack_ipv4
self.protocol = protocol
(
self.default_validators,
invalid_error_message,
) = validators.ip_address_validators(protocol, unpack_ipv4)
self.default_error_messages["invalid"] = invalid_error_message
kwargs["max_length"] = 39
super().__init__(verbose_name, name, *args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_blank_and_null_values(**kwargs),
]
def _check_blank_and_null_values(self, **kwargs):
if not getattr(self, "null", False) and getattr(self, "blank", False):
return [
checks.Error(
"GenericIPAddressFields cannot have blank=True if null=False, "
"as blank values are stored as nulls.",
obj=self,
id="fields.E150",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.unpack_ipv4 is not False:
kwargs["unpack_ipv4"] = self.unpack_ipv4
if self.protocol != "both":
kwargs["protocol"] = self.protocol
if kwargs.get("max_length") == 39:
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return "GenericIPAddressField"
def to_python(self, value):
if value is None:
return None
if not isinstance(value, str):
value = str(value)
value = value.strip()
if ":" in value:
return clean_ipv6_address(
value, self.unpack_ipv4, self.error_messages["invalid"]
)
return value
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_ipaddressfield_value(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
if value and ":" in value:
try:
return clean_ipv6_address(value, self.unpack_ipv4)
except exceptions.ValidationError:
pass
return str(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"protocol": self.protocol,
"form_class": forms.GenericIPAddressField,
**kwargs,
}
)
class NullBooleanField(BooleanField):
default_error_messages = {
"invalid": _("“%(value)s” value must be either None, True or False."),
"invalid_nullable": _("“%(value)s” value must be either None, True or False."),
}
description = _("Boolean (Either True, False or None)")
system_check_removed_details = {
"msg": (
"NullBooleanField is removed except for support in historical "
"migrations."
),
"hint": "Use BooleanField(null=True) instead.",
"id": "fields.E903",
}
def __init__(self, *args, **kwargs):
kwargs["null"] = True
kwargs["blank"] = True
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["null"]
del kwargs["blank"]
return name, path, args, kwargs
class PositiveIntegerRelDbTypeMixin:
def __init_subclass__(cls, **kwargs):
super().__init_subclass__(**kwargs)
if not hasattr(cls, "integer_field_class"):
cls.integer_field_class = next(
(
parent
for parent in cls.__mro__[1:]
if issubclass(parent, IntegerField)
),
None,
)
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. In most cases, a foreign key pointing to a positive integer
primary key will have an integer column data type but some databases
(e.g. MySQL) have an unsigned integer type. In that case
(related_fields_match_type=True), the primary key should return its
db_type.
"""
if connection.features.related_fields_match_type:
return self.db_type(connection)
else:
return self.integer_field_class().db_type(connection=connection)
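# Illustrative sketch: on MySQL (related_fields_match_type=True) a foreign key
# to a PositiveIntegerField keeps the unsigned column type; on most other
# backends it falls back to the plain integer type:
#
#     PositiveIntegerField().rel_db_type(connection)
#     # roughly "integer UNSIGNED" on MySQL, "integer" on PostgreSQL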
class PositiveBigIntegerField(PositiveIntegerRelDbTypeMixin, BigIntegerField):
description = _("Positive big integer")
def get_internal_type(self):
return "PositiveBigIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
description = _("Positive integer")
def get_internal_type(self):
return "PositiveIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, SmallIntegerField):
description = _("Positive small integer")
def get_internal_type(self):
return "PositiveSmallIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class SlugField(CharField):
default_validators = [validators.validate_slug]
description = _("Slug (up to %(max_length)s)")
def __init__(
self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs
):
self.allow_unicode = allow_unicode
if self.allow_unicode:
self.default_validators = [validators.validate_unicode_slug]
super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 50:
del kwargs["max_length"]
if self.db_index is False:
kwargs["db_index"] = False
else:
del kwargs["db_index"]
if self.allow_unicode is not False:
kwargs["allow_unicode"] = self.allow_unicode
return name, path, args, kwargs
def get_internal_type(self):
return "SlugField"
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.SlugField,
"allow_unicode": self.allow_unicode,
**kwargs,
}
)
class TextField(Field):
description = _("Text")
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
def check(self, **kwargs):
databases = kwargs.get("databases") or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
]
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None
or "supports_collation_on_textfield"
in self.model._meta.required_db_features
or connection.features.supports_collation_on_textfield
):
errors.append(
checks.Error(
"%s does not support a database collation on "
"TextFields." % connection.display_name,
obj=self,
id="fields.E190",
),
)
return errors
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into the widget, for example).
return super().formfield(
**{
"max_length": self.max_length,
**({} if self.choices is not None else {"widget": forms.Textarea}),
**kwargs,
}
)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs["db_collation"] = self.db_collation
return name, path, args, kwargs
class TimeField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"HH:MM[:ss[.uuuuuu]] format."
),
"invalid_time": _(
"“%(value)s” value has the correct format "
"(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."
),
}
description = _("Time")
def __init__(
self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs
):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs["editable"] = False
kwargs["blank"] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
now = None
elif isinstance(value, datetime.time):
now = _get_naive_now()
# This will not use the right date in the race condition where now
# is just before the date change and value is just past 0:00.
value = datetime.datetime.combine(now.date(), value)
else:
# No explicit time / datetime value -- no checks necessary
return []
# At this point, value is a datetime object.
return self._check_if_value_fixed(value, now=now)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now is not False:
kwargs["auto_now"] = self.auto_now
if self.auto_now_add is not False:
kwargs["auto_now_add"] = self.auto_now_add
if self.auto_now or self.auto_now_add:
del kwargs["blank"]
del kwargs["editable"]
return name, path, args, kwargs
def get_internal_type(self):
return "TimeField"
def to_python(self, value):
if value is None:
return None
if isinstance(value, datetime.time):
return value
if isinstance(value, datetime.datetime):
# Not usually a good idea to pass in a datetime here (it loses
# information), but this can be a side-effect of interacting with a
# database backend (e.g. Oracle), so we'll be accommodating.
return value.time()
try:
parsed = parse_time(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_time"],
code="invalid_time",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.datetime.now().time()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts times into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_timefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.TimeField,
**kwargs,
}
)
class URLField(CharField):
default_validators = [validators.URLValidator()]
description = _("URL")
def __init__(self, verbose_name=None, name=None, **kwargs):
kwargs.setdefault("max_length", 200)
super().__init__(verbose_name, name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 200:
del kwargs["max_length"]
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause URL validation to be performed
# twice.
return super().formfield(
**{
"form_class": forms.URLField,
**kwargs,
}
)
class BinaryField(Field):
description = _("Raw binary data")
empty_values = [None, b""]
def __init__(self, *args, **kwargs):
kwargs.setdefault("editable", False)
super().__init__(*args, **kwargs)
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
def check(self, **kwargs):
return [*super().check(**kwargs), *self._check_str_default_value()]
def _check_str_default_value(self):
if self.has_default() and isinstance(self.default, str):
return [
checks.Error(
"BinaryField's default cannot be a string. Use bytes "
"content instead.",
obj=self,
id="fields.E170",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.editable:
kwargs["editable"] = True
else:
del kwargs["editable"]
return name, path, args, kwargs
def get_internal_type(self):
return "BinaryField"
def get_placeholder(self, value, compiler, connection):
return connection.ops.binary_placeholder_sql(value)
def get_default(self):
if self.has_default() and not callable(self.default):
return self.default
default = super().get_default()
if default == "":
return b""
return default
def get_db_prep_value(self, value, connection, prepared=False):
value = super().get_db_prep_value(value, connection, prepared)
if value is not None:
return connection.Database.Binary(value)
return value
def value_to_string(self, obj):
"""Binary data is serialized as base64"""
return b64encode(self.value_from_object(obj)).decode("ascii")
def to_python(self, value):
# If it's a string, it should be base64-encoded data
if isinstance(value, str):
return memoryview(b64decode(value.encode("ascii")))
return value
class UUIDField(Field):
default_error_messages = {
"invalid": _("“%(value)s” is not a valid UUID."),
}
description = _("Universally unique identifier")
empty_strings_allowed = False
def __init__(self, verbose_name=None, **kwargs):
kwargs["max_length"] = 32
super().__init__(verbose_name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return "UUIDField"
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
if not isinstance(value, uuid.UUID):
value = self.to_python(value)
if connection.features.has_native_uuid_field:
return value
return value.hex
def to_python(self, value):
if value is not None and not isinstance(value, uuid.UUID):
input_form = "int" if isinstance(value, int) else "hex"
try:
return uuid.UUID(**{input_form: value})
except (AttributeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
return value
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.UUIDField,
**kwargs,
}
)
class AutoFieldMixin:
db_returning = True
def __init__(self, *args, **kwargs):
kwargs["blank"] = True
super().__init__(*args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_primary_key(),
]
def _check_primary_key(self):
if not self.primary_key:
return [
checks.Error(
"AutoFields must set primary_key=True.",
obj=self,
id="fields.E100",
),
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["blank"]
kwargs["primary_key"] = True
return name, path, args, kwargs
def validate(self, value, model_instance):
pass
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
value = connection.ops.validate_autopk_value(value)
return value
def contribute_to_class(self, cls, name, **kwargs):
if cls._meta.auto_field:
raise ValueError(
"Model %s can't have more than one auto-generated field."
% cls._meta.label
)
super().contribute_to_class(cls, name, **kwargs)
cls._meta.auto_field = self
def formfield(self, **kwargs):
return None
class AutoFieldMeta(type):
"""
Metaclass to maintain backward inheritance compatibility for AutoField.
It is intended that AutoFieldMixin become public API when it is possible to
create a non-integer automatically-generated field using column defaults
stored in the database.
In many areas Django also relies on using isinstance() to check for an
automatically-generated field as a subclass of AutoField. A new flag needs
to be implemented on Field to be used instead.
When these issues have been addressed, this metaclass could be used to
deprecate inheritance from AutoField and use of isinstance() with AutoField
for detecting automatically-generated fields.
"""
@property
def _subclasses(self):
return (BigAutoField, SmallAutoField)
def __instancecheck__(self, instance):
return isinstance(instance, self._subclasses) or super().__instancecheck__(
instance
)
def __subclasscheck__(self, subclass):
return issubclass(subclass, self._subclasses) or super().__subclasscheck__(
subclass
)
class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta):
def get_internal_type(self):
return "AutoField"
def rel_db_type(self, connection):
return IntegerField().db_type(connection=connection)
class BigAutoField(AutoFieldMixin, BigIntegerField):
def get_internal_type(self):
return "BigAutoField"
def rel_db_type(self, connection):
return BigIntegerField().db_type(connection=connection)
class SmallAutoField(AutoFieldMixin, SmallIntegerField):
def get_internal_type(self):
return "SmallAutoField"
def rel_db_type(self, connection):
return SmallIntegerField().db_type(connection=connection)
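# Illustrative sketch of the compatibility behavior AutoFieldMeta provides:
#
#     isinstance(BigAutoField(primary_key=True), AutoField)  # -> True
#     issubclass(SmallAutoField, AutoField)                  # -> True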
import logging
from datetime import datetime
from django.db.backends.ddl_references import (
Columns,
Expressions,
ForeignKeyName,
IndexName,
Statement,
Table,
)
from django.db.backends.utils import names_digest, split_identifier
from django.db.models import Deferrable, Index
from django.db.models.sql import Query
from django.db.transaction import TransactionManagementError, atomic
from django.utils import timezone
logger = logging.getLogger("django.db.backends.schema")
def _is_relevant_relation(relation, altered_field):
"""
When altering the given field, must constraints on its model from the given
relation be temporarily dropped?
"""
field = relation.field
if field.many_to_many:
# M2M reverse field
return False
if altered_field.primary_key and field.to_fields == [None]:
# Foreign key constraint on the primary key, which is being altered.
return True
# Is the constraint targeting the field being altered?
return altered_field.name in field.to_fields
def _all_related_fields(model):
return model._meta._get_fields(
forward=False,
reverse=True,
include_hidden=True,
include_parents=False,
)
def _related_non_m2m_objects(old_field, new_field):
# Filter out m2m objects from reverse relations.
# Return (old_relation, new_relation) tuples.
related_fields = zip(
(
obj
for obj in _all_related_fields(old_field.model)
if _is_relevant_relation(obj, old_field)
),
(
obj
for obj in _all_related_fields(new_field.model)
if _is_relevant_relation(obj, new_field)
),
)
for old_rel, new_rel in related_fields:
yield old_rel, new_rel
yield from _related_non_m2m_objects(
old_rel.remote_field,
new_rel.remote_field,
)
class BaseDatabaseSchemaEditor:
"""
This class and its subclasses are responsible for emitting schema-changing
statements to the databases - model creation/removal/alteration, field
renaming, index fiddling, and so on.
"""
# Overrideable SQL templates
sql_create_table = "CREATE TABLE %(table)s (%(definition)s)"
sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s"
sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s"
sql_delete_table = "DROP TABLE %(table)s CASCADE"
sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s"
sql_alter_column = "ALTER TABLE %(table)s %(changes)s"
sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s"
sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL"
sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL"
sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s"
sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT"
sql_alter_column_no_default_null = sql_alter_column_no_default
sql_alter_column_collate = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s"
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE"
sql_rename_column = (
"ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s"
)
sql_update_with_default = (
"UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL"
)
sql_unique_constraint = "UNIQUE (%(columns)s)%(deferrable)s"
sql_check_constraint = "CHECK (%(check)s)"
sql_delete_constraint = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_constraint = "CONSTRAINT %(name)s %(constraint)s"
sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)"
sql_delete_check = sql_delete_constraint
sql_create_unique = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s "
"UNIQUE (%(columns)s)%(deferrable)s"
)
sql_delete_unique = sql_delete_constraint
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s"
)
sql_create_inline_fk = None
sql_create_column_inline_fk = None
sql_delete_fk = sql_delete_constraint
sql_create_index = (
"CREATE INDEX %(name)s ON %(table)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_create_unique_index = (
"CREATE UNIQUE INDEX %(name)s ON %(table)s "
"(%(columns)s)%(include)s%(condition)s"
)
sql_delete_index = "DROP INDEX %(name)s"
sql_create_pk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
)
sql_delete_pk = sql_delete_constraint
sql_delete_procedure = "DROP PROCEDURE %(procedure)s"
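    # Illustrative sketch: backends tailor their DDL by overriding these
    # templates in a subclass (hypothetical backend shown):
    #
    #     class MySchemaEditor(BaseDatabaseSchemaEditor):
    #         sql_delete_table = "DROP TABLE %(table)s"  # no CASCADE support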
def __init__(self, connection, collect_sql=False, atomic=True):
self.connection = connection
self.collect_sql = collect_sql
if self.collect_sql:
self.collected_sql = []
self.atomic_migration = self.connection.features.can_rollback_ddl and atomic
# State-managing methods
def __enter__(self):
self.deferred_sql = []
if self.atomic_migration:
self.atomic = atomic(self.connection.alias)
self.atomic.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
for sql in self.deferred_sql:
self.execute(sql)
if self.atomic_migration:
self.atomic.__exit__(exc_type, exc_value, traceback)
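    # Illustrative sketch: schema editors are used as context managers, which
    # wraps the DDL in a transaction when the backend can roll back DDL and
    # runs deferred_sql on a clean exit ("MyModel" is a hypothetical model):
    #
    #     from django.db import connection
    #
    #     with connection.schema_editor() as editor:
    #         editor.create_model(MyModel)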
# Core utility functions
def execute(self, sql, params=()):
"""Execute the given SQL statement, with optional parameters."""
# Don't perform the transactional DDL check if SQL is being collected
# as it's not going to be executed anyway.
if (
not self.collect_sql
and self.connection.in_atomic_block
and not self.connection.features.can_rollback_ddl
):
raise TransactionManagementError(
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
# Account for non-string statement objects.
sql = str(sql)
# Log the command we're running, then run it
logger.debug(
"%s; (params %r)", sql, params, extra={"params": params, "sql": sql}
)
if self.collect_sql:
ending = "" if sql.rstrip().endswith(";") else ";"
if params is not None:
self.collected_sql.append(
(sql % tuple(map(self.quote_value, params))) + ending
)
else:
self.collected_sql.append(sql + ending)
else:
with self.connection.cursor() as cursor:
cursor.execute(sql, params)
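    # Illustrative sketch: with collect_sql=True, statements are gathered
    # instead of executed; this is how the sqlmigrate command renders DDL:
    #
    #     with connection.schema_editor(collect_sql=True) as editor:
    #         editor.create_model(MyModel)  # hypothetical model
    #     print("\n".join(editor.collected_sql))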
def quote_name(self, name):
return self.connection.ops.quote_name(name)
def table_sql(self, model):
"""Take a model and return its table definition."""
# Add any unique_togethers (always deferred, as some fields might be
# created afterward, like geometry fields with some backends).
for field_names in model._meta.unique_together:
fields = [model._meta.get_field(field) for field in field_names]
self.deferred_sql.append(self._create_unique_sql(model, fields))
# Create column SQL, add FK deferreds if needed.
column_sqls = []
params = []
for field in model._meta.local_fields:
# SQL.
definition, extra_params = self.column_sql(model, field)
if definition is None:
continue
# Check constraints can go on the column SQL here.
db_params = field.db_parameters(connection=self.connection)
if db_params["check"]:
definition += " " + self.sql_check_constraint % db_params
# Autoincrement SQL (for backends with inline variant).
col_type_suffix = field.db_type_suffix(connection=self.connection)
if col_type_suffix:
definition += " %s" % col_type_suffix
params.extend(extra_params)
# FK.
if field.remote_field and field.db_constraint:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(
field.remote_field.field_name
).column
if self.sql_create_inline_fk:
definition += " " + self.sql_create_inline_fk % {
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
}
elif self.connection.features.supports_foreign_keys:
self.deferred_sql.append(
self._create_fk_sql(
model, field, "_fk_%(to_table)s_%(to_column)s"
)
)
# Add the SQL to our big list.
column_sqls.append(
"%s %s"
% (
self.quote_name(field.column),
definition,
)
)
# Autoincrement SQL (for backends with post table definition
# variant).
if field.get_internal_type() in (
"AutoField",
"BigAutoField",
"SmallAutoField",
):
autoinc_sql = self.connection.ops.autoinc_sql(
model._meta.db_table, field.column
)
if autoinc_sql:
self.deferred_sql.extend(autoinc_sql)
constraints = [
constraint.constraint_sql(model, self)
for constraint in model._meta.constraints
]
sql = self.sql_create_table % {
"table": self.quote_name(model._meta.db_table),
"definition": ", ".join(
constraint for constraint in (*column_sqls, *constraints) if constraint
),
}
if model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(
model._meta.db_tablespace
)
if tablespace_sql:
sql += " " + tablespace_sql
return sql, params
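    # Illustrative (backend-dependent) return value for a simple assumed
    # model; exact types, quoting, and suffixes vary per backend:
    #   ('CREATE TABLE "app_author" ("id" integer NOT NULL PRIMARY KEY, '
    #    '"name" varchar(100) NOT NULL)', [])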
# Field <-> database mapping functions
def _iter_column_sql(self, column_db_type, params, model, field, include_default):
yield column_db_type
collation = getattr(field, "db_collation", None)
if collation:
yield self._collate_sql(collation)
# Work out nullability.
null = field.null
# Include a default value, if requested.
include_default = (
include_default
and not self.skip_default(field)
and
# Don't include a default value if it's a nullable field and the
# default cannot be dropped in the ALTER COLUMN statement (e.g.
# MySQL longtext and longblob).
not (null and self.skip_default_on_alter(field))
)
if include_default:
default_value = self.effective_default(field)
if default_value is not None:
column_default = "DEFAULT " + self._column_default_sql(field)
if self.connection.features.requires_literal_defaults:
# Some databases can't take defaults as a parameter (Oracle).
# If this is the case, the individual schema backend should
# implement prepare_default().
yield column_default % self.prepare_default(default_value)
else:
yield column_default
params.append(default_value)
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
if (
field.empty_strings_allowed
and not field.primary_key
and self.connection.features.interprets_empty_strings_as_nulls
):
null = True
if not null:
yield "NOT NULL"
elif not self.connection.features.implied_column_null:
yield "NULL"
if field.primary_key:
yield "PRIMARY KEY"
elif field.unique:
yield "UNIQUE"
# Optionally add the tablespace if it's an implicitly indexed column.
tablespace = field.db_tablespace or model._meta.db_tablespace
if (
tablespace
and self.connection.features.supports_tablespaces
and field.unique
):
yield self.connection.ops.tablespace_sql(tablespace, inline=True)

def column_sql(self, model, field, include_default=False):
"""
Return the column definition for a field. The field must already have
had set_attributes_from_name() called.
"""
# Get the column's type and use that as the basis of the SQL.
db_params = field.db_parameters(connection=self.connection)
column_db_type = db_params["type"]
# Check for fields that aren't actually columns (e.g. M2M).
if column_db_type is None:
return None, None
params = []
return (
" ".join(
# This appends to the params being returned.
self._iter_column_sql(
column_db_type, params, model, field, include_default
)
),
params,
)
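    # Hedged example of the (definition, params) pair this can produce,
    # using an assumed Author model (exact type names and clauses depend
    # on the backend in use):
    #   column_sql(Author, Author._meta.get_field("name"))
    #   -> ("varchar(100) NOT NULL", [])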
def skip_default(self, field):
"""
        Some backends don't accept default values for certain column types
        (e.g. MySQL longtext and longblob).
"""
return False
def skip_default_on_alter(self, field):
"""
        Some backends don't accept default values for certain column types
        (e.g. MySQL longtext and longblob) in the ALTER COLUMN statement.
"""
return False
def prepare_default(self, value):
"""
        Only used for backends which have the requires_literal_defaults
        feature.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseSchemaEditor for backends which have "
"requires_literal_defaults must provide a prepare_default() method"
)
def _column_default_sql(self, field):
"""
Return the SQL to use in a DEFAULT clause. The resulting string should
contain a '%s' placeholder for a default value.
"""
return "%s"
@staticmethod
def _effective_default(field):
# This method allows testing its logic without a connection.
if field.has_default():
default = field.get_default()
elif not field.null and field.blank and field.empty_strings_allowed:
if field.get_internal_type() == "BinaryField":
default = b""
else:
default = ""
elif getattr(field, "auto_now", False) or getattr(field, "auto_now_add", False):
internal_type = field.get_internal_type()
if internal_type == "DateTimeField":
default = timezone.now()
else:
default = datetime.now()
if internal_type == "DateField":
default = default.date()
elif internal_type == "TimeField":
default = default.time()
else:
default = None
return default
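    # Hedged examples of the fallback logic above:
    #   CharField(blank=True) with no explicit default   -> ""
    #   BinaryField(blank=True) with no explicit default -> b""
    #   DateTimeField(auto_now_add=True)                 -> timezone.now()
    #   anything else without a default                  -> None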
def effective_default(self, field):
"""Return a field's effective database default value."""
return field.get_db_prep_save(self._effective_default(field), self.connection)
def quote_value(self, value):
"""
Return a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
"""
raise NotImplementedError()
# Actions
def create_model(self, model):
"""
Create a table and any accompanying indexes or unique constraints for
the given `model`.
"""
sql, params = self.table_sql(model)
# Prevent using [] as params, in the case a literal '%' is used in the
# definition.
self.execute(sql, params or None)
        # Add any field indexes and index_together indexes (deferred, as
        # SQLite's _remake_table needs them).
self.deferred_sql.extend(self._model_indexes_sql(model))
# Make M2M tables
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.create_model(field.remote_field.through)
def delete_model(self, model):
"""Delete a model from the database."""
# Handle auto-created intermediary models
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# Delete the table
self.execute(
self.sql_delete_table
% {
"table": self.quote_name(model._meta.db_table),
}
)
# Remove all deferred statements referencing the deleted table.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_table(
model._meta.db_table
):
self.deferred_sql.remove(sql)
def add_index(self, model, index):
"""Add an index on a model."""
if (
index.contains_expressions
and not self.connection.features.supports_expression_indexes
):
return None
# Index.create_sql returns interpolated SQL which makes params=None a
# necessity to avoid escaping attempts on execution.
self.execute(index.create_sql(model, self), params=None)
def remove_index(self, model, index):
"""Remove an index from a model."""
if (
index.contains_expressions
and not self.connection.features.supports_expression_indexes
):
return None
self.execute(index.remove_sql(model, self))
def add_constraint(self, model, constraint):
"""Add a constraint to a model."""
sql = constraint.create_sql(model, self)
if sql:
# Constraint.create_sql returns interpolated SQL which makes
# params=None a necessity to avoid escaping attempts on execution.
self.execute(sql, params=None)
def remove_constraint(self, model, constraint):
"""Remove a constraint from a model."""
sql = constraint.remove_sql(model, self)
if sql:
self.execute(sql)
def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deal with a model changing its unique_together. The input
unique_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_unique_together}
news = {tuple(fields) for fields in new_unique_together}
# Deleted uniques
for fields in olds.difference(news):
self._delete_composed_index(
model, fields, {"unique": True}, self.sql_delete_unique
)
# Created uniques
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_unique_sql(model, fields))
def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deal with a model changing its index_together. The input
index_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_index_together}
news = {tuple(fields) for fields in new_index_together}
# Deleted indexes
for fields in olds.difference(news):
self._delete_composed_index(
model,
fields,
{"index": True, "unique": False},
self.sql_delete_index,
)
# Created indexes
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_index_sql(model, fields=fields, suffix="_idx"))
def _delete_composed_index(self, model, fields, constraint_kwargs, sql):
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
meta_index_names = {constraint.name for constraint in model._meta.indexes}
columns = [model._meta.get_field(field).column for field in fields]
constraint_names = self._constraint_names(
model,
columns,
exclude=meta_constraint_names | meta_index_names,
**constraint_kwargs,
)
if len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of constraints for %s(%s)"
% (
len(constraint_names),
model._meta.db_table,
", ".join(columns),
)
)
self.execute(self._delete_constraint_sql(sql, model, constraint_names[0]))
def alter_db_table(self, model, old_db_table, new_db_table):
"""Rename the table a model points to."""
if old_db_table == new_db_table or (
self.connection.features.ignores_table_name_case
and old_db_table.lower() == new_db_table.lower()
):
return
self.execute(
self.sql_rename_table
% {
"old_table": self.quote_name(old_db_table),
"new_table": self.quote_name(new_db_table),
}
)
# Rename all references to the old table name.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_table_references(old_db_table, new_db_table)
def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
"""Move a model's table between tablespaces."""
self.execute(
self.sql_retablespace_table
% {
"table": self.quote_name(model._meta.db_table),
"old_tablespace": self.quote_name(old_db_tablespace),
"new_tablespace": self.quote_name(new_db_tablespace),
}
)
def add_field(self, model, field):
"""
Create a field on a model. Usually involves adding a column, but may
involve adding a table instead (for M2M fields).
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.create_model(field.remote_field.through)
# Get the column's definition
definition, params = self.column_sql(model, field, include_default=True)
# It might not actually have a column behind it
if definition is None:
return
# Check constraints can go on the column SQL here
db_params = field.db_parameters(connection=self.connection)
if db_params["check"]:
definition += " " + self.sql_check_constraint % db_params
if (
field.remote_field
and self.connection.features.supports_foreign_keys
and field.db_constraint
):
constraint_suffix = "_fk_%(to_table)s_%(to_column)s"
# Add FK constraint inline, if supported.
if self.sql_create_column_inline_fk:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(
field.remote_field.field_name
).column
namespace, _ = split_identifier(model._meta.db_table)
definition += " " + self.sql_create_column_inline_fk % {
"name": self._fk_constraint_name(model, field, constraint_suffix),
"namespace": "%s." % self.quote_name(namespace)
if namespace
else "",
"column": self.quote_name(field.column),
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
"deferrable": self.connection.ops.deferrable_sql(),
}
# Otherwise, add FK constraints later.
else:
self.deferred_sql.append(
self._create_fk_sql(model, field, constraint_suffix)
)
# Build the SQL and run it
sql = self.sql_create_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
"definition": definition,
}
self.execute(sql, params)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if (
not self.skip_default_on_alter(field)
and self.effective_default(field) is not None
):
changes_sql, params = self._alter_column_default_sql(
model, None, field, drop=True
)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Add an index, if required
self.deferred_sql.extend(self._field_indexes_sql(model, field))
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def remove_field(self, model, field):
"""
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.delete_model(field.remote_field.through)
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)["type"] is None:
return
# Drop any FK constraints, MySQL requires explicit deletion
if field.remote_field:
fk_names = self._constraint_names(model, [field.column], foreign_key=True)
for fk_name in fk_names:
self.execute(self._delete_fk_sql(model, fk_name))
# Delete the column
sql = self.sql_delete_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
self.execute(sql)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
# Remove all deferred statements referencing the deleted column.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_column(
model._meta.db_table, field.column
):
self.deferred_sql.remove(sql)
def alter_field(self, model, old_field, new_field, strict=False):
"""
Allow a field's type, uniqueness, nullability, default, column,
constraints, etc. to be modified.
`old_field` is required to compute the necessary changes.
If `strict` is True, raise errors if the old column does not match
`old_field` precisely.
"""
if not self._field_should_be_altered(old_field, new_field):
return
# Ensure this field is even column-based
old_db_params = old_field.db_parameters(connection=self.connection)
old_type = old_db_params["type"]
new_db_params = new_field.db_parameters(connection=self.connection)
new_type = new_db_params["type"]
if (old_type is None and old_field.remote_field is None) or (
new_type is None and new_field.remote_field is None
):
raise ValueError(
"Cannot alter field %s into %s - they do not properly define "
"db_type (are you using a badly-written custom field?)"
% (old_field, new_field),
)
elif (
old_type is None
and new_type is None
and (
old_field.remote_field.through
and new_field.remote_field.through
and old_field.remote_field.through._meta.auto_created
and new_field.remote_field.through._meta.auto_created
)
):
return self._alter_many_to_many(model, old_field, new_field, strict)
elif (
old_type is None
and new_type is None
and (
old_field.remote_field.through
and new_field.remote_field.through
and not old_field.remote_field.through._meta.auto_created
and not new_field.remote_field.through._meta.auto_created
)
):
# Both sides have through models; this is a no-op.
return
elif old_type is None or new_type is None:
raise ValueError(
"Cannot alter field %s into %s - they are not compatible types "
"(you cannot alter to or from M2M fields, or add or remove "
"through= on M2M fields)" % (old_field, new_field)
)
self._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict,
)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
"""Perform a "physical" (non-ManyToMany) field update."""
# Drop any FK constraints, we'll remake them later
fks_dropped = set()
if (
self.connection.features.supports_foreign_keys
and old_field.remote_field
and old_field.db_constraint
):
fk_names = self._constraint_names(
model, [old_field.column], foreign_key=True
)
if strict and len(fk_names) != 1:
raise ValueError(
"Found wrong number (%s) of foreign key constraints for %s.%s"
% (
len(fk_names),
model._meta.db_table,
old_field.column,
)
)
for fk_name in fk_names:
fks_dropped.add((old_field.column,))
self.execute(self._delete_fk_sql(model, fk_name))
# Has unique been removed?
if old_field.unique and (
not new_field.unique or self._field_became_primary_key(old_field, new_field)
):
# Find the unique constraint for this field
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
constraint_names = self._constraint_names(
model,
[old_field.column],
unique=True,
primary_key=False,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of unique constraints for %s.%s"
% (
len(constraint_names),
model._meta.db_table,
old_field.column,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_unique_sql(model, constraint_name))
# Drop incoming FK constraints if the field is a primary key or unique,
# which might be a to_field target, and things are going to change.
drop_foreign_keys = (
self.connection.features.supports_foreign_keys
and (
(old_field.primary_key and new_field.primary_key)
or (old_field.unique and new_field.unique)
)
and old_type != new_type
)
if drop_foreign_keys:
            # '_meta.related_field' also contains M2M reverse fields, which
            # will be filtered out.
for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
rel_fk_names = self._constraint_names(
new_rel.related_model, [new_rel.field.column], foreign_key=True
)
for fk_name in rel_fk_names:
self.execute(self._delete_fk_sql(new_rel.related_model, fk_name))
# Removed an index? (no strict check, as multiple indexes are possible)
# Remove indexes if db_index switched to False or a unique constraint
# will now be used in lieu of an index. The following lines from the
# truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# True | False | False | False
# True | False | False | True
# True | False | True | True
if (
old_field.db_index
and not old_field.unique
and (not new_field.db_index or new_field.unique)
):
# Find the index for this field
meta_index_names = {index.name for index in model._meta.indexes}
# Retrieve only BTREE indexes since this is what's created with
# db_index=True.
index_names = self._constraint_names(
model,
[old_field.column],
index=True,
type_=Index.suffix,
exclude=meta_index_names,
)
for index_name in index_names:
# The only way to check if an index was created with
# db_index=True or with Index(['field'], name='foo')
# is to look at its name (refs #28053).
self.execute(self._delete_index_sql(model, index_name))
# Change check constraints?
if old_db_params["check"] != new_db_params["check"] and old_db_params["check"]:
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
constraint_names = self._constraint_names(
model,
[old_field.column],
check=True,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of check constraints for %s.%s"
% (
len(constraint_names),
model._meta.db_table,
old_field.column,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_check_sql(model, constraint_name))
# Have they renamed the column?
if old_field.column != new_field.column:
self.execute(
self._rename_field_sql(
model._meta.db_table, old_field, new_field, new_type
)
)
# Rename all references to the renamed column.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_column_references(
model._meta.db_table, old_field.column, new_field.column
)
# Next, start accumulating actions to do
actions = []
null_actions = []
post_actions = []
# Collation change?
old_collation = getattr(old_field, "db_collation", None)
new_collation = getattr(new_field, "db_collation", None)
if old_collation != new_collation:
# Collation change handles also a type change.
fragment = self._alter_column_collation_sql(
model, new_field, new_type, new_collation
)
actions.append(fragment)
# Type change?
elif old_type != new_type:
fragment, other_actions = self._alter_column_type_sql(
model, old_field, new_field, new_type
)
actions.append(fragment)
post_actions.extend(other_actions)
# When changing a column NULL constraint to NOT NULL with a given
# default value, we need to perform 4 steps:
# 1. Add a default for new incoming writes
# 2. Update existing NULL rows with new default
# 3. Replace NULL constraint with NOT NULL
# 4. Drop the default again.
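        # On PostgreSQL, for instance, those four steps correspond roughly
        # to the following (illustrative SQL only, not what every backend
        # emits):
        #   ALTER TABLE "t" ALTER COLUMN "c" SET DEFAULT 'x';
        #   UPDATE "t" SET "c" = 'x' WHERE "c" IS NULL;
        #   ALTER TABLE "t" ALTER COLUMN "c" SET NOT NULL;
        #   ALTER TABLE "t" ALTER COLUMN "c" DROP DEFAULT;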
# Default change?
needs_database_default = False
if old_field.null and not new_field.null:
old_default = self.effective_default(old_field)
new_default = self.effective_default(new_field)
if (
not self.skip_default_on_alter(new_field)
and old_default != new_default
and new_default is not None
):
needs_database_default = True
actions.append(
self._alter_column_default_sql(model, old_field, new_field)
)
# Nullability change?
if old_field.null != new_field.null:
fragment = self._alter_column_null_sql(model, old_field, new_field)
if fragment:
null_actions.append(fragment)
# Only if we have a default and there is a change from NULL to NOT NULL
four_way_default_alteration = new_field.has_default() and (
old_field.null and not new_field.null
)
if actions or null_actions:
if not four_way_default_alteration:
# If we don't have to do a 4-way default alteration we can
# directly run a (NOT) NULL alteration
actions = actions + null_actions
# Combine actions together if we can (e.g. postgres)
if self.connection.features.supports_combined_alters and actions:
sql, params = tuple(zip(*actions))
actions = [(", ".join(sql), sum(params, []))]
# Apply those actions
for sql, params in actions:
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if four_way_default_alteration:
# Update existing rows with default value
self.execute(
self.sql_update_with_default
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(new_field.column),
"default": "%s",
},
[new_default],
)
                # Since we didn't run a NOT NULL change before, we need to
                # do it now.
for sql, params in null_actions:
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if post_actions:
for sql, params in post_actions:
self.execute(sql, params)
# If primary_key changed to False, delete the primary key constraint.
if old_field.primary_key and not new_field.primary_key:
self._delete_primary_key(model, strict)
# Added a unique?
if self._unique_should_be_added(old_field, new_field):
self.execute(self._create_unique_sql(model, [new_field]))
# Added an index? Add an index if db_index switched to True or a unique
# constraint will no longer be used in lieu of an index. The following
# lines from the truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# False | False | True | False
# False | True | True | False
# True | True | True | False
if (
(not old_field.db_index or old_field.unique)
and new_field.db_index
and not new_field.unique
):
self.execute(self._create_index_sql(model, fields=[new_field]))
# Type alteration on primary key? Then we need to alter the column
# referring to us.
rels_to_update = []
if drop_foreign_keys:
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Changed to become primary key?
if self._field_became_primary_key(old_field, new_field):
# Make the new one
self.execute(self._create_primary_key_sql(model, new_field))
# Update all referencing columns
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Handle our type alters on the other end of rels from the PK stuff above
for old_rel, new_rel in rels_to_update:
rel_db_params = new_rel.field.db_parameters(connection=self.connection)
rel_type = rel_db_params["type"]
fragment, other_actions = self._alter_column_type_sql(
new_rel.related_model, old_rel.field, new_rel.field, rel_type
)
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(new_rel.related_model._meta.db_table),
"changes": fragment[0],
},
fragment[1],
)
for sql, params in other_actions:
self.execute(sql, params)
# Does it have a foreign key?
if (
self.connection.features.supports_foreign_keys
and new_field.remote_field
and (
fks_dropped or not old_field.remote_field or not old_field.db_constraint
)
and new_field.db_constraint
):
self.execute(
self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s")
)
# Rebuild FKs that pointed to us if we previously had to drop them
if drop_foreign_keys:
for _, rel in rels_to_update:
if rel.field.db_constraint:
self.execute(
self._create_fk_sql(rel.related_model, rel.field, "_fk")
)
# Does it have check constraints we need to add?
if old_db_params["check"] != new_db_params["check"] and new_db_params["check"]:
constraint_name = self._create_index_name(
model._meta.db_table, [new_field.column], suffix="_check"
)
self.execute(
self._create_check_sql(model, constraint_name, new_db_params["check"])
)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if needs_database_default:
changes_sql, params = self._alter_column_default_sql(
model, old_field, new_field, drop=True
)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def _alter_column_null_sql(self, model, old_field, new_field):
"""
Hook to specialize column null alteration.
Return a (sql, params) fragment to set a column to null or non-null
as required by new_field, or None if no changes are required.
"""
if (
self.connection.features.interprets_empty_strings_as_nulls
and new_field.empty_strings_allowed
):
# The field is nullable in the database anyway, leave it alone.
return
else:
new_db_params = new_field.db_parameters(connection=self.connection)
sql = (
self.sql_alter_column_null
if new_field.null
else self.sql_alter_column_not_null
)
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_db_params["type"],
},
[],
)
def _alter_column_default_sql(self, model, old_field, new_field, drop=False):
"""
Hook to specialize column default alteration.
Return a (sql, params) fragment to add or drop (depending on the drop
argument) a default to new_field's column.
"""
new_default = self.effective_default(new_field)
default = self._column_default_sql(new_field)
params = [new_default]
if drop:
params = []
elif self.connection.features.requires_literal_defaults:
            # Some databases (Oracle) can't take defaults as a parameter.
# If this is the case, the SchemaEditor for that database should
# implement prepare_default().
default = self.prepare_default(new_default)
params = []
new_db_params = new_field.db_parameters(connection=self.connection)
if drop:
if new_field.null:
sql = self.sql_alter_column_no_default_null
else:
sql = self.sql_alter_column_no_default
else:
sql = self.sql_alter_column_default
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_db_params["type"],
"default": default,
},
params,
)
def _alter_column_type_sql(self, model, old_field, new_field, new_type):
"""
Hook to specialize column type alteration for different backends,
for cases when a creation type is different to an alteration type
(e.g. SERIAL in PostgreSQL, PostGIS fields).
Return a two-tuple of: an SQL fragment of (sql, params) to insert into
an ALTER TABLE statement and a list of extra (sql, params) tuples to
run once the field is altered.
"""
return (
(
self.sql_alter_column_type
% {
"column": self.quote_name(new_field.column),
"type": new_type,
},
[],
),
[],
)
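    # Hedged example of the default return shape (PostgreSQL-style syntax;
    # subclasses override this for SERIAL columns, PostGIS fields, etc.):
    #   (('ALTER COLUMN "age" TYPE integer', []), [])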
def _alter_column_collation_sql(self, model, new_field, new_type, new_collation):
return (
self.sql_alter_column_collate
% {
"column": self.quote_name(new_field.column),
"type": new_type,
"collation": " " + self._collate_sql(new_collation)
if new_collation
else "",
},
[],
)
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""Alter M2Ms to repoint their to= endpoints."""
# Rename the through table
if (
old_field.remote_field.through._meta.db_table
!= new_field.remote_field.through._meta.db_table
):
self.alter_db_table(
old_field.remote_field.through,
old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table,
)
# Repoint the FK to the other side
self.alter_field(
new_field.remote_field.through,
# The field that points to the target model is needed, so we can
# tell alter_field to change it - this is m2m_reverse_field_name()
# (as opposed to m2m_field_name(), which points to our model).
old_field.remote_field.through._meta.get_field(
old_field.m2m_reverse_field_name()
),
new_field.remote_field.through._meta.get_field(
new_field.m2m_reverse_field_name()
),
)
self.alter_field(
new_field.remote_field.through,
            # For self-referential models we need to alter the field from
            # the other end too.
old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()),
)
def _create_index_name(self, table_name, column_names, suffix=""):
"""
Generate a unique name for an index/unique constraint.
The name is divided into 3 parts: the table name, the column names,
and a unique digest and suffix.
"""
_, table_name = split_identifier(table_name)
hash_suffix_part = "%s%s" % (
names_digest(table_name, *column_names, length=8),
suffix,
)
max_length = self.connection.ops.max_name_length() or 200
# If everything fits into max_length, use that name.
index_name = "%s_%s_%s" % (table_name, "_".join(column_names), hash_suffix_part)
if len(index_name) <= max_length:
return index_name
# Shorten a long suffix.
if len(hash_suffix_part) > max_length / 3:
hash_suffix_part = hash_suffix_part[: max_length // 3]
other_length = (max_length - len(hash_suffix_part)) // 2 - 1
index_name = "%s_%s_%s" % (
table_name[:other_length],
"_".join(column_names)[:other_length],
hash_suffix_part,
)
# Prepend D if needed to prevent the name from starting with an
# underscore or a number (not permitted on Oracle).
if index_name[0] == "_" or index_name[0].isdigit():
index_name = "D%s" % index_name[:-1]
return index_name
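    # Hedged example (the 8-char digest varies with the table and column
    # names, so the hash shown here is made up):
    #   _create_index_name("app_author", ["name"], suffix="_idx")
    #   -> "app_author_name_1a2b3c4d_idx"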
def _get_index_tablespace_sql(self, model, fields, db_tablespace=None):
if db_tablespace is None:
if len(fields) == 1 and fields[0].db_tablespace:
db_tablespace = fields[0].db_tablespace
elif model._meta.db_tablespace:
db_tablespace = model._meta.db_tablespace
if db_tablespace is not None:
return " " + self.connection.ops.tablespace_sql(db_tablespace)
return ""
def _index_condition_sql(self, condition):
if condition:
return " WHERE " + condition
return ""
def _index_include_sql(self, model, columns):
if not columns or not self.connection.features.supports_covering_indexes:
return ""
return Statement(
" INCLUDE (%(columns)s)",
columns=Columns(model._meta.db_table, columns, self.quote_name),
)
def _create_index_sql(
self,
model,
*,
fields=None,
name=None,
suffix="",
using="",
db_tablespace=None,
col_suffixes=(),
sql=None,
opclasses=(),
condition=None,
include=None,
expressions=None,
):
"""
Return the SQL statement to create the index for one or several fields
or expressions. `sql` can be specified if the syntax differs from the
standard (GIS indexes, ...).
"""
fields = fields or []
expressions = expressions or []
compiler = Query(model, alias_cols=False).get_compiler(
connection=self.connection,
)
tablespace_sql = self._get_index_tablespace_sql(
model, fields, db_tablespace=db_tablespace
)
columns = [field.column for field in fields]
sql_create_index = sql or self.sql_create_index
table = model._meta.db_table
def create_index_name(*args, **kwargs):
nonlocal name
if name is None:
name = self._create_index_name(*args, **kwargs)
return self.quote_name(name)
return Statement(
sql_create_index,
table=Table(table, self.quote_name),
name=IndexName(table, columns, suffix, create_index_name),
using=using,
columns=(
self._index_columns(table, columns, col_suffixes, opclasses)
if columns
else Expressions(table, expressions, compiler, self.quote_value)
),
extra=tablespace_sql,
condition=self._index_condition_sql(condition),
include=self._index_include_sql(model, include),
)
def _delete_index_sql(self, model, name, sql=None):
return Statement(
sql or self.sql_delete_index,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
)
def _index_columns(self, table, columns, col_suffixes, opclasses):
return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes)
def _model_indexes_sql(self, model):
"""
Return a list of all index SQL statements (field indexes,
index_together, Meta.indexes) for the specified model.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for field in model._meta.local_fields:
output.extend(self._field_indexes_sql(model, field))
for field_names in model._meta.index_together:
fields = [model._meta.get_field(field) for field in field_names]
output.append(self._create_index_sql(model, fields=fields, suffix="_idx"))
for index in model._meta.indexes:
if (
not index.contains_expressions
or self.connection.features.supports_expression_indexes
):
output.append(index.create_sql(model, self))
return output
def _field_indexes_sql(self, model, field):
"""
Return a list of all index SQL statements for the specified field.
"""
output = []
if self._field_should_be_indexed(model, field):
output.append(self._create_index_sql(model, fields=[field]))
return output
def _field_should_be_altered(self, old_field, new_field):
_, old_path, old_args, old_kwargs = old_field.deconstruct()
_, new_path, new_args, new_kwargs = new_field.deconstruct()
# Don't alter when:
# - changing only a field name
# - changing an attribute that doesn't affect the schema
# - adding only a db_column and the column name is not changed
for attr in old_field.non_db_attrs:
old_kwargs.pop(attr, None)
for attr in new_field.non_db_attrs:
new_kwargs.pop(attr, None)
return self.quote_name(old_field.column) != self.quote_name(
new_field.column
) or (old_path, old_args, old_kwargs) != (new_path, new_args, new_kwargs)
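    # Hedged examples: changing only verbose_name (a non-db attribute)
    # deconstructs to identical db-relevant kwargs, so no ALTER is emitted;
    # changing max_length or db_column survives the filtering and does
    # trigger one.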
def _field_should_be_indexed(self, model, field):
return field.db_index and not field.unique
def _field_became_primary_key(self, old_field, new_field):
return not old_field.primary_key and new_field.primary_key
def _unique_should_be_added(self, old_field, new_field):
return (
not new_field.primary_key
and new_field.unique
and (not old_field.unique or old_field.primary_key)
)
def _rename_field_sql(self, table, old_field, new_field, new_type):
return self.sql_rename_column % {
"table": self.quote_name(table),
"old_column": self.quote_name(old_field.column),
"new_column": self.quote_name(new_field.column),
"type": new_type,
}
def _create_fk_sql(self, model, field, suffix):
table = Table(model._meta.db_table, self.quote_name)
name = self._fk_constraint_name(model, field, suffix)
column = Columns(model._meta.db_table, [field.column], self.quote_name)
to_table = Table(field.target_field.model._meta.db_table, self.quote_name)
to_column = Columns(
field.target_field.model._meta.db_table,
[field.target_field.column],
self.quote_name,
)
deferrable = self.connection.ops.deferrable_sql()
return Statement(
self.sql_create_fk,
table=table,
name=name,
column=column,
to_table=to_table,
to_column=to_column,
deferrable=deferrable,
)
def _fk_constraint_name(self, model, field, suffix):
def create_fk_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
return ForeignKeyName(
model._meta.db_table,
[field.column],
split_identifier(field.target_field.model._meta.db_table)[1],
[field.target_field.column],
suffix,
create_fk_name,
)
def _delete_fk_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_fk, model, name)
def _deferrable_constraint_sql(self, deferrable):
if deferrable is None:
return ""
if deferrable == Deferrable.DEFERRED:
return " DEFERRABLE INITIALLY DEFERRED"
if deferrable == Deferrable.IMMEDIATE:
return " DEFERRABLE INITIALLY IMMEDIATE"
def _unique_sql(
self,
model,
fields,
name,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
):
return None
if condition or include or opclasses or expressions:
# Databases support conditional, covering, and functional unique
# constraints via a unique index.
sql = self._create_unique_sql(
model,
fields,
name=name,
condition=condition,
include=include,
opclasses=opclasses,
expressions=expressions,
)
if sql:
self.deferred_sql.append(sql)
return None
constraint = self.sql_unique_constraint % {
"columns": ", ".join([self.quote_name(field.column) for field in fields]),
"deferrable": self._deferrable_constraint_sql(deferrable),
}
return self.sql_constraint % {
"name": self.quote_name(name),
"constraint": constraint,
}
def _create_unique_sql(
self,
model,
fields,
name=None,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
(
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
)
or (condition and not self.connection.features.supports_partial_indexes)
or (include and not self.connection.features.supports_covering_indexes)
or (
expressions and not self.connection.features.supports_expression_indexes
)
):
return None
def create_unique_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
compiler = Query(model, alias_cols=False).get_compiler(
connection=self.connection
)
table = model._meta.db_table
columns = [field.column for field in fields]
if name is None:
name = IndexName(table, columns, "_uniq", create_unique_name)
else:
name = self.quote_name(name)
if condition or include or opclasses or expressions:
sql = self.sql_create_unique_index
else:
sql = self.sql_create_unique
if columns:
columns = self._index_columns(
table, columns, col_suffixes=(), opclasses=opclasses
)
else:
columns = Expressions(table, expressions, compiler, self.quote_value)
return Statement(
sql,
table=Table(table, self.quote_name),
name=name,
columns=columns,
condition=self._index_condition_sql(condition),
deferrable=self._deferrable_constraint_sql(deferrable),
include=self._index_include_sql(model, include),
)
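    # Hedged illustration: a plain two-column unique lands as a table
    # constraint, roughly 'ALTER TABLE "t" ADD CONSTRAINT "t_a_b_uniq"
    # UNIQUE ("a", "b")', while conditions, includes, opclasses, and
    # expressions fall back to a CREATE UNIQUE INDEX statement instead.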
def _delete_unique_sql(
self,
model,
name,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
(
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
)
or (condition and not self.connection.features.supports_partial_indexes)
or (include and not self.connection.features.supports_covering_indexes)
or (
expressions and not self.connection.features.supports_expression_indexes
)
):
return None
if condition or include or opclasses or expressions:
sql = self.sql_delete_index
else:
sql = self.sql_delete_unique
return self._delete_constraint_sql(sql, model, name)
def _check_sql(self, name, check):
return self.sql_constraint % {
"name": self.quote_name(name),
"constraint": self.sql_check_constraint % {"check": check},
}
def _create_check_sql(self, model, name, check):
return Statement(
self.sql_create_check,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
check=check,
)
def _delete_check_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_check, model, name)
def _delete_constraint_sql(self, template, model, name):
return Statement(
template,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
)
def _constraint_names(
self,
model,
column_names=None,
unique=None,
primary_key=None,
index=None,
foreign_key=None,
check=None,
type_=None,
exclude=None,
):
"""Return all constraint names matching the columns and conditions."""
if column_names is not None:
column_names = [
self.connection.introspection.identifier_converter(name)
for name in column_names
]
with self.connection.cursor() as cursor:
constraints = self.connection.introspection.get_constraints(
cursor, model._meta.db_table
)
result = []
for name, infodict in constraints.items():
if column_names is None or column_names == infodict["columns"]:
if unique is not None and infodict["unique"] != unique:
continue
if primary_key is not None and infodict["primary_key"] != primary_key:
continue
if index is not None and infodict["index"] != index:
continue
if check is not None and infodict["check"] != check:
continue
if foreign_key is not None and not infodict["foreign_key"]:
continue
if type_ is not None and infodict["type"] != type_:
continue
if not exclude or name not in exclude:
result.append(name)
return result
def _delete_primary_key(self, model, strict=False):
constraint_names = self._constraint_names(model, primary_key=True)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of PK constraints for %s"
% (
len(constraint_names),
model._meta.db_table,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_primary_key_sql(model, constraint_name))
def _create_primary_key_sql(self, model, field):
return Statement(
self.sql_create_pk,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(
self._create_index_name(
model._meta.db_table, [field.column], suffix="_pk"
)
),
columns=Columns(model._meta.db_table, [field.column], self.quote_name),
)
def _delete_primary_key_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_pk, model, name)
def _collate_sql(self, collation):
return "COLLATE " + self.quote_name(collation)
def remove_procedure(self, procedure_name, param_types=()):
sql = self.sql_delete_procedure % {
"procedure": self.quote_name(procedure_name),
"param_types": ",".join(param_types),
}
self.execute(sql)
3af729dbe3947d302a8fec378a606cc279943369a731183e3d768fe86f1f4f0a
import logging
import os
import unittest
import warnings
from io import StringIO
from unittest import mock
from django.conf import settings
from django.contrib.staticfiles.finders import get_finder, get_finders
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import default_storage
from django.db import (
IntegrityError,
connection,
connections,
models,
router,
transaction,
)
from django.forms import (
CharField,
EmailField,
Form,
IntegerField,
ValidationError,
formset_factory,
)
from django.http import HttpResponse
from django.template.loader import render_to_string
from django.test import (
SimpleTestCase,
TestCase,
TransactionTestCase,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.html import HTMLParseError, parse_html
from django.test.testcases import DatabaseOperationForbidden
from django.test.utils import (
CaptureQueriesContext,
TestContextDecorator,
ignore_warnings,
isolate_apps,
override_settings,
setup_test_environment,
)
from django.urls import NoReverseMatch, path, reverse, reverse_lazy
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.log import DEFAULT_LOGGING
from django.utils.version import PY311
from .models import Car, Person, PossessedCar
from .views import empty_response
class SkippingTestCase(SimpleTestCase):
def _assert_skipping(self, func, expected_exc, msg=None):
try:
if msg is not None:
with self.assertRaisesMessage(expected_exc, msg):
func()
else:
with self.assertRaises(expected_exc):
func()
except unittest.SkipTest:
self.fail("%s should not result in a skipped test." % func.__name__)
def test_skip_unless_db_feature(self):
"""
Testing the django.test.skipUnlessDBFeature decorator.
"""
# Total hack, but it works, just want an attribute that's always true.
@skipUnlessDBFeature("__class__")
def test_func():
raise ValueError
@skipUnlessDBFeature("notprovided")
def test_func2():
raise ValueError
@skipUnlessDBFeature("__class__", "__class__")
def test_func3():
raise ValueError
@skipUnlessDBFeature("__class__", "notprovided")
def test_func4():
raise ValueError
self._assert_skipping(test_func, ValueError)
self._assert_skipping(test_func2, unittest.SkipTest)
self._assert_skipping(test_func3, ValueError)
self._assert_skipping(test_func4, unittest.SkipTest)
class SkipTestCase(SimpleTestCase):
@skipUnlessDBFeature("missing")
def test_foo(self):
pass
self._assert_skipping(
SkipTestCase("test_foo").test_foo,
ValueError,
"skipUnlessDBFeature cannot be used on test_foo (test_utils.tests."
"SkippingTestCase.test_skip_unless_db_feature.<locals>.SkipTestCase%s) "
"as SkippingTestCase.test_skip_unless_db_feature.<locals>.SkipTestCase "
"doesn't allow queries against the 'default' database."
# Python 3.11 uses fully qualified test name in the output.
% (".test_foo" if PY311 else ""),
)
def test_skip_if_db_feature(self):
"""
Testing the django.test.skipIfDBFeature decorator.
"""
@skipIfDBFeature("__class__")
def test_func():
raise ValueError
@skipIfDBFeature("notprovided")
def test_func2():
raise ValueError
@skipIfDBFeature("__class__", "__class__")
def test_func3():
raise ValueError
@skipIfDBFeature("__class__", "notprovided")
def test_func4():
raise ValueError
@skipIfDBFeature("notprovided", "notprovided")
def test_func5():
raise ValueError
self._assert_skipping(test_func, unittest.SkipTest)
self._assert_skipping(test_func2, ValueError)
self._assert_skipping(test_func3, unittest.SkipTest)
self._assert_skipping(test_func4, unittest.SkipTest)
self._assert_skipping(test_func5, ValueError)
class SkipTestCase(SimpleTestCase):
@skipIfDBFeature("missing")
def test_foo(self):
pass
self._assert_skipping(
SkipTestCase("test_foo").test_foo,
ValueError,
"skipIfDBFeature cannot be used on test_foo (test_utils.tests."
"SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase%s) "
"as SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase "
"doesn't allow queries against the 'default' database."
# Python 3.11 uses fully qualified test name in the output.
% (".test_foo" if PY311 else ""),
)
class SkippingClassTestCase(TestCase):
def test_skip_class_unless_db_feature(self):
@skipUnlessDBFeature("__class__")
class NotSkippedTests(TestCase):
def test_dummy(self):
return
@skipUnlessDBFeature("missing")
@skipIfDBFeature("__class__")
class SkippedTests(TestCase):
def test_will_be_skipped(self):
self.fail("We should never arrive here.")
@skipIfDBFeature("__dict__")
class SkippedTestsSubclass(SkippedTests):
pass
test_suite = unittest.TestSuite()
test_suite.addTest(NotSkippedTests("test_dummy"))
try:
test_suite.addTest(SkippedTests("test_will_be_skipped"))
test_suite.addTest(SkippedTestsSubclass("test_will_be_skipped"))
except unittest.SkipTest:
self.fail("SkipTest should not be raised here.")
result = unittest.TextTestRunner(stream=StringIO()).run(test_suite)
self.assertEqual(result.testsRun, 3)
self.assertEqual(len(result.skipped), 2)
self.assertEqual(result.skipped[0][1], "Database has feature(s) __class__")
self.assertEqual(result.skipped[1][1], "Database has feature(s) __class__")
def test_missing_default_databases(self):
@skipIfDBFeature("missing")
class MissingDatabases(SimpleTestCase):
def test_assertion_error(self):
pass
suite = unittest.TestSuite()
try:
suite.addTest(MissingDatabases("test_assertion_error"))
except unittest.SkipTest:
self.fail("SkipTest should not be raised at this stage")
runner = unittest.TextTestRunner(stream=StringIO())
msg = (
"skipIfDBFeature cannot be used on <class 'test_utils.tests."
"SkippingClassTestCase.test_missing_default_databases.<locals>."
"MissingDatabases'> as it doesn't allow queries against the "
"'default' database."
)
with self.assertRaisesMessage(ValueError, msg):
runner.run(suite)
@override_settings(ROOT_URLCONF="test_utils.urls")
class AssertNumQueriesTests(TestCase):
def test_assert_num_queries(self):
def test_func():
raise ValueError
with self.assertRaises(ValueError):
self.assertNumQueries(2, test_func)
def test_assert_num_queries_with_client(self):
person = Person.objects.create(name="test")
self.assertNumQueries(
1, self.client.get, "/test_utils/get_person/%s/" % person.pk
)
self.assertNumQueries(
1, self.client.get, "/test_utils/get_person/%s/" % person.pk
)
def test_func():
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.assertNumQueries(2, test_func)
@unittest.skipUnless(
connection.vendor != "sqlite" or not connection.is_in_memory_db(),
"For SQLite in-memory tests, closing the connection destroys the database.",
)
class AssertNumQueriesUponConnectionTests(TransactionTestCase):
available_apps = []
def test_ignores_connection_configuration_queries(self):
real_ensure_connection = connection.ensure_connection
connection.close()
def make_configuration_query():
is_opening_connection = connection.connection is None
real_ensure_connection()
if is_opening_connection:
# Avoid infinite recursion. Creating a cursor calls
# ensure_connection() which is currently mocked by this method.
with connection.cursor() as cursor:
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
ensure_connection = (
"django.db.backends.base.base.BaseDatabaseWrapper.ensure_connection"
)
with mock.patch(ensure_connection, side_effect=make_configuration_query):
with self.assertNumQueries(1):
list(Car.objects.all())
class AssertQuerysetEqualTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.p1 = Person.objects.create(name="p1")
cls.p2 = Person.objects.create(name="p2")
def test_empty(self):
self.assertQuerysetEqual(Person.objects.filter(name="p3"), [])
def test_ordered(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"),
[self.p1, self.p2],
)
def test_unordered(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"), [self.p2, self.p1], ordered=False
)
def test_queryset(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"),
Person.objects.order_by("name"),
)
def test_flat_values_list(self):
self.assertQuerysetEqual(
Person.objects.order_by("name").values_list("name", flat=True),
["p1", "p2"],
)
def test_transform(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"),
[self.p1.pk, self.p2.pk],
transform=lambda x: x.pk,
)
def test_repr_transform(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"),
[repr(self.p1), repr(self.p2)],
transform=repr,
)
def test_undefined_order(self):
# Using an unordered queryset with more than one ordered value
# is an error.
msg = (
"Trying to compare non-ordered queryset against more than one "
"ordered value."
)
with self.assertRaisesMessage(ValueError, msg):
self.assertQuerysetEqual(
Person.objects.all(),
[self.p1, self.p2],
)
# No error for one value.
self.assertQuerysetEqual(Person.objects.filter(name="p1"), [self.p1])
def test_repeated_values(self):
"""
        assertQuerysetEqual checks the number of appearances of each item
        when used with the ordered=False option.
"""
batmobile = Car.objects.create(name="Batmobile")
k2000 = Car.objects.create(name="K 2000")
PossessedCar.objects.bulk_create(
[
PossessedCar(car=batmobile, belongs_to=self.p1),
PossessedCar(car=batmobile, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
]
)
with self.assertRaises(AssertionError):
self.assertQuerysetEqual(
self.p1.cars.all(), [batmobile, k2000], ordered=False
)
self.assertQuerysetEqual(
self.p1.cars.all(), [batmobile] * 2 + [k2000] * 4, ordered=False
)
def test_maxdiff(self):
names = ["Joe Smith %s" % i for i in range(20)]
Person.objects.bulk_create([Person(name=name) for name in names])
names.append("Extra Person")
with self.assertRaises(AssertionError) as ctx:
self.assertQuerysetEqual(
Person.objects.filter(name__startswith="Joe"),
names,
ordered=False,
transform=lambda p: p.name,
)
self.assertIn("Set self.maxDiff to None to see it.", str(ctx.exception))
original = self.maxDiff
self.maxDiff = None
try:
with self.assertRaises(AssertionError) as ctx:
self.assertQuerysetEqual(
Person.objects.filter(name__startswith="Joe"),
names,
ordered=False,
transform=lambda p: p.name,
)
finally:
self.maxDiff = original
exception_msg = str(ctx.exception)
self.assertNotIn("Set self.maxDiff to None to see it.", exception_msg)
for name in names:
self.assertIn(name, exception_msg)
@override_settings(ROOT_URLCONF="test_utils.urls")
class CaptureQueriesContextManagerTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.person_pk = str(Person.objects.create(name="test").pk)
def test_simple(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.get(pk=self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
with CaptureQueriesContext(connection) as captured_queries:
pass
self.assertEqual(0, len(captured_queries))
def test_within(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.get(pk=self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
def test_nested(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.count()
with CaptureQueriesContext(connection) as nested_captured_queries:
Person.objects.count()
self.assertEqual(1, len(nested_captured_queries))
self.assertEqual(2, len(captured_queries))
def test_failure(self):
with self.assertRaises(TypeError):
with CaptureQueriesContext(connection):
raise TypeError
def test_with_client(self):
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 2)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
self.assertIn(self.person_pk, captured_queries[1]["sql"])
@override_settings(ROOT_URLCONF="test_utils.urls")
class AssertNumQueriesContextManagerTests(TestCase):
def test_simple(self):
with self.assertNumQueries(0):
pass
with self.assertNumQueries(1):
Person.objects.count()
with self.assertNumQueries(2):
Person.objects.count()
Person.objects.count()
def test_failure(self):
msg = "1 != 2 : 1 queries executed, 2 expected\nCaptured queries were:\n1."
with self.assertRaisesMessage(AssertionError, msg):
with self.assertNumQueries(2):
Person.objects.count()
with self.assertRaises(TypeError):
with self.assertNumQueries(4000):
raise TypeError
def test_with_client(self):
person = Person.objects.create(name="test")
with self.assertNumQueries(1):
self.client.get("/test_utils/get_person/%s/" % person.pk)
with self.assertNumQueries(1):
self.client.get("/test_utils/get_person/%s/" % person.pk)
with self.assertNumQueries(2):
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.client.get("/test_utils/get_person/%s/" % person.pk)
@override_settings(ROOT_URLCONF="test_utils.urls")
class AssertTemplateUsedContextManagerTests(SimpleTestCase):
def test_usage(self):
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/base.html")
with self.assertTemplateUsed(template_name="template_used/base.html"):
render_to_string("template_used/base.html")
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/include.html")
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/extends.html")
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/base.html")
render_to_string("template_used/base.html")
def test_nested_usage(self):
with self.assertTemplateUsed("template_used/base.html"):
with self.assertTemplateUsed("template_used/include.html"):
render_to_string("template_used/include.html")
with self.assertTemplateUsed("template_used/extends.html"):
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/extends.html")
with self.assertTemplateUsed("template_used/base.html"):
with self.assertTemplateUsed("template_used/alternative.html"):
render_to_string("template_used/alternative.html")
render_to_string("template_used/base.html")
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/extends.html")
with self.assertTemplateNotUsed("template_used/base.html"):
render_to_string("template_used/alternative.html")
render_to_string("template_used/base.html")
def test_not_used(self):
with self.assertTemplateNotUsed("template_used/base.html"):
pass
with self.assertTemplateNotUsed("template_used/alternative.html"):
pass
def test_error_message(self):
msg = "No templates used to render the response"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed("template_used/base.html"):
pass
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(template_name="template_used/base.html"):
pass
msg2 = (
"Template 'template_used/base.html' was not a template used to render "
"the response. Actual template(s) used: template_used/alternative.html"
)
with self.assertRaisesMessage(AssertionError, msg2):
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/alternative.html")
with self.assertRaisesMessage(
AssertionError, "No templates used to render the response"
):
response = self.client.get("/test_utils/no_template_used/")
self.assertTemplateUsed(response, "template_used/base.html")
def test_msg_prefix(self):
msg_prefix = "Prefix"
msg = f"{msg_prefix}: No templates used to render the response"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(
"template_used/base.html", msg_prefix=msg_prefix
):
pass
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(
template_name="template_used/base.html",
msg_prefix=msg_prefix,
):
pass
msg = (
f"{msg_prefix}: Template 'template_used/base.html' was not a "
f"template used to render the response. Actual template(s) used: "
f"template_used/alternative.html"
)
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(
"template_used/base.html", msg_prefix=msg_prefix
):
render_to_string("template_used/alternative.html")
def test_count(self):
with self.assertTemplateUsed("template_used/base.html", count=2):
render_to_string("template_used/base.html")
render_to_string("template_used/base.html")
msg = (
"Template 'template_used/base.html' was expected to be rendered "
"3 time(s) but was actually rendered 2 time(s)."
)
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed("template_used/base.html", count=3):
render_to_string("template_used/base.html")
render_to_string("template_used/base.html")
def test_failure(self):
msg = "response and/or template_name argument must be provided"
with self.assertRaisesMessage(TypeError, msg):
with self.assertTemplateUsed():
pass
msg = "No templates used to render the response"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(""):
pass
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(""):
render_to_string("template_used/base.html")
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(template_name=""):
pass
msg = (
"Template 'template_used/base.html' was not a template used to "
"render the response. Actual template(s) used: "
"template_used/alternative.html"
)
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/alternative.html")
def test_assert_used_on_http_response(self):
response = HttpResponse()
msg = "%s() is only usable on responses fetched using the Django test Client."
with self.assertRaisesMessage(ValueError, msg % "assertTemplateUsed"):
self.assertTemplateUsed(response, "template.html")
with self.assertRaisesMessage(ValueError, msg % "assertTemplateNotUsed"):
self.assertTemplateNotUsed(response, "template.html")
class HTMLEqualTests(SimpleTestCase):
def test_html_parser(self):
element = parse_html("<div><p>Hello</p></div>")
self.assertEqual(len(element.children), 1)
self.assertEqual(element.children[0].name, "p")
self.assertEqual(element.children[0].children[0], "Hello")
parse_html("<p>")
parse_html("<p attr>")
dom = parse_html("<p>foo")
self.assertEqual(len(dom.children), 1)
self.assertEqual(dom.name, "p")
self.assertEqual(dom[0], "foo")
def test_parse_html_in_script(self):
parse_html('<script>var a = "<p" + ">";</script>')
parse_html(
"""
<script>
var js_sha_link='<p>***</p>';
</script>
"""
)
# script content will be parsed to text
dom = parse_html(
"""
<script><p>foo</p> '</scr'+'ipt>' <span>bar</span></script>
"""
)
self.assertEqual(len(dom.children), 1)
self.assertEqual(dom.children[0], "<p>foo</p> '</scr'+'ipt>' <span>bar</span>")
def test_self_closing_tags(self):
self_closing_tags = [
"area",
"base",
"br",
"col",
"embed",
"hr",
"img",
"input",
"link",
"meta",
"param",
"source",
"track",
"wbr",
# Deprecated tags
"frame",
"spacer",
]
for tag in self_closing_tags:
with self.subTest(tag):
dom = parse_html("<p>Hello <%s> world</p>" % tag)
self.assertEqual(len(dom.children), 3)
self.assertEqual(dom[0], "Hello")
self.assertEqual(dom[1].name, tag)
self.assertEqual(dom[2], "world")
dom = parse_html("<p>Hello <%s /> world</p>" % tag)
self.assertEqual(len(dom.children), 3)
self.assertEqual(dom[0], "Hello")
self.assertEqual(dom[1].name, tag)
self.assertEqual(dom[2], "world")
def test_simple_equal_html(self):
self.assertHTMLEqual("", "")
self.assertHTMLEqual("<p></p>", "<p></p>")
self.assertHTMLEqual("<p></p>", " <p> </p> ")
self.assertHTMLEqual("<div><p>Hello</p></div>", "<div><p>Hello</p></div>")
self.assertHTMLEqual("<div><p>Hello</p></div>", "<div> <p>Hello</p> </div>")
self.assertHTMLEqual("<div>\n<p>Hello</p></div>", "<div><p>Hello</p></div>\n")
self.assertHTMLEqual(
"<div><p>Hello\nWorld !</p></div>", "<div><p>Hello World\n!</p></div>"
)
self.assertHTMLEqual(
"<div><p>Hello\nWorld !</p></div>", "<div><p>Hello World\n!</p></div>"
)
self.assertHTMLEqual("<p>Hello World !</p>", "<p>Hello World\n\n!</p>")
self.assertHTMLEqual("<p> </p>", "<p></p>")
self.assertHTMLEqual("<p/>", "<p></p>")
self.assertHTMLEqual("<p />", "<p></p>")
self.assertHTMLEqual("<input checked>", '<input checked="checked">')
self.assertHTMLEqual("<p>Hello", "<p> Hello")
self.assertHTMLEqual("<p>Hello</p>World", "<p>Hello</p> World")
def test_ignore_comments(self):
self.assertHTMLEqual(
"<div>Hello<!-- this is a comment --> World!</div>",
"<div>Hello World!</div>",
)
def test_unequal_html(self):
self.assertHTMLNotEqual("<p>Hello</p>", "<p>Hello!</p>")
self.assertHTMLNotEqual("<p>foobar</p>", "<p>foo bar</p>")
self.assertHTMLNotEqual("<p>foo bar</p>", "<p>foo bar</p>")
self.assertHTMLNotEqual("<p>foo nbsp</p>", "<p>foo </p>")
self.assertHTMLNotEqual("<p>foo #20</p>", "<p>foo </p>")
self.assertHTMLNotEqual(
"<p><span>Hello</span><span>World</span></p>",
"<p><span>Hello</span>World</p>",
)
self.assertHTMLNotEqual(
"<p><span>Hello</span>World</p>",
"<p><span>Hello</span><span>World</span></p>",
)
def test_attributes(self):
self.assertHTMLEqual(
'<input type="text" id="id_name" />', '<input id="id_name" type="text" />'
)
self.assertHTMLEqual(
"""<input type='text' id="id_name" />""",
'<input id="id_name" type="text" />',
)
self.assertHTMLNotEqual(
'<input type="text" id="id_name" />',
'<input type="password" id="id_name" />',
)
def test_class_attribute(self):
pairs = [
('<p class="foo bar"></p>', '<p class="bar foo"></p>'),
('<p class=" foo bar "></p>', '<p class="bar foo"></p>'),
('<p class=" foo bar "></p>', '<p class="bar foo"></p>'),
('<p class="foo\tbar"></p>', '<p class="bar foo"></p>'),
('<p class="\tfoo\tbar\t"></p>', '<p class="bar foo"></p>'),
('<p class="\t\t\tfoo\t\t\tbar\t\t\t"></p>', '<p class="bar foo"></p>'),
('<p class="\t \nfoo \t\nbar\n\t "></p>', '<p class="bar foo"></p>'),
]
for html1, html2 in pairs:
with self.subTest(html1):
self.assertHTMLEqual(html1, html2)
def test_boolean_attribute(self):
html1 = "<input checked>"
html2 = '<input checked="">'
html3 = '<input checked="checked">'
self.assertHTMLEqual(html1, html2)
self.assertHTMLEqual(html1, html3)
self.assertHTMLEqual(html2, html3)
self.assertHTMLNotEqual(html1, '<input checked="invalid">')
self.assertEqual(str(parse_html(html1)), "<input checked>")
self.assertEqual(str(parse_html(html2)), "<input checked>")
self.assertEqual(str(parse_html(html3)), "<input checked>")
    def test_non_boolean_attributes(self):
html1 = "<input value>"
html2 = '<input value="">'
html3 = '<input value="value">'
self.assertHTMLEqual(html1, html2)
self.assertHTMLNotEqual(html1, html3)
self.assertEqual(str(parse_html(html1)), '<input value="">')
self.assertEqual(str(parse_html(html2)), '<input value="">')
def test_normalize_refs(self):
pairs = [
("'", "'"),
("'", "'"),
("'", "'"),
("'", "'"),
("'", "'"),
("'", "'"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
]
for pair in pairs:
with self.subTest(repr(pair)):
self.assertHTMLEqual(*pair)
def test_complex_examples(self):
self.assertHTMLEqual(
"""<tr><th><label for="id_first_name">First name:</label></th>
<td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th>
<td><input type="text" id="id_last_name" name="last_name" value="Lennon" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th>
<td><input type="text" value="1940-10-9" name="birthday" id="id_birthday" /></td></tr>""", # NOQA
"""
<tr><th>
<label for="id_first_name">First name:</label></th><td>
<input type="text" name="first_name" value="John" id="id_first_name" />
</td></tr>
<tr><th>
<label for="id_last_name">Last name:</label></th><td>
<input type="text" name="last_name" value="Lennon" id="id_last_name" />
</td></tr>
<tr><th>
<label for="id_birthday">Birthday:</label></th><td>
<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />
</td></tr>
""",
)
self.assertHTMLEqual(
"""<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet">
<title>Document</title>
<meta attribute="value">
</head>
<body>
<p>
This is a valid paragraph
<div> this is a div AFTER the p</div>
</body>
</html>""",
"""
<html>
<head>
<link rel="stylesheet">
<title>Document</title>
<meta attribute="value">
</head>
<body>
<p> This is a valid paragraph
<!-- browsers would close the p tag here -->
<div> this is a div AFTER the p</div>
</p> <!-- this is invalid HTML parsing, but it should make no
difference in most cases -->
</body>
</html>""",
)
def test_html_contain(self):
# equal html contains each other
dom1 = parse_html("<p>foo")
dom2 = parse_html("<p>foo</p>")
self.assertIn(dom1, dom2)
self.assertIn(dom2, dom1)
dom2 = parse_html("<div><p>foo</p></div>")
self.assertIn(dom1, dom2)
self.assertNotIn(dom2, dom1)
self.assertNotIn("<p>foo</p>", dom2)
self.assertIn("foo", dom2)
# when a root element is used ...
dom1 = parse_html("<p>foo</p><p>bar</p>")
dom2 = parse_html("<p>foo</p><p>bar</p>")
self.assertIn(dom1, dom2)
dom1 = parse_html("<p>foo</p>")
self.assertIn(dom1, dom2)
dom1 = parse_html("<p>bar</p>")
self.assertIn(dom1, dom2)
dom1 = parse_html("<div><p>foo</p><p>bar</p></div>")
self.assertIn(dom2, dom1)
def test_count(self):
# equal html contains each other one time
dom1 = parse_html("<p>foo")
dom2 = parse_html("<p>foo</p>")
self.assertEqual(dom1.count(dom2), 1)
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<p>foo</p><p>bar</p>")
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<p>foo foo</p><p>foo</p>")
self.assertEqual(dom2.count("foo"), 3)
dom2 = parse_html('<p class="bar">foo</p>')
self.assertEqual(dom2.count("bar"), 0)
self.assertEqual(dom2.count("class"), 0)
self.assertEqual(dom2.count("p"), 0)
self.assertEqual(dom2.count("o"), 2)
dom2 = parse_html("<p>foo</p><p>foo</p>")
self.assertEqual(dom2.count(dom1), 2)
dom2 = parse_html('<div><p>foo<input type=""></p><p>foo</p></div>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<div><div><p>foo</p></div></div>")
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<p>foo<p>foo</p></p>")
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<p>foo<p>bar</p></p>")
self.assertEqual(dom2.count(dom1), 0)
# HTML with a root element contains the same HTML with no root element.
dom1 = parse_html("<p>foo</p><p>bar</p>")
dom2 = parse_html("<div><p>foo</p><p>bar</p></div>")
self.assertEqual(dom2.count(dom1), 1)
# Target of search is a sequence of child elements and appears more
# than once.
dom2 = parse_html("<div><p>foo</p><p>bar</p><p>foo</p><p>bar</p></div>")
self.assertEqual(dom2.count(dom1), 2)
# Searched HTML has additional children.
dom1 = parse_html("<a/><b/>")
dom2 = parse_html("<a/><b/><c/>")
self.assertEqual(dom2.count(dom1), 1)
# No match found in children.
dom1 = parse_html("<b/><a/>")
self.assertEqual(dom2.count(dom1), 0)
# Target of search found among children and grandchildren.
dom1 = parse_html("<b/><b/>")
dom2 = parse_html("<a><b/><b/></a><b/><b/>")
self.assertEqual(dom2.count(dom1), 2)
def test_root_element_escaped_html(self):
html = "<br>"
parsed = parse_html(html)
self.assertEqual(str(parsed), html)
def test_parsing_errors(self):
with self.assertRaises(AssertionError):
self.assertHTMLEqual("<p>", "")
with self.assertRaises(AssertionError):
self.assertHTMLEqual("", "<p>")
error_msg = (
"First argument is not valid HTML:\n"
"('Unexpected end tag `div` (Line 1, Column 6)', (1, 6))"
)
with self.assertRaisesMessage(AssertionError, error_msg):
self.assertHTMLEqual("< div></ div>", "<div></div>")
with self.assertRaises(HTMLParseError):
parse_html("</p>")
def test_escaped_html_errors(self):
msg = "<p>\n<foo>\n</p> != <p>\n<foo>\n</p>\n"
with self.assertRaisesMessage(AssertionError, msg):
self.assertHTMLEqual("<p><foo></p>", "<p><foo></p>")
with self.assertRaisesMessage(AssertionError, msg):
self.assertHTMLEqual("<p><foo></p>", "<p><foo></p>")
def test_contains_html(self):
response = HttpResponse(
"""<body>
This is a form: <form method="get">
<input type="text" name="Hello" />
</form></body>"""
)
self.assertNotContains(response, "<input name='Hello' type='text'>")
self.assertContains(response, '<form method="get">')
self.assertContains(response, "<input name='Hello' type='text'>", html=True)
self.assertNotContains(response, '<form method="get">', html=True)
invalid_response = HttpResponse("""<body <bad>>""")
with self.assertRaises(AssertionError):
self.assertContains(invalid_response, "<p></p>")
with self.assertRaises(AssertionError):
self.assertContains(response, '<p "whats" that>')
def test_unicode_handling(self):
response = HttpResponse(
'<p class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</p>'
)
self.assertContains(
response,
'<p class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</p>',
html=True,
)
class JSONEqualTests(SimpleTestCase):
def test_simple_equal(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr1": "foo", "attr2":"baz"}'
self.assertJSONEqual(json1, json2)
def test_simple_equal_unordered(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr2":"baz", "attr1": "foo"}'
self.assertJSONEqual(json1, json2)
def test_simple_equal_raise(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONEqual(json1, json2)
def test_equal_parsing_errors(self):
invalid_json = '{"attr1": "foo, "attr2":"baz"}'
valid_json = '{"attr1": "foo", "attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONEqual(invalid_json, valid_json)
with self.assertRaises(AssertionError):
self.assertJSONEqual(valid_json, invalid_json)
def test_simple_not_equal(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr2":"baz"}'
self.assertJSONNotEqual(json1, json2)
def test_simple_not_equal_raise(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr1": "foo", "attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONNotEqual(json1, json2)
def test_not_equal_parsing_errors(self):
invalid_json = '{"attr1": "foo, "attr2":"baz"}'
valid_json = '{"attr1": "foo", "attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONNotEqual(invalid_json, valid_json)
with self.assertRaises(AssertionError):
self.assertJSONNotEqual(valid_json, invalid_json)
class XMLEqualTests(SimpleTestCase):
def test_simple_equal(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr1='a' attr2='b' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_unordered(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_raise(self):
xml1 = "<elem attr1='a' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_raises_message(self):
xml1 = "<elem attr1='a' />"
xml2 = "<elem attr2='b' attr1='a' />"
msg = """{xml1} != {xml2}
- <elem attr1='a' />
+ <elem attr2='b' attr1='a' />
? ++++++++++
""".format(
xml1=repr(xml1), xml2=repr(xml2)
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertXMLEqual(xml1, xml2)
def test_simple_not_equal(self):
xml1 = "<elem attr1='a' attr2='c' />"
xml2 = "<elem attr1='a' attr2='b' />"
self.assertXMLNotEqual(xml1, xml2)
def test_simple_not_equal_raise(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLNotEqual(xml1, xml2)
def test_parsing_errors(self):
        xml_invalid = "<elem attr1='a attr2='b' />"
        xml2 = "<elem attr2='b' attr1='a' />"
        with self.assertRaises(AssertionError):
            self.assertXMLNotEqual(xml_invalid, xml2)
def test_comment_root(self):
xml1 = "<?xml version='1.0'?><!-- comment1 --><elem attr1='a' attr2='b' />"
xml2 = "<?xml version='1.0'?><!-- comment2 --><elem attr2='b' attr1='a' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_with_leading_or_trailing_whitespace(self):
xml1 = "<elem>foo</elem> \t\n"
xml2 = " \t\n<elem>foo</elem>"
self.assertXMLEqual(xml1, xml2)
def test_simple_not_equal_with_whitespace_in_the_middle(self):
xml1 = "<elem>foo</elem><elem>bar</elem>"
xml2 = "<elem>foo</elem> <elem>bar</elem>"
self.assertXMLNotEqual(xml1, xml2)
def test_doctype_root(self):
xml1 = '<?xml version="1.0"?><!DOCTYPE root SYSTEM "example1.dtd"><root />'
xml2 = '<?xml version="1.0"?><!DOCTYPE root SYSTEM "example2.dtd"><root />'
self.assertXMLEqual(xml1, xml2)
def test_processing_instruction(self):
xml1 = (
'<?xml version="1.0"?>'
'<?xml-model href="http://www.example1.com"?><root />'
)
xml2 = (
'<?xml version="1.0"?>'
'<?xml-model href="http://www.example2.com"?><root />'
)
self.assertXMLEqual(xml1, xml2)
self.assertXMLEqual(
'<?xml-stylesheet href="style1.xslt" type="text/xsl"?><root />',
'<?xml-stylesheet href="style2.xslt" type="text/xsl"?><root />',
)
class SkippingExtraTests(TestCase):
fixtures = ["should_not_be_loaded.json"]
# HACK: This depends on internals of our TestCase subclasses
def __call__(self, result=None):
# Detect fixture loading by counting SQL queries, should be zero
with self.assertNumQueries(0):
super().__call__(result)
@unittest.skip("Fixture loading should not be performed for skipped tests.")
def test_fixtures_are_skipped(self):
pass
class AssertRaisesMsgTest(SimpleTestCase):
def test_assert_raises_message(self):
msg = "'Expected message' not found in 'Unexpected message'"
# context manager form of assertRaisesMessage()
with self.assertRaisesMessage(AssertionError, msg):
with self.assertRaisesMessage(ValueError, "Expected message"):
raise ValueError("Unexpected message")
# callable form
def func():
raise ValueError("Unexpected message")
with self.assertRaisesMessage(AssertionError, msg):
self.assertRaisesMessage(ValueError, "Expected message", func)
def test_special_re_chars(self):
"""assertRaisesMessage shouldn't interpret RE special chars."""
def func1():
raise ValueError("[.*x+]y?")
with self.assertRaisesMessage(ValueError, "[.*x+]y?"):
func1()
class AssertWarnsMessageTests(SimpleTestCase):
def test_context_manager(self):
with self.assertWarnsMessage(UserWarning, "Expected message"):
warnings.warn("Expected message", UserWarning)
def test_context_manager_failure(self):
msg = "Expected message' not found in 'Unexpected message'"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertWarnsMessage(UserWarning, "Expected message"):
warnings.warn("Unexpected message", UserWarning)
def test_callable(self):
def func():
warnings.warn("Expected message", UserWarning)
self.assertWarnsMessage(UserWarning, "Expected message", func)
def test_special_re_chars(self):
def func1():
warnings.warn("[.*x+]y?", UserWarning)
with self.assertWarnsMessage(UserWarning, "[.*x+]y?"):
func1()
# TODO: Remove when dropping support for PY39.
class AssertNoLogsTest(SimpleTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
logging.config.dictConfig(DEFAULT_LOGGING)
cls.addClassCleanup(logging.config.dictConfig, settings.LOGGING)
def setUp(self):
self.logger = logging.getLogger("django")
@override_settings(DEBUG=True)
def test_fails_when_log_emitted(self):
msg = "Unexpected logs found: ['INFO:django:FAIL!']"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertNoLogs("django", "INFO"):
self.logger.info("FAIL!")
@override_settings(DEBUG=True)
def test_text_level(self):
with self.assertNoLogs("django", "INFO"):
self.logger.debug("DEBUG logs are ignored.")
@override_settings(DEBUG=True)
def test_int_level(self):
with self.assertNoLogs("django", logging.INFO):
self.logger.debug("DEBUG logs are ignored.")
@override_settings(DEBUG=True)
def test_default_level(self):
with self.assertNoLogs("django"):
self.logger.debug("DEBUG logs are ignored.")
@override_settings(DEBUG=True)
def test_does_not_hide_other_failures(self):
msg = "1 != 2"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertNoLogs("django"):
self.assertEqual(1, 2)
class AssertFieldOutputTests(SimpleTestCase):
def test_assert_field_output(self):
error_invalid = ["Enter a valid email address."]
self.assertFieldOutput(
EmailField, {"[email protected]": "[email protected]"}, {"aaa": error_invalid}
)
with self.assertRaises(AssertionError):
self.assertFieldOutput(
EmailField,
{"[email protected]": "[email protected]"},
{"aaa": error_invalid + ["Another error"]},
)
with self.assertRaises(AssertionError):
self.assertFieldOutput(
EmailField, {"[email protected]": "Wrong output"}, {"aaa": error_invalid}
)
with self.assertRaises(AssertionError):
self.assertFieldOutput(
EmailField,
{"[email protected]": "[email protected]"},
{"aaa": ["Come on, gimme some well formatted data, dude."]},
)
def test_custom_required_message(self):
class MyCustomField(IntegerField):
default_error_messages = {
"required": "This is really required.",
}
self.assertFieldOutput(MyCustomField, {}, {}, empty_value=None)
@override_settings(ROOT_URLCONF="test_utils.urls")
class AssertURLEqualTests(SimpleTestCase):
def test_equal(self):
valid_tests = (
("http://example.com/?", "http://example.com/"),
("http://example.com/?x=1&", "http://example.com/?x=1"),
("http://example.com/?x=1&y=2", "http://example.com/?y=2&x=1"),
("http://example.com/?x=1&y=2", "http://example.com/?y=2&x=1"),
(
"http://example.com/?x=1&y=2&a=1&a=2",
"http://example.com/?a=1&a=2&y=2&x=1",
),
("/path/to/?x=1&y=2&z=3", "/path/to/?z=3&y=2&x=1"),
("?x=1&y=2&z=3", "?z=3&y=2&x=1"),
("/test_utils/no_template_used/", reverse_lazy("no_template_used")),
)
for url1, url2 in valid_tests:
with self.subTest(url=url1):
self.assertURLEqual(url1, url2)
def test_not_equal(self):
invalid_tests = (
# Protocol must be the same.
("http://example.com/", "https://example.com/"),
("http://example.com/?x=1&x=2", "https://example.com/?x=2&x=1"),
("http://example.com/?x=1&y=bar&x=2", "https://example.com/?y=bar&x=2&x=1"),
# Parameters of the same name must be in the same order.
("/path/to?a=1&a=2", "/path/to/?a=2&a=1"),
)
for url1, url2 in invalid_tests:
with self.subTest(url=url1), self.assertRaises(AssertionError):
self.assertURLEqual(url1, url2)
def test_message(self):
msg = (
"Expected 'http://example.com/?x=1&x=2' to equal "
"'https://example.com/?x=2&x=1'"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertURLEqual(
"http://example.com/?x=1&x=2", "https://example.com/?x=2&x=1"
)
def test_msg_prefix(self):
msg = (
"Prefix: Expected 'http://example.com/?x=1&x=2' to equal "
"'https://example.com/?x=2&x=1'"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertURLEqual(
"http://example.com/?x=1&x=2",
"https://example.com/?x=2&x=1",
msg_prefix="Prefix: ",
)
class TestForm(Form):
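    # Helper form: "invalid" triggers a field error, "invalid_non_field"
    # triggers a non-field error; valid()/invalid() return pre-cleaned forms.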
field = CharField()
def clean_field(self):
value = self.cleaned_data.get("field", "")
if value == "invalid":
raise ValidationError("invalid value")
return value
def clean(self):
if self.cleaned_data.get("field") == "invalid_non_field":
raise ValidationError("non-field error")
return self.cleaned_data
@classmethod
def _get_cleaned_form(cls, field_value):
form = cls({"field": field_value})
form.full_clean()
return form
@classmethod
def valid(cls):
return cls._get_cleaned_form("valid")
@classmethod
def invalid(cls, nonfield=False):
return cls._get_cleaned_form("invalid_non_field" if nonfield else "invalid")
class TestFormset(formset_factory(TestForm)):
@classmethod
def _get_cleaned_formset(cls, field_value):
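        # Management form data (TOTAL_FORMS/INITIAL_FORMS) is required to
        # bind a formset; this builds a bound single-form formset.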
formset = cls(
{
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "0",
"form-0-field": field_value,
}
)
formset.full_clean()
return formset
@classmethod
def valid(cls):
return cls._get_cleaned_formset("valid")
@classmethod
def invalid(cls, nonfield=False, nonform=False):
if nonform:
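            # Binding without management form data produces a non-form error
            # on the formset itself.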
formset = cls({}, error_messages={"missing_management_form": "error"})
formset.full_clean()
return formset
return cls._get_cleaned_formset("invalid_non_field" if nonfield else "invalid")
class AssertFormErrorTests(SimpleTestCase):
@ignore_warnings(category=RemovedInDjango50Warning)
def test_non_client_response(self):
msg = (
"assertFormError() is only usable on responses fetched using the "
"Django test Client."
)
response = HttpResponse()
with self.assertRaisesMessage(ValueError, msg):
self.assertFormError(response, "form", "field", "invalid value")
@ignore_warnings(category=RemovedInDjango50Warning)
def test_response_with_no_context(self):
msg = "Response did not use any contexts to render the response"
response = mock.Mock(context=[])
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(response, "form", "field", "invalid value")
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
response,
"form",
"field",
"invalid value",
msg_prefix=msg_prefix,
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_form_not_in_context(self):
msg = "The form 'form' was not used to render the response"
response = mock.Mock(context=[{}])
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(response, "form", "field", "invalid value")
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
response, "form", "field", "invalid value", msg_prefix=msg_prefix
)
def test_single_error(self):
self.assertFormError(TestForm.invalid(), "field", "invalid value")
def test_error_list(self):
self.assertFormError(TestForm.invalid(), "field", ["invalid value"])
def test_empty_errors_valid_form(self):
self.assertFormError(TestForm.valid(), "field", [])
def test_empty_errors_valid_form_non_field_errors(self):
self.assertFormError(TestForm.valid(), None, [])
def test_field_not_in_form(self):
msg = (
"The form <TestForm bound=True, valid=False, fields=(field)> does not "
"contain the field 'other_field'."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(TestForm.invalid(), "other_field", "invalid value")
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
TestForm.invalid(),
"other_field",
"invalid value",
msg_prefix=msg_prefix,
)
def test_field_with_no_errors(self):
msg = (
"The errors of field 'field' on form <TestForm bound=True, valid=True, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormError(TestForm.valid(), "field", "invalid value")
self.assertIn("[] != ['invalid value']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
TestForm.valid(), "field", "invalid value", msg_prefix=msg_prefix
)
def test_field_with_different_error(self):
msg = (
"The errors of field 'field' on form <TestForm bound=True, valid=False, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormError(TestForm.invalid(), "field", "other error")
self.assertIn("['invalid value'] != ['other error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
TestForm.invalid(), "field", "other error", msg_prefix=msg_prefix
)
def test_unbound_form(self):
msg = (
"The form <TestForm bound=False, valid=Unknown, fields=(field)> is not "
"bound, it will never have any errors."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(TestForm(), "field", [])
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(TestForm(), "field", [], msg_prefix=msg_prefix)
def test_empty_errors_invalid_form(self):
msg = (
"The errors of field 'field' on form <TestForm bound=True, valid=False, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormError(TestForm.invalid(), "field", [])
self.assertIn("['invalid value'] != []", str(ctx.exception))
def test_non_field_errors(self):
self.assertFormError(TestForm.invalid(nonfield=True), None, "non-field error")
def test_different_non_field_errors(self):
msg = (
"The non-field errors of form <TestForm bound=True, valid=False, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormError(
TestForm.invalid(nonfield=True), None, "other non-field error"
)
self.assertIn(
"['non-field error'] != ['other non-field error']", str(ctx.exception)
)
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
TestForm.invalid(nonfield=True),
None,
"other non-field error",
msg_prefix=msg_prefix,
)
class AssertFormsetErrorTests(SimpleTestCase):
@ignore_warnings(category=RemovedInDjango50Warning)
def test_non_client_response(self):
msg = (
"assertFormsetError() is only usable on responses fetched using "
"the Django test Client."
)
response = HttpResponse()
with self.assertRaisesMessage(ValueError, msg):
self.assertFormsetError(response, "formset", 0, "field", "invalid value")
@ignore_warnings(category=RemovedInDjango50Warning)
def test_response_with_no_context(self):
msg = "Response did not use any contexts to render the response"
response = mock.Mock(context=[])
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(response, "formset", 0, "field", "invalid value")
@ignore_warnings(category=RemovedInDjango50Warning)
def test_formset_not_in_context(self):
msg = "The formset 'formset' was not used to render the response"
response = mock.Mock(context=[{}])
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(response, "formset", 0, "field", "invalid value")
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
response, "formset", 0, "field", "invalid value", msg_prefix=msg_prefix
)
def test_single_error(self):
self.assertFormsetError(TestFormset.invalid(), 0, "field", "invalid value")
def test_error_list(self):
self.assertFormsetError(TestFormset.invalid(), 0, "field", ["invalid value"])
def test_empty_errors_valid_formset(self):
self.assertFormsetError(TestFormset.valid(), 0, "field", [])
def test_multiple_forms(self):
formset = TestFormset(
{
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "0",
"form-0-field": "valid",
"form-1-field": "invalid",
}
)
formset.full_clean()
self.assertFormsetError(formset, 0, "field", [])
self.assertFormsetError(formset, 1, "field", ["invalid value"])
def test_field_not_in_form(self):
msg = (
"The form 0 of formset <TestFormset: bound=True valid=False total_forms=1> "
"does not contain the field 'other_field'."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(
TestFormset.invalid(), 0, "other_field", "invalid value"
)
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(),
0,
"other_field",
"invalid value",
msg_prefix=msg_prefix,
)
def test_field_with_no_errors(self):
msg = (
"The errors of field 'field' on form 0 of formset <TestFormset: bound=True "
"valid=True total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.valid(), 0, "field", "invalid value")
self.assertIn("[] != ['invalid value']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.valid(), 0, "field", "invalid value", msg_prefix=msg_prefix
)
def test_field_with_different_error(self):
msg = (
"The errors of field 'field' on form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.invalid(), 0, "field", "other error")
self.assertIn("['invalid value'] != ['other error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(), 0, "field", "other error", msg_prefix=msg_prefix
)
def test_unbound_formset(self):
msg = (
"The formset <TestFormset: bound=False valid=Unknown total_forms=1> is not "
"bound, it will never have any errors."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(TestFormset(), 0, "field", [])
def test_empty_errors_invalid_formset(self):
msg = (
"The errors of field 'field' on form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.invalid(), 0, "field", [])
self.assertIn("['invalid value'] != []", str(ctx.exception))
def test_non_field_errors(self):
self.assertFormsetError(
TestFormset.invalid(nonfield=True), 0, None, "non-field error"
)
def test_different_non_field_errors(self):
msg = (
"The non-field errors of form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(
TestFormset.invalid(nonfield=True), 0, None, "other non-field error"
)
self.assertIn(
"['non-field error'] != ['other non-field error']", str(ctx.exception)
)
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(nonfield=True),
0,
None,
"other non-field error",
msg_prefix=msg_prefix,
)
def test_no_non_field_errors(self):
msg = (
"The non-field errors of form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.invalid(), 0, None, "non-field error")
self.assertIn("[] != ['non-field error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(), 0, None, "non-field error", msg_prefix=msg_prefix
)
def test_non_form_errors(self):
self.assertFormsetError(TestFormset.invalid(nonform=True), None, None, "error")
def test_different_non_form_errors(self):
msg = (
"The non-form errors of formset <TestFormset: bound=True valid=False "
"total_forms=0> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(
TestFormset.invalid(nonform=True), None, None, "other error"
)
self.assertIn("['error'] != ['other error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(nonform=True),
None,
None,
"other error",
msg_prefix=msg_prefix,
)
def test_no_non_form_errors(self):
msg = (
"The non-form errors of formset <TestFormset: bound=True valid=False "
"total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.invalid(), None, None, "error")
self.assertIn("[] != ['error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(),
None,
None,
"error",
msg_prefix=msg_prefix,
)
def test_non_form_errors_with_field(self):
msg = "You must use field=None with form_index=None."
with self.assertRaisesMessage(ValueError, msg):
self.assertFormsetError(
TestFormset.invalid(nonform=True), None, "field", "error"
)
def test_form_index_too_big(self):
msg = (
"The formset <TestFormset: bound=True valid=False total_forms=1> only has "
"1 form."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(TestFormset.invalid(), 2, "field", "error")
def test_form_index_too_big_plural(self):
formset = TestFormset(
{
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "0",
"form-0-field": "valid",
"form-1-field": "valid",
}
)
formset.full_clean()
msg = (
"The formset <TestFormset: bound=True valid=True total_forms=2> only has 2 "
"forms."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(formset, 2, "field", "error")
# RemovedInDjango50Warning
class AssertFormErrorDeprecationTests(SimpleTestCase):
"""
Exhaustively test all possible combinations of args/kwargs for the old
signature.
"""
@ignore_warnings(category=RemovedInDjango50Warning)
def test_assert_form_error_errors_none(self):
msg = (
"The errors of field 'field' on form <TestForm bound=True, valid=False, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(TestForm.invalid(), "field", None)
def test_assert_form_error_errors_none_warning(self):
msg = (
"Passing errors=None to assertFormError() is deprecated, use "
"errors=[] instead."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
self.assertFormError(TestForm.valid(), "field", None)
def _assert_form_error_old_api_cases(self, form, field, errors, msg_prefix):
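        # Enumerate every positional/keyword split of the deprecated
        # response-based assertFormError() signature.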
response = mock.Mock(context=[{"form": TestForm.invalid()}])
return (
((response, form, field, errors), {}),
((response, form, field, errors, msg_prefix), {}),
((response, form, field, errors), {"msg_prefix": msg_prefix}),
((response, form, field), {"errors": errors}),
((response, form, field), {"errors": errors, "msg_prefix": msg_prefix}),
((response, form), {"field": field, "errors": errors}),
(
(response, form),
{"field": field, "errors": errors, "msg_prefix": msg_prefix},
),
((response,), {"form": form, "field": field, "errors": errors}),
(
(response,),
{
"form": form,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
(
(),
{"response": response, "form": form, "field": field, "errors": errors},
),
(
(),
{
"response": response,
"form": form,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
)
def test_assert_form_error_old_api(self):
deprecation_msg = (
"Passing response to assertFormError() is deprecated. Use the form object "
"directly: assertFormError(response.context['form'], 'field', ...)"
)
for args, kwargs in self._assert_form_error_old_api_cases(
form="form",
field="field",
errors=["invalid value"],
msg_prefix="Custom prefix",
):
with self.subTest(args=args, kwargs=kwargs):
with self.assertWarnsMessage(RemovedInDjango50Warning, deprecation_msg):
self.assertFormError(*args, **kwargs)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_assert_form_error_old_api_assertion_error(self):
for args, kwargs in self._assert_form_error_old_api_cases(
form="form",
field="field",
errors=["other error"],
msg_prefix="Custom prefix",
):
with self.subTest(args=args, kwargs=kwargs):
with self.assertRaises(AssertionError):
self.assertFormError(*args, **kwargs)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_assert_formset_error_errors_none(self):
msg = (
"The errors of field 'field' on form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(TestFormset.invalid(), 0, "field", None)
def test_assert_formset_error_errors_none_warning(self):
msg = (
"Passing errors=None to assertFormsetError() is deprecated, use "
"errors=[] instead."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
self.assertFormsetError(TestFormset.valid(), 0, "field", None)
def _assert_formset_error_old_api_cases(
self, formset, form_index, field, errors, msg_prefix
):
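        # Same exhaustive enumeration for the deprecated response-based
        # assertFormsetError() signature.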
response = mock.Mock(context=[{"formset": TestFormset.invalid()}])
return (
((response, formset, form_index, field, errors), {}),
((response, formset, form_index, field, errors, msg_prefix), {}),
(
(response, formset, form_index, field, errors),
{"msg_prefix": msg_prefix},
),
((response, formset, form_index, field), {"errors": errors}),
(
(response, formset, form_index, field),
{"errors": errors, "msg_prefix": msg_prefix},
),
((response, formset, form_index), {"field": field, "errors": errors}),
(
(response, formset, form_index),
{"field": field, "errors": errors, "msg_prefix": msg_prefix},
),
(
(response, formset),
{"form_index": form_index, "field": field, "errors": errors},
),
(
(response, formset),
{
"form_index": form_index,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
(
(response,),
{
"formset": formset,
"form_index": form_index,
"field": field,
"errors": errors,
},
),
(
(response,),
{
"formset": formset,
"form_index": form_index,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
(
(),
{
"response": response,
"formset": formset,
"form_index": form_index,
"field": field,
"errors": errors,
},
),
(
(),
{
"response": response,
"formset": formset,
"form_index": form_index,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
)
def test_assert_formset_error_old_api(self):
deprecation_msg = (
"Passing response to assertFormsetError() is deprecated. Use the formset "
"object directly: assertFormsetError(response.context['formset'], 0, ...)"
)
for args, kwargs in self._assert_formset_error_old_api_cases(
formset="formset",
form_index=0,
field="field",
errors=["invalid value"],
msg_prefix="Custom prefix",
):
with self.subTest(args=args, kwargs=kwargs):
with self.assertWarnsMessage(RemovedInDjango50Warning, deprecation_msg):
self.assertFormsetError(*args, **kwargs)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_assert_formset_error_old_api_assertion_error(self):
for args, kwargs in self._assert_formset_error_old_api_cases(
formset="formset",
form_index=0,
field="field",
errors=["other error"],
msg_prefix="Custom prefix",
):
with self.subTest(args=args, kwargs=kwargs):
with self.assertRaises(AssertionError):
self.assertFormsetError(*args, **kwargs)
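# Minimal URLconf stand-ins used to check that overriding ROOT_URLCONF clears
# the URL reversing cache between tests.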
class FirstUrls:
urlpatterns = [path("first/", empty_response, name="first")]
class SecondUrls:
urlpatterns = [path("second/", empty_response, name="second")]
class SetupTestEnvironmentTests(SimpleTestCase):
def test_setup_test_environment_calling_more_than_once(self):
with self.assertRaisesMessage(
RuntimeError, "setup_test_environment() was already called"
):
setup_test_environment()
def test_allowed_hosts(self):
for type_ in (list, tuple):
with self.subTest(type_=type_):
allowed_hosts = type_("*")
with mock.patch("django.test.utils._TestState") as x:
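                    # Removing saved_data makes setup_test_environment()
                    # treat the environment as not yet initialized.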
del x.saved_data
with self.settings(ALLOWED_HOSTS=allowed_hosts):
setup_test_environment()
self.assertEqual(settings.ALLOWED_HOSTS, ["*", "testserver"])
class OverrideSettingsTests(SimpleTestCase):
# #21518 -- If neither override_settings nor a setting_changed receiver
# clears the URL cache between tests, then one of test_first or
# test_second will fail.
@override_settings(ROOT_URLCONF=FirstUrls)
def test_urlconf_first(self):
reverse("first")
@override_settings(ROOT_URLCONF=SecondUrls)
def test_urlconf_second(self):
reverse("second")
def test_urlconf_cache(self):
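        # No URLconf is active outside the override blocks, and exiting an
        # inner override restores the outer one.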
with self.assertRaises(NoReverseMatch):
reverse("first")
with self.assertRaises(NoReverseMatch):
reverse("second")
with override_settings(ROOT_URLCONF=FirstUrls):
self.client.get(reverse("first"))
with self.assertRaises(NoReverseMatch):
reverse("second")
with override_settings(ROOT_URLCONF=SecondUrls):
with self.assertRaises(NoReverseMatch):
reverse("first")
self.client.get(reverse("second"))
self.client.get(reverse("first"))
with self.assertRaises(NoReverseMatch):
reverse("second")
with self.assertRaises(NoReverseMatch):
reverse("first")
with self.assertRaises(NoReverseMatch):
reverse("second")
def test_override_media_root(self):
"""
Overriding the MEDIA_ROOT setting should be reflected in the
base_location attribute of django.core.files.storage.default_storage.
"""
self.assertEqual(default_storage.base_location, "")
with self.settings(MEDIA_ROOT="test_value"):
self.assertEqual(default_storage.base_location, "test_value")
def test_override_media_url(self):
"""
Overriding the MEDIA_URL setting should be reflected in the
base_url attribute of django.core.files.storage.default_storage.
"""
self.assertEqual(default_storage.base_location, "")
with self.settings(MEDIA_URL="/test_value/"):
self.assertEqual(default_storage.base_url, "/test_value/")
def test_override_file_upload_permissions(self):
"""
Overriding the FILE_UPLOAD_PERMISSIONS setting should be reflected in
the file_permissions_mode attribute of
django.core.files.storage.default_storage.
"""
self.assertEqual(default_storage.file_permissions_mode, 0o644)
with self.settings(FILE_UPLOAD_PERMISSIONS=0o777):
self.assertEqual(default_storage.file_permissions_mode, 0o777)
def test_override_file_upload_directory_permissions(self):
"""
Overriding the FILE_UPLOAD_DIRECTORY_PERMISSIONS setting should be
reflected in the directory_permissions_mode attribute of
django.core.files.storage.default_storage.
"""
self.assertIsNone(default_storage.directory_permissions_mode)
with self.settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777):
self.assertEqual(default_storage.directory_permissions_mode, 0o777)
def test_override_database_routers(self):
"""
Overriding DATABASE_ROUTERS should update the base router.
"""
test_routers = [object()]
with self.settings(DATABASE_ROUTERS=test_routers):
self.assertEqual(router.routers, test_routers)
def test_override_static_url(self):
"""
Overriding the STATIC_URL setting should be reflected in the
base_url attribute of
django.contrib.staticfiles.storage.staticfiles_storage.
"""
with self.settings(STATIC_URL="/test/"):
self.assertEqual(staticfiles_storage.base_url, "/test/")
def test_override_static_root(self):
"""
Overriding the STATIC_ROOT setting should be reflected in the
location attribute of
django.contrib.staticfiles.storage.staticfiles_storage.
"""
with self.settings(STATIC_ROOT="/tmp/test"):
self.assertEqual(staticfiles_storage.location, os.path.abspath("/tmp/test"))
def test_override_staticfiles_storage(self):
"""
Overriding the STATICFILES_STORAGE setting should be reflected in
the value of django.contrib.staticfiles.storage.staticfiles_storage.
"""
new_class = "ManifestStaticFilesStorage"
new_storage = "django.contrib.staticfiles.storage." + new_class
with self.settings(STATICFILES_STORAGE=new_storage):
self.assertEqual(staticfiles_storage.__class__.__name__, new_class)
def test_override_staticfiles_finders(self):
"""
Overriding the STATICFILES_FINDERS setting should be reflected in
the return value of django.contrib.staticfiles.finders.get_finders.
"""
current = get_finders()
self.assertGreater(len(list(current)), 1)
finders = ["django.contrib.staticfiles.finders.FileSystemFinder"]
with self.settings(STATICFILES_FINDERS=finders):
self.assertEqual(len(list(get_finders())), len(finders))
def test_override_staticfiles_dirs(self):
"""
Overriding the STATICFILES_DIRS setting should be reflected in
the locations attribute of the
django.contrib.staticfiles.finders.FileSystemFinder instance.
"""
finder = get_finder("django.contrib.staticfiles.finders.FileSystemFinder")
test_path = "/tmp/test"
expected_location = ("", test_path)
self.assertNotIn(expected_location, finder.locations)
with self.settings(STATICFILES_DIRS=[test_path]):
finder = get_finder("django.contrib.staticfiles.finders.FileSystemFinder")
self.assertIn(expected_location, finder.locations)
class TestBadSetUpTestData(TestCase):
"""
An exception in setUpTestData() shouldn't leak a transaction which would
cascade across the rest of the test suite.
"""
class MyException(Exception):
pass
@classmethod
def setUpClass(cls):
try:
super().setUpClass()
except cls.MyException:
cls._in_atomic_block = connection.in_atomic_block
@classmethod
    def tearDownClass(cls):
# override to avoid a second cls._rollback_atomics() which would fail.
# Normal setUpClass() methods won't have exception handling so this
# method wouldn't typically be run.
pass
@classmethod
def setUpTestData(cls):
# Simulate a broken setUpTestData() method.
raise cls.MyException()
def test_failure_in_setUpTestData_should_rollback_transaction(self):
# setUpTestData() should call _rollback_atomics() so that the
# transaction doesn't leak.
self.assertFalse(self._in_atomic_block)
class CaptureOnCommitCallbacksTests(TestCase):
databases = {"default", "other"}
callback_called = False
def enqueue_callback(self, using="default"):
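        # Register an on_commit hook on the given database alias.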
def hook():
self.callback_called = True
transaction.on_commit(hook, using=using)
def test_no_arguments(self):
with self.captureOnCommitCallbacks() as callbacks:
self.enqueue_callback()
self.assertEqual(len(callbacks), 1)
self.assertIs(self.callback_called, False)
callbacks[0]()
self.assertIs(self.callback_called, True)
def test_using(self):
with self.captureOnCommitCallbacks(using="other") as callbacks:
self.enqueue_callback(using="other")
self.assertEqual(len(callbacks), 1)
self.assertIs(self.callback_called, False)
callbacks[0]()
self.assertIs(self.callback_called, True)
def test_different_using(self):
with self.captureOnCommitCallbacks(using="default") as callbacks:
self.enqueue_callback(using="other")
self.assertEqual(callbacks, [])
def test_execute(self):
with self.captureOnCommitCallbacks(execute=True) as callbacks:
self.enqueue_callback()
self.assertEqual(len(callbacks), 1)
self.assertIs(self.callback_called, True)
def test_pre_callback(self):
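        # Callbacks registered before entering captureOnCommitCallbacks()
        # aren't captured.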
def pre_hook():
pass
transaction.on_commit(pre_hook, using="default")
with self.captureOnCommitCallbacks() as callbacks:
self.enqueue_callback()
self.assertEqual(len(callbacks), 1)
self.assertNotEqual(callbacks[0], pre_hook)
def test_with_rolled_back_savepoint(self):
with self.captureOnCommitCallbacks() as callbacks:
try:
with transaction.atomic():
self.enqueue_callback()
raise IntegrityError
except IntegrityError:
# Inner transaction.atomic() has been rolled back.
pass
self.assertEqual(callbacks, [])
def test_execute_recursive(self):
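        # With execute=True, a callback that enqueues another callback causes
        # both to be executed and captured.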
with self.captureOnCommitCallbacks(execute=True) as callbacks:
transaction.on_commit(self.enqueue_callback)
self.assertEqual(len(callbacks), 2)
self.assertIs(self.callback_called, True)
def test_execute_tree(self):
"""
A visualisation of the callback tree tested. Each node is expected to
be visited only once:
└─branch_1
├─branch_2
│ ├─leaf_1
│ └─leaf_2
└─leaf_3
"""
branch_1_call_counter = 0
branch_2_call_counter = 0
leaf_1_call_counter = 0
leaf_2_call_counter = 0
leaf_3_call_counter = 0
def leaf_1():
nonlocal leaf_1_call_counter
leaf_1_call_counter += 1
def leaf_2():
nonlocal leaf_2_call_counter
leaf_2_call_counter += 1
def leaf_3():
nonlocal leaf_3_call_counter
leaf_3_call_counter += 1
def branch_1():
nonlocal branch_1_call_counter
branch_1_call_counter += 1
transaction.on_commit(branch_2)
transaction.on_commit(leaf_3)
def branch_2():
nonlocal branch_2_call_counter
branch_2_call_counter += 1
transaction.on_commit(leaf_1)
transaction.on_commit(leaf_2)
with self.captureOnCommitCallbacks(execute=True) as callbacks:
transaction.on_commit(branch_1)
self.assertEqual(branch_1_call_counter, 1)
self.assertEqual(branch_2_call_counter, 1)
self.assertEqual(leaf_1_call_counter, 1)
self.assertEqual(leaf_2_call_counter, 1)
self.assertEqual(leaf_3_call_counter, 1)
self.assertEqual(callbacks, [branch_1, branch_2, leaf_3, leaf_1, leaf_2])
class DisallowedDatabaseQueriesTests(SimpleTestCase):
def test_disallowed_database_connections(self):
expected_message = (
"Database connections to 'default' are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to "
"ensure proper test isolation or add 'default' to "
"test_utils.tests.DisallowedDatabaseQueriesTests.databases to "
"silence this failure."
)
with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message):
connection.connect()
with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message):
connection.temporary_connection()
def test_disallowed_database_queries(self):
expected_message = (
"Database queries to 'default' are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to "
"ensure proper test isolation or add 'default' to "
"test_utils.tests.DisallowedDatabaseQueriesTests.databases to "
"silence this failure."
)
with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message):
Car.objects.first()
def test_disallowed_database_chunked_cursor_queries(self):
expected_message = (
"Database queries to 'default' are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to "
"ensure proper test isolation or add 'default' to "
"test_utils.tests.DisallowedDatabaseQueriesTests.databases to "
"silence this failure."
)
with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message):
next(Car.objects.iterator())
class AllowedDatabaseQueriesTests(SimpleTestCase):
databases = {"default"}
def test_allowed_database_queries(self):
Car.objects.first()
def test_allowed_database_chunked_cursor_queries(self):
next(Car.objects.iterator(), None)
class DatabaseAliasTests(SimpleTestCase):
def setUp(self):
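        # Each test replaces the class-level databases attribute; restore the
        # original value afterwards.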
self.addCleanup(setattr, self.__class__, "databases", self.databases)
def test_no_close_match(self):
self.__class__.databases = {"void"}
message = (
"test_utils.tests.DatabaseAliasTests.databases refers to 'void' which is "
"not defined in settings.DATABASES."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
self._validate_databases()
def test_close_match(self):
self.__class__.databases = {"defualt"}
message = (
"test_utils.tests.DatabaseAliasTests.databases refers to 'defualt' which "
"is not defined in settings.DATABASES. Did you mean 'default'?"
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
self._validate_databases()
def test_match(self):
self.__class__.databases = {"default", "other"}
self.assertEqual(self._validate_databases(), frozenset({"default", "other"}))
def test_all(self):
self.__class__.databases = "__all__"
self.assertEqual(self._validate_databases(), frozenset(connections))
@isolate_apps("test_utils", attr_name="class_apps")
class IsolatedAppsTests(SimpleTestCase):
def test_installed_apps(self):
self.assertEqual(
[app_config.label for app_config in self.class_apps.get_app_configs()],
["test_utils"],
)
def test_class_decoration(self):
class ClassDecoration(models.Model):
pass
self.assertEqual(ClassDecoration._meta.apps, self.class_apps)
@isolate_apps("test_utils", kwarg_name="method_apps")
def test_method_decoration(self, method_apps):
class MethodDecoration(models.Model):
pass
self.assertEqual(MethodDecoration._meta.apps, method_apps)
def test_context_manager(self):
with isolate_apps("test_utils") as context_apps:
class ContextManager(models.Model):
pass
self.assertEqual(ContextManager._meta.apps, context_apps)
@isolate_apps("test_utils", kwarg_name="method_apps")
def test_nested(self, method_apps):
class MethodDecoration(models.Model):
pass
with isolate_apps("test_utils") as context_apps:
class ContextManager(models.Model):
pass
with isolate_apps("test_utils") as nested_context_apps:
class NestedContextManager(models.Model):
pass
self.assertEqual(MethodDecoration._meta.apps, method_apps)
self.assertEqual(ContextManager._meta.apps, context_apps)
self.assertEqual(NestedContextManager._meta.apps, nested_context_apps)
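# isolate_apps() hands each decorated scope a fresh app registry, so the
# throwaway models above never leak into the global Apps instance. A
# commented sketch of the lookup path (the model name is hypothetical):
#
#   with isolate_apps("test_utils") as apps:
#       class Scratch(models.Model):
#           pass
#       assert apps.get_model("test_utils", "Scratch") is Scratch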
class DoNothingDecorator(TestContextDecorator):
def enable(self):
pass
def disable(self):
pass
class TestContextDecoratorTests(SimpleTestCase):
@mock.patch.object(DoNothingDecorator, "disable")
def test_exception_in_setup(self, mock_disable):
"""An exception is setUp() is reraised after disable() is called."""
class ExceptionInSetUp(unittest.TestCase):
def setUp(self):
raise NotImplementedError("reraised")
decorator = DoNothingDecorator()
decorated_test_class = decorator.__call__(ExceptionInSetUp)()
self.assertFalse(mock_disable.called)
with self.assertRaisesMessage(NotImplementedError, "reraised"):
decorated_test_class.setUp()
decorated_test_class.doCleanups()
self.assertTrue(mock_disable.called)
def test_cleanups_run_after_tearDown(self):
calls = []
class SaveCallsDecorator(TestContextDecorator):
def enable(self):
calls.append("enable")
def disable(self):
calls.append("disable")
class AddCleanupInSetUp(unittest.TestCase):
def setUp(self):
calls.append("setUp")
self.addCleanup(lambda: calls.append("cleanup"))
decorator = SaveCallsDecorator()
decorated_test_class = decorator.__call__(AddCleanupInSetUp)()
decorated_test_class.setUp()
decorated_test_class.tearDown()
decorated_test_class.doCleanups()
self.assertEqual(calls, ["enable", "setUp", "cleanup", "disable"])
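# A minimal sketch, assuming only the TestContextDecorator hooks exercised
# above, of a decorator that tracks its own state; the class name and its
# "active" attribute are hypothetical.
class ActiveStateDecoratorSketch(TestContextDecorator):
    def __init__(self):
        super().__init__()
        self.active = False

    def enable(self):
        # Runs before setUp() (or on __enter__ as a context manager).
        self.active = True

    def disable(self):
        # When decorating a test class, this is registered via addCleanup(),
        # so it runs after tearDown().
        self.active = False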
|
6ca95e32b9cfa1dc298966d4e46511dcd1b3899884a42a28002da81d3f288720
import unittest
from io import StringIO
from django.db import connection
from django.test import TestCase
from django.test.runner import DiscoverRunner
from django.utils.version import PY311
from .models import Person
@unittest.skipUnless(
connection.vendor == "sqlite", "Only run on sqlite so we can check output SQL."
)
class TestDebugSQL(unittest.TestCase):
class PassingTest(TestCase):
def runTest(self):
Person.objects.filter(first_name="pass").count()
class FailingTest(TestCase):
def runTest(self):
Person.objects.filter(first_name="fail").count()
self.fail()
class ErrorTest(TestCase):
def runTest(self):
Person.objects.filter(first_name="error").count()
raise Exception
class ErrorSetUpTestDataTest(TestCase):
@classmethod
def setUpTestData(cls):
raise Exception
def runTest(self):
pass
class PassingSubTest(TestCase):
def runTest(self):
with self.subTest():
Person.objects.filter(first_name="subtest-pass").count()
class FailingSubTest(TestCase):
def runTest(self):
with self.subTest():
Person.objects.filter(first_name="subtest-fail").count()
self.fail()
class ErrorSubTest(TestCase):
def runTest(self):
with self.subTest():
Person.objects.filter(first_name="subtest-error").count()
raise Exception
def _test_output(self, verbosity):
runner = DiscoverRunner(debug_sql=True, verbosity=0)
suite = runner.test_suite()
suite.addTest(self.FailingTest())
suite.addTest(self.ErrorTest())
suite.addTest(self.PassingTest())
suite.addTest(self.PassingSubTest())
suite.addTest(self.FailingSubTest())
suite.addTest(self.ErrorSubTest())
old_config = runner.setup_databases()
stream = StringIO()
resultclass = runner.get_resultclass()
runner.test_runner(
verbosity=verbosity,
stream=stream,
resultclass=resultclass,
).run(suite)
runner.teardown_databases(old_config)
return stream.getvalue()
def test_output_normal(self):
full_output = self._test_output(1)
for output in self.expected_outputs:
self.assertIn(output, full_output)
for output in self.verbose_expected_outputs:
self.assertNotIn(output, full_output)
def test_output_verbose(self):
full_output = self._test_output(2)
for output in self.expected_outputs:
self.assertIn(output, full_output)
for output in self.verbose_expected_outputs:
self.assertIn(output, full_output)
expected_outputs = [
(
"""SELECT COUNT(*) AS "__count" """
"""FROM "test_runner_person" WHERE """
""""test_runner_person"."first_name" = 'error';"""
),
(
"""SELECT COUNT(*) AS "__count" """
"""FROM "test_runner_person" WHERE """
""""test_runner_person"."first_name" = 'fail';"""
),
(
"""SELECT COUNT(*) AS "__count" """
"""FROM "test_runner_person" WHERE """
""""test_runner_person"."first_name" = 'subtest-error';"""
),
(
"""SELECT COUNT(*) AS "__count" """
"""FROM "test_runner_person" WHERE """
""""test_runner_person"."first_name" = 'subtest-fail';"""
),
]
# Python 3.11 uses fully qualified test name in the output.
method_name = ".runTest" if PY311 else ""
test_class_path = "test_runner.test_debug_sql.TestDebugSQL"
verbose_expected_outputs = [
f"runTest ({test_class_path}.FailingTest{method_name}) ... FAIL",
f"runTest ({test_class_path}.ErrorTest{method_name}) ... ERROR",
f"runTest ({test_class_path}.PassingTest{method_name}) ... ok",
# If there are errors/failures in subtests but not in test itself,
# the status is not written. That behavior comes from Python.
f"runTest ({test_class_path}.FailingSubTest{method_name}) ...",
f"runTest ({test_class_path}.ErrorSubTest{method_name}) ...",
(
"""SELECT COUNT(*) AS "__count" """
"""FROM "test_runner_person" WHERE """
""""test_runner_person"."first_name" = 'pass';"""
),
(
"""SELECT COUNT(*) AS "__count" """
"""FROM "test_runner_person" WHERE """
""""test_runner_person"."first_name" = 'subtest-pass';"""
),
]
def test_setupclass_exception(self):
runner = DiscoverRunner(debug_sql=True, verbosity=0)
suite = runner.test_suite()
suite.addTest(self.ErrorSetUpTestDataTest())
old_config = runner.setup_databases()
stream = StringIO()
runner.test_runner(
verbosity=0,
stream=stream,
resultclass=runner.get_resultclass(),
).run(suite)
runner.teardown_databases(old_config)
output = stream.getvalue()
self.assertIn(
"ERROR: setUpClass "
"(test_runner.test_debug_sql.TestDebugSQL.ErrorSetUpTestDataTest)",
output,
)
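    # A commented usage sketch mirroring _test_output() above: with
    # debug_sql=True, the runner's result class captures each test's queries
    # and replays the SQL for failures and errors (and for passing tests once
    # verbosity >= 2).
    #
    #   runner = DiscoverRunner(debug_sql=True, verbosity=0)
    #   runner.test_runner(
    #       verbosity=2,
    #       stream=StringIO(),
    #       resultclass=runner.get_resultclass(),
    #   ).run(suite)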
|
3423459ef2e8b9809c2e13c83248fcd78e47d39dc1c40417f60df397e2c2f4b1
import pickle
import sys
import unittest
from django.test import SimpleTestCase
from django.test.runner import RemoteTestResult
from django.utils.version import PY311
try:
import tblib.pickling_support
except ImportError:
tblib = None
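# tblib's pickling support makes traceback objects picklable, which is what
# lets parallel workers ship full failure details back to the parent process.
# A commented sketch of the opt-in (assuming tblib is installed):
#
#   tblib.pickling_support.install()
#   try:
#       raise ValueError("woops")
#   except ValueError:
#       payload = pickle.dumps(sys.exc_info())  # now succeeds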
class ExceptionThatFailsUnpickling(Exception):
"""
After pickling, this class fails unpickling with an error about incorrect
arguments passed to __init__().
"""
def __init__(self, arg):
super().__init__()
class ParallelTestRunnerTest(SimpleTestCase):
"""
End-to-end tests of the parallel test runner.
    These tests are only meaningful when run in parallel using the
    --parallel option, though they also pass when run serially.
"""
def test_subtest(self):
"""
Passing subtests work.
"""
for i in range(2):
with self.subTest(index=i):
self.assertEqual(i, i)
class SampleFailingSubtest(SimpleTestCase):
# This method name doesn't begin with "test" to prevent test discovery
# from seeing it.
def dummy_test(self):
"""
A dummy test for testing subTest failures.
"""
for i in range(3):
with self.subTest(index=i):
self.assertEqual(i, 1)
class RemoteTestResultTest(SimpleTestCase):
def _test_error_exc_info(self):
try:
raise ValueError("woops")
except ValueError:
return sys.exc_info()
def test_was_successful_no_events(self):
result = RemoteTestResult()
self.assertIs(result.wasSuccessful(), True)
def test_was_successful_one_success(self):
result = RemoteTestResult()
result.addSuccess(None)
self.assertIs(result.wasSuccessful(), True)
def test_was_successful_one_expected_failure(self):
result = RemoteTestResult()
result.addExpectedFailure(None, self._test_error_exc_info())
self.assertIs(result.wasSuccessful(), True)
def test_was_successful_one_skip(self):
result = RemoteTestResult()
result.addSkip(None, "Skipped")
self.assertIs(result.wasSuccessful(), True)
@unittest.skipUnless(tblib is not None, "requires tblib to be installed")
def test_was_successful_one_error(self):
result = RemoteTestResult()
result.addError(None, self._test_error_exc_info())
self.assertIs(result.wasSuccessful(), False)
@unittest.skipUnless(tblib is not None, "requires tblib to be installed")
def test_was_successful_one_failure(self):
result = RemoteTestResult()
result.addFailure(None, self._test_error_exc_info())
self.assertIs(result.wasSuccessful(), False)
def test_picklable(self):
result = RemoteTestResult()
loaded_result = pickle.loads(pickle.dumps(result))
self.assertEqual(result.events, loaded_result.events)
def test_pickle_errors_detection(self):
picklable_error = RuntimeError("This is fine")
        fails_unpickling_error = ExceptionThatFailsUnpickling("arg")
result = RemoteTestResult()
result._confirm_picklable(picklable_error)
msg = "__init__() missing 1 required positional argument"
with self.assertRaisesMessage(TypeError, msg):
            result._confirm_picklable(fails_unpickling_error)
@unittest.skipUnless(tblib is not None, "requires tblib to be installed")
def test_add_failing_subtests(self):
"""
Failing subtests are added correctly using addSubTest().
"""
# Manually run a test with failing subtests to prevent the failures
# from affecting the actual test run.
result = RemoteTestResult()
subtest_test = SampleFailingSubtest(methodName="dummy_test")
subtest_test.run(result=result)
events = result.events
self.assertEqual(len(events), 4)
self.assertIs(result.wasSuccessful(), False)
event = events[1]
self.assertEqual(event[0], "addSubTest")
self.assertEqual(
str(event[2]),
"dummy_test (test_runner.test_parallel.SampleFailingSubtest%s) (index=0)"
# Python 3.11 uses fully qualified test name in the output.
% (".dummy_test" if PY311 else ""),
)
self.assertEqual(repr(event[3][1]), "AssertionError('0 != 1')")
event = events[2]
self.assertEqual(repr(event[3][1]), "AssertionError('2 != 1')")
|
1aca00f6f671ebef1dd158186b312c4b9604360de6028a0d66e449e54ce60159
import datetime
import itertools
import unittest
from copy import copy
from unittest import mock
from django.core.exceptions import FieldError
from django.core.management.color import no_style
from django.db import (
DatabaseError,
DataError,
IntegrityError,
OperationalError,
connection,
)
from django.db.models import (
CASCADE,
PROTECT,
AutoField,
BigAutoField,
BigIntegerField,
BinaryField,
BooleanField,
CharField,
CheckConstraint,
DateField,
DateTimeField,
DecimalField,
DurationField,
F,
FloatField,
ForeignKey,
ForeignObject,
Index,
IntegerField,
JSONField,
ManyToManyField,
Model,
OneToOneField,
OrderBy,
PositiveIntegerField,
Q,
SlugField,
SmallAutoField,
SmallIntegerField,
TextField,
TimeField,
UniqueConstraint,
UUIDField,
Value,
)
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Abs, Cast, Collate, Lower, Random, Upper
from django.db.models.indexes import IndexExpression
from django.db.transaction import TransactionManagementError, atomic
from django.test import TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext, isolate_apps, register_lookup
from .fields import CustomManyToManyField, InheritedManyToManyField, MediumBlobField
from .models import (
Author,
AuthorCharFieldWithIndex,
AuthorTextFieldWithIndex,
AuthorWithDefaultHeight,
AuthorWithEvenLongerName,
AuthorWithIndexedName,
AuthorWithIndexedNameAndBirthday,
AuthorWithUniqueName,
AuthorWithUniqueNameAndBirthday,
Book,
BookForeignObj,
BookWeak,
BookWithLongName,
BookWithO2O,
BookWithoutAuthor,
BookWithSlug,
IntegerPK,
Node,
Note,
NoteRename,
Tag,
TagIndexed,
TagM2MTest,
TagUniqueRename,
Thing,
UniqueTest,
new_apps,
)
class SchemaTests(TransactionTestCase):
"""
Tests for the schema-alteration code.
Be aware that these tests are more liable than most to false results,
as sometimes the code to check if a test has worked is almost as complex
as the code it is testing.
"""
available_apps = []
models = [
Author,
AuthorCharFieldWithIndex,
AuthorTextFieldWithIndex,
AuthorWithDefaultHeight,
AuthorWithEvenLongerName,
Book,
BookWeak,
BookWithLongName,
BookWithO2O,
BookWithSlug,
IntegerPK,
Node,
Note,
Tag,
TagIndexed,
TagM2MTest,
TagUniqueRename,
Thing,
UniqueTest,
]
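    # Every model listed above has its table dropped by delete_tables(), so
    # individual tests can create tables freely without worrying about
    # leftover state.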
# Utility functions
def setUp(self):
        # local_models should contain test-dependent model classes that will
        # be automatically removed from the app cache on test tear down.
self.local_models = []
# isolated_local_models contains models that are in test methods
# decorated with @isolate_apps.
self.isolated_local_models = []
def tearDown(self):
# Delete any tables made for our models
self.delete_tables()
new_apps.clear_cache()
for model in new_apps.get_models():
model._meta._expire_cache()
if "schema" in new_apps.all_models:
for model in self.local_models:
for many_to_many in model._meta.many_to_many:
through = many_to_many.remote_field.through
if through and through._meta.auto_created:
del new_apps.all_models["schema"][through._meta.model_name]
del new_apps.all_models["schema"][model._meta.model_name]
if self.isolated_local_models:
with connection.schema_editor() as editor:
for model in self.isolated_local_models:
editor.delete_model(model)
def delete_tables(self):
"Deletes all model tables for our models for a clean test environment"
converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
connection.disable_constraint_checking()
table_names = connection.introspection.table_names()
if connection.features.ignores_table_name_case:
table_names = [table_name.lower() for table_name in table_names]
for model in itertools.chain(SchemaTests.models, self.local_models):
tbl = converter(model._meta.db_table)
if connection.features.ignores_table_name_case:
tbl = tbl.lower()
if tbl in table_names:
editor.delete_model(model)
table_names.remove(tbl)
connection.enable_constraint_checking()
def column_classes(self, model):
with connection.cursor() as cursor:
columns = {
d[0]: (connection.introspection.get_field_type(d[1], d), d)
for d in connection.introspection.get_table_description(
cursor,
model._meta.db_table,
)
}
# SQLite has a different format for field_type
        for name, (field_type, desc) in columns.items():
            if isinstance(field_type, tuple):
                columns[name] = (field_type[0], desc)
return columns
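    # A sketch of the shape column_classes() returns (exact values vary by
    # backend): {"name": ("CharField", description_row), ...}. Index 6 of
    # the description row is the null_ok flag, which is why assertions below
    # read columns[<col>][1][6] to check nullability.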
def get_primary_key(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_primary_key_column(cursor, table)
def get_indexes(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return [
c["columns"][0]
for c in connection.introspection.get_constraints(
cursor, table
).values()
if c["index"] and len(c["columns"]) == 1
]
def get_uniques(self, table):
with connection.cursor() as cursor:
return [
c["columns"][0]
for c in connection.introspection.get_constraints(
cursor, table
).values()
if c["unique"] and len(c["columns"]) == 1
]
def get_constraints(self, table):
"""
Get the constraints on a table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def get_constraints_for_column(self, model, column_name):
constraints = self.get_constraints(model._meta.db_table)
constraints_for_column = []
for name, details in constraints.items():
if details["columns"] == [column_name]:
constraints_for_column.append(name)
return sorted(constraints_for_column)
def check_added_field_default(
self,
schema_editor,
model,
field,
field_name,
expected_default,
cast_function=None,
):
with connection.cursor() as cursor:
schema_editor.add_field(model, field)
cursor.execute(
"SELECT {} FROM {};".format(field_name, model._meta.db_table)
)
database_default = cursor.fetchall()[0][0]
            if cast_function and type(database_default) is not type(expected_default):
database_default = cast_function(database_default)
self.assertEqual(database_default, expected_default)
def get_constraints_count(self, table, column, fk_to):
"""
        Return a dict with keys 'fks', 'uniques', and 'indexes' indicating the
number of foreign keys, unique constraints, and indexes on
`table`.`column`. The `fk_to` argument is a 2-tuple specifying the
expected foreign key relationship's (table, column).
"""
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(cursor, table)
counts = {"fks": 0, "uniques": 0, "indexes": 0}
for c in constraints.values():
if c["columns"] == [column]:
if c["foreign_key"] == fk_to:
counts["fks"] += 1
if c["unique"]:
counts["uniques"] += 1
elif c["index"]:
counts["indexes"] += 1
return counts
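    # Usage sketch: get_constraints_count("schema_book", "author_id",
    # fk_to=("schema_author", "id")) might return
    # {"fks": 1, "uniques": 0, "indexes": 1} on a backend that both enforces
    # and indexes foreign keys.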
def get_column_collation(self, table, column):
with connection.cursor() as cursor:
return next(
f.collation
for f in connection.introspection.get_table_description(cursor, table)
if f.name == column
)
def assertIndexOrder(self, table, index, order):
constraints = self.get_constraints(table)
self.assertIn(index, constraints)
index_orders = constraints[index]["orders"]
self.assertTrue(
all(val == expected for val, expected in zip(index_orders, order))
)
def assertForeignKeyExists(self, model, column, expected_fk_table, field="id"):
"""
Fail if the FK constraint on `model.Meta.db_table`.`column` to
`expected_fk_table`.id doesn't exist.
"""
constraints = self.get_constraints(model._meta.db_table)
constraint_fk = None
for details in constraints.values():
if details["columns"] == [column] and details["foreign_key"]:
constraint_fk = details["foreign_key"]
break
self.assertEqual(constraint_fk, (expected_fk_table, field))
def assertForeignKeyNotExists(self, model, column, expected_fk_table):
with self.assertRaises(AssertionError):
self.assertForeignKeyExists(model, column, expected_fk_table)
# Tests
def test_creation_deletion(self):
"""
Tries creating a model's table, and then deleting it.
"""
with connection.schema_editor() as editor:
# Create the table
editor.create_model(Author)
# The table is there
list(Author.objects.all())
# Clean up that table
editor.delete_model(Author)
# No deferred SQL should be left over.
self.assertEqual(editor.deferred_sql, [])
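            # (delete_model() prunes deferred statements that reference the
            # dropped table, so nothing stale is flushed when the editor
            # exits.)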
# The table is gone
with self.assertRaises(DatabaseError):
list(Author.objects.all())
@skipUnlessDBFeature("supports_foreign_keys")
def test_fk(self):
"Creating tables out of FK order, then repointing, works"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Book)
editor.create_model(Author)
editor.create_model(Tag)
# Initial tables are there
list(Author.objects.all())
list(Book.objects.all())
# Make sure the FK constraint is present
with self.assertRaises(IntegrityError):
Book.objects.create(
author_id=1,
title="Much Ado About Foreign Keys",
pub_date=datetime.datetime.now(),
)
# Repoint the FK constraint
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Tag, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
self.assertForeignKeyExists(Book, "author_id", "schema_tag")
@skipUnlessDBFeature("can_create_inline_fk")
def test_inline_fk(self):
# Create some tables.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.create_model(Note)
self.assertForeignKeyNotExists(Note, "book_id", "schema_book")
# Add a foreign key from one to the other.
with connection.schema_editor() as editor:
new_field = ForeignKey(Book, CASCADE)
new_field.set_attributes_from_name("book")
editor.add_field(Note, new_field)
self.assertForeignKeyExists(Note, "book_id", "schema_book")
# Creating a FK field with a constraint uses a single statement without
# a deferred ALTER TABLE.
self.assertFalse(
[
sql
for sql in (str(statement) for statement in editor.deferred_sql)
if sql.startswith("ALTER TABLE") and "ADD CONSTRAINT" in sql
]
)
@skipUnlessDBFeature("can_create_inline_fk")
def test_add_inline_fk_update_data(self):
with connection.schema_editor() as editor:
editor.create_model(Node)
# Add an inline foreign key and update data in the same transaction.
new_field = ForeignKey(Node, CASCADE, related_name="new_fk", null=True)
new_field.set_attributes_from_name("new_parent_fk")
parent = Node.objects.create()
with connection.schema_editor() as editor:
editor.add_field(Node, new_field)
editor.execute("UPDATE schema_node SET new_parent_fk_id = %s;", [parent.pk])
assertIndex = (
self.assertIn
if connection.features.indexes_foreign_keys
else self.assertNotIn
)
assertIndex("new_parent_fk_id", self.get_indexes(Node._meta.db_table))
@skipUnlessDBFeature(
"can_create_inline_fk",
"allows_multiple_constraints_on_same_fields",
)
@isolate_apps("schema")
def test_add_inline_fk_index_update_data(self):
class Node(Model):
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Node)
# Add an inline foreign key, update data, and an index in the same
# transaction.
new_field = ForeignKey(Node, CASCADE, related_name="new_fk", null=True)
new_field.set_attributes_from_name("new_parent_fk")
parent = Node.objects.create()
with connection.schema_editor() as editor:
editor.add_field(Node, new_field)
Node._meta.add_field(new_field)
editor.execute("UPDATE schema_node SET new_parent_fk_id = %s;", [parent.pk])
editor.add_index(
Node, Index(fields=["new_parent_fk"], name="new_parent_inline_fk_idx")
)
self.assertIn("new_parent_fk_id", self.get_indexes(Node._meta.db_table))
@skipUnlessDBFeature("supports_foreign_keys")
def test_char_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorCharFieldWithIndex)
# Change CharField to FK
old_field = AuthorCharFieldWithIndex._meta.get_field("char_field")
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name("char_field")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorCharFieldWithIndex, old_field, new_field, strict=True
)
self.assertForeignKeyExists(
AuthorCharFieldWithIndex, "char_field_id", "schema_author"
)
@skipUnlessDBFeature("supports_foreign_keys")
@skipUnlessDBFeature("supports_index_on_text_field")
def test_text_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorTextFieldWithIndex)
# Change TextField to FK
old_field = AuthorTextFieldWithIndex._meta.get_field("text_field")
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name("text_field")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorTextFieldWithIndex, old_field, new_field, strict=True
)
self.assertForeignKeyExists(
AuthorTextFieldWithIndex, "text_field_id", "schema_author"
)
@isolate_apps("schema")
def test_char_field_pk_to_auto_field(self):
class Foo(Model):
id = CharField(max_length=255, primary_key=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Foo
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
@skipUnlessDBFeature("supports_foreign_keys")
def test_fk_to_proxy(self):
"Creating a FK to a proxy model creates database constraints."
class AuthorProxy(Author):
class Meta:
app_label = "schema"
apps = new_apps
proxy = True
class AuthorRef(Model):
author = ForeignKey(AuthorProxy, on_delete=CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [AuthorProxy, AuthorRef]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorRef)
self.assertForeignKeyExists(AuthorRef, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys")
def test_fk_db_constraint(self):
"The db_constraint parameter is respected"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Author)
editor.create_model(BookWeak)
# Initial tables are there
list(Author.objects.all())
list(Tag.objects.all())
list(BookWeak.objects.all())
self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
# Make a db_constraint=False FK
new_field = ForeignKey(Tag, CASCADE, db_constraint=False)
new_field.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
self.assertForeignKeyNotExists(Author, "tag_id", "schema_tag")
# Alter to one with a constraint
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
self.assertForeignKeyExists(Author, "tag_id", "schema_tag")
# Alter to one without a constraint again
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field2, new_field, strict=True)
self.assertForeignKeyNotExists(Author, "tag_id", "schema_tag")
@isolate_apps("schema")
def test_no_db_constraint_added_during_primary_key_change(self):
"""
When a primary key that's pointed to by a ForeignKey with
db_constraint=False is altered, a foreign key constraint isn't added.
"""
class Author(Model):
class Meta:
app_label = "schema"
class BookWeak(Model):
author = ForeignKey(Author, CASCADE, db_constraint=False)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWeak)
self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
old_field = Author._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = Author
new_field.set_attributes_from_name("id")
# @isolate_apps() and inner models are needed to have the model
# relations populated, otherwise this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
def _test_m2m_db_constraint(self, M2MFieldClass):
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(LocalAuthorWithM2M)
# Initial tables are there
list(LocalAuthorWithM2M.objects.all())
list(Tag.objects.all())
# Make a db_constraint=False FK
new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False)
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
self.assertForeignKeyNotExists(
new_field.remote_field.through, "tag_id", "schema_tag"
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_m2m_db_constraint(self):
self._test_m2m_db_constraint(ManyToManyField)
@skipUnlessDBFeature("supports_foreign_keys")
def test_m2m_db_constraint_custom(self):
self._test_m2m_db_constraint(CustomManyToManyField)
@skipUnlessDBFeature("supports_foreign_keys")
def test_m2m_db_constraint_inherited(self):
self._test_m2m_db_constraint(InheritedManyToManyField)
def test_add_field(self):
"""
Tests adding fields to models
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add the new field
new_field = IntegerField(null=True)
new_field.set_attributes_from_name("age")
with CaptureQueriesContext(
connection
) as ctx, connection.schema_editor() as editor:
editor.add_field(Author, new_field)
drop_default_sql = editor.sql_alter_column_no_default % {
"column": editor.quote_name(new_field.name),
}
self.assertFalse(
any(drop_default_sql in query["sql"] for query in ctx.captured_queries)
)
# Table is not rebuilt.
self.assertIs(
any("CREATE TABLE" in query["sql"] for query in ctx.captured_queries), False
)
self.assertIs(
any("DROP TABLE" in query["sql"] for query in ctx.captured_queries), False
)
columns = self.column_classes(Author)
self.assertEqual(
columns["age"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertTrue(columns["age"][1][6])
def test_add_field_remove_field(self):
"""
        Adding a field and removing it removes all deferred SQL referring to it.
"""
with connection.schema_editor() as editor:
# Create a table with a unique constraint on the slug field.
editor.create_model(Tag)
# Remove the slug column.
editor.remove_field(Tag, Tag._meta.get_field("slug"))
self.assertEqual(editor.deferred_sql, [])
def test_add_field_temp_default(self):
"""
Tests adding fields to models with a temporary default
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = CharField(max_length=30, default="Godwin")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns["surname"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(
columns["surname"][1][6],
connection.features.interprets_empty_strings_as_nulls,
)
def test_add_field_temp_default_boolean(self):
"""
Tests adding fields to models with a temporary default where
the default is False. (#21783)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = BooleanField(default=False)
new_field.set_attributes_from_name("awesome")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
        # BooleanFields are stored as TINYINT(1) on MySQL.
field_type = columns["awesome"][0]
self.assertEqual(
field_type, connection.features.introspected_field_types["BooleanField"]
)
def test_add_field_default_transform(self):
"""
Tests adding fields to models with a default that is not directly
valid in the database (#22581)
"""
class TestTransformField(IntegerField):
# Weird field that saves the count of items in its value
def get_default(self):
return self.default
def get_prep_value(self, value):
if value is None:
return 0
return len(value)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
        # Add the field with a default it needs to transform (len({1: 2}) == 1)
new_field = TestTransformField(default={1: 2})
new_field.set_attributes_from_name("thing")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is there
columns = self.column_classes(Author)
field_type, field_info = columns["thing"]
self.assertEqual(
field_type, connection.features.introspected_field_types["IntegerField"]
)
        # Make sure the values were transformed correctly: both pre-existing
        # rows received the prepped default, len({1: 2}) == 1.
self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)
def test_add_field_o2o_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Note)
new_field = OneToOneField(Note, CASCADE, null=True)
new_field.set_attributes_from_name("note")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertIn("note_id", columns)
self.assertTrue(columns["note_id"][1][6])
def test_add_field_binary(self):
"""
Tests binary fields get a sane default (#22851)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field
new_field = BinaryField(blank=True)
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
# MySQL annoyingly uses the same backend, so it'll come back as one of
# these two types.
self.assertIn(columns["bits"][0], ("BinaryField", "TextField"))
def test_add_field_durationfield_with_default(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
new_field = DurationField(default=datetime.timedelta(minutes=10))
new_field.set_attributes_from_name("duration")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns["duration"][0],
connection.features.introspected_field_types["DurationField"],
)
@unittest.skipUnless(connection.vendor == "mysql", "MySQL specific")
def test_add_binaryfield_mediumblob(self):
"""
Test adding a custom-sized binary field on MySQL (#24846).
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field with default
new_field = MediumBlobField(blank=True, default=b"123")
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
# Introspection treats BLOBs as TextFields
self.assertEqual(columns["bits"][0], "TextField")
def test_remove_field(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with CaptureQueriesContext(connection) as ctx:
editor.remove_field(Author, Author._meta.get_field("name"))
columns = self.column_classes(Author)
self.assertNotIn("name", columns)
if getattr(connection.features, "can_alter_table_drop_column", True):
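            # (Backends without native DROP COLUMN support, such as SQLite
            # before 3.35, rebuild the table instead, so these checks are
            # skipped there.)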
# Table is not rebuilt.
self.assertIs(
any("CREATE TABLE" in query["sql"] for query in ctx.captured_queries),
False,
)
self.assertIs(
any("DROP TABLE" in query["sql"] for query in ctx.captured_queries),
False,
)
def test_alter(self):
"""
Tests simple altering of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(
bool(columns["name"][1][6]),
bool(connection.features.interprets_empty_strings_as_nulls),
)
# Alter the name field to a TextField
old_field = Author._meta.get_field("name")
new_field = TextField(null=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertEqual(columns["name"][0], "TextField")
self.assertTrue(columns["name"][1][6])
# Change nullability again
new_field2 = TextField(null=False)
new_field2.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
columns = self.column_classes(Author)
self.assertEqual(columns["name"][0], "TextField")
self.assertEqual(
bool(columns["name"][1][6]),
bool(connection.features.interprets_empty_strings_as_nulls),
)
def test_alter_auto_field_to_integer_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to IntegerField
old_field = Author._meta.get_field("id")
new_field = IntegerField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Now that ID is an IntegerField, the database raises an error if it
# isn't provided.
if not connection.features.supports_unspecified_pk:
with self.assertRaises(DatabaseError):
Author.objects.create()
def test_alter_auto_field_to_char_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to CharField
old_field = Author._meta.get_field("id")
new_field = CharField(primary_key=True, max_length=50)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
@isolate_apps("schema")
def test_alter_auto_field_quoted_db_column(self):
class Foo(Model):
id = AutoField(primary_key=True, db_column='"quoted_id"')
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = Foo
new_field.db_column = '"quoted_id"'
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
Foo.objects.create()
def test_alter_not_unique_field_to_primary_key(self):
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change UUIDField to primary key.
old_field = Author._meta.get_field("uuid")
new_field = UUIDField(primary_key=True)
new_field.set_attributes_from_name("uuid")
new_field.model = Author
with connection.schema_editor() as editor:
editor.remove_field(Author, Author._meta.get_field("id"))
editor.alter_field(Author, old_field, new_field, strict=True)
# Redundant unique constraint is not added.
count = self.get_constraints_count(
Author._meta.db_table,
Author._meta.get_field("uuid").column,
None,
)
self.assertLessEqual(count["uniques"], 1)
@isolate_apps("schema")
def test_alter_primary_key_quoted_db_table(self):
class Foo(Model):
class Meta:
app_label = "schema"
db_table = '"foo"'
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = Foo
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
Foo.objects.create()
def test_alter_text_field(self):
# Regression for "BLOB/TEXT column 'info' can't have a default value")
# on MySQL.
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = TextField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
def test_alter_text_field_to_not_null_with_default_value(self):
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("address")
new_field = TextField(blank=True, default="", null=False)
new_field.set_attributes_from_name("address")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipUnlessDBFeature("can_defer_constraint_checks", "can_rollback_ddl")
def test_alter_fk_checks_deferred_constraints(self):
"""
#25492 - Altering a foreign key's structure and data in the same
transaction.
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field("parent")
new_field = ForeignKey(Node, CASCADE)
new_field.set_attributes_from_name("parent")
parent = Node.objects.create()
with connection.schema_editor() as editor:
# Update the parent FK to create a deferred constraint check.
Node.objects.update(parent=parent)
editor.alter_field(Node, old_field, new_field, strict=True)
def test_alter_text_field_to_date_field(self):
"""
#25002 - Test conversion of text field to date field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info="1988-05-05")
old_field = Note._meta.get_field("info")
new_field = DateField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns["info"][1][6])
def test_alter_text_field_to_datetime_field(self):
"""
#25002 - Test conversion of text field to datetime field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info="1988-05-05 3:16:17.4567")
old_field = Note._meta.get_field("info")
new_field = DateTimeField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns["info"][1][6])
def test_alter_text_field_to_time_field(self):
"""
#25002 - Test conversion of text field to time field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info="3:16:17.4567")
old_field = Note._meta.get_field("info")
new_field = TimeField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns["info"][1][6])
@skipIfDBFeature("interprets_empty_strings_as_nulls")
def test_alter_textual_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=50)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
@skipUnlessDBFeature("interprets_empty_strings_as_nulls")
def test_alter_textual_field_not_null_to_null(self):
"""
Nullability for textual fields is preserved on databases that
interpret empty strings as NULLs.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
columns = self.column_classes(Author)
# Field is nullable.
self.assertTrue(columns["uuid"][1][6])
# Change to NOT NULL.
old_field = Author._meta.get_field("uuid")
new_field = SlugField(null=False, blank=True)
new_field.set_attributes_from_name("uuid")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
# Nullability is preserved.
self.assertTrue(columns["uuid"][1][6])
def test_alter_numeric_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug="aaa")
old_field = UniqueTest._meta.get_field("year")
new_field = BigIntegerField()
new_field.set_attributes_from_name("year")
with connection.schema_editor() as editor:
editor.alter_field(UniqueTest, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug="bbb")
def test_alter_null_to_not_null(self):
"""
#23609 - Tests handling of default values when altering from NULL to NOT NULL.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertTrue(columns["height"][1][6])
# Create some test data
Author.objects.create(name="Not null author", height=12)
Author.objects.create(name="Null author")
# Verify null value
self.assertEqual(Author.objects.get(name="Not null author").height, 12)
self.assertIsNone(Author.objects.get(name="Null author").height)
# Alter the height field to NOT NULL with default
old_field = Author._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertFalse(columns["height"][1][6])
# Verify default value
self.assertEqual(Author.objects.get(name="Not null author").height, 12)
self.assertEqual(Author.objects.get(name="Null author").height, 42)
def test_alter_charfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_nulls when changing a CharField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change the CharField to null
old_field = Author._meta.get_field("name")
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_char_field_decrease_length(self):
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
Author.objects.create(name="x" * 255)
# Change max_length of CharField.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
msg = "value too long for type character varying(254)"
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(Author, old_field, new_field, strict=True)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_with_custom_db_type(self):
from django.contrib.postgres.fields import ArrayField
class Foo(Model):
field = ArrayField(CharField(max_length=255))
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("field")
new_field = ArrayField(CharField(max_length=16))
new_field.set_attributes_from_name("field")
new_field.model = Foo
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_array_field_decrease_base_field_length(self):
from django.contrib.postgres.fields import ArrayField
class ArrayModel(Model):
field = ArrayField(CharField(max_length=16))
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
ArrayModel.objects.create(field=["x" * 16])
old_field = ArrayModel._meta.get_field("field")
new_field = ArrayField(CharField(max_length=15))
new_field.set_attributes_from_name("field")
new_field.model = ArrayModel
with connection.schema_editor() as editor:
msg = "value too long for type character varying(15)"
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(ArrayModel, old_field, new_field, strict=True)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_array_field_decrease_nested_base_field_length(self):
from django.contrib.postgres.fields import ArrayField
class ArrayModel(Model):
field = ArrayField(ArrayField(CharField(max_length=16)))
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
ArrayModel.objects.create(field=[["x" * 16]])
old_field = ArrayModel._meta.get_field("field")
new_field = ArrayField(ArrayField(CharField(max_length=15)))
new_field.set_attributes_from_name("field")
new_field.model = ArrayModel
with connection.schema_editor() as editor:
msg = "value too long for type character varying(15)"
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(ArrayModel, old_field, new_field, strict=True)
def test_alter_textfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_nulls when changing a TextField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
# Change the TextField to null
old_field = Note._meta.get_field("info")
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
def test_alter_null_to_not_null_keeping_default(self):
"""
#23738 - Can change a nullable field with default to non-nullable
with the same default.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
# Ensure the field is right to begin with
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertTrue(columns["height"][1][6])
# Alter the height field to NOT NULL keeping the previous default
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorWithDefaultHeight, old_field, new_field, strict=True
)
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertFalse(columns["height"][1][6])
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_fk(self):
"""
Tests altering of FKs
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
# Alter the FK
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE, editable=False)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_to_fk(self):
"""
#24447 - Tests adding a FK constraint for an existing column
"""
class LocalBook(Model):
author = IntegerField()
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalBook]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBook)
# Ensure no FK constraint exists
constraints = self.get_constraints(LocalBook._meta.db_table)
for details in constraints.values():
if details["foreign_key"]:
self.fail(
"Found an unexpected FK constraint to %s" % details["columns"]
)
old_field = LocalBook._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(LocalBook, old_field, new_field, strict=True)
self.assertForeignKeyExists(LocalBook, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_o2o_to_fk(self):
"""
#24163 - Tests altering of OneToOneField to ForeignKey
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
# Ensure the field is right to begin with
columns = self.column_classes(BookWithO2O)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is unique
author = Author.objects.create(name="Joe")
BookWithO2O.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
BookWithO2O.objects.all().delete()
self.assertForeignKeyExists(BookWithO2O, "author_id", "schema_author")
# Alter the OneToOneField to ForeignKey
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is not unique anymore
Book.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
Book.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_fk_to_o2o(self):
"""
#24163 - Tests altering of ForeignKey to OneToOneField
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is not unique
author = Author.objects.create(name="Joe")
Book.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
Book.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
Book.objects.all().delete()
self.assertForeignKeyExists(Book, "author_id", "schema_author")
# Alter the ForeignKey to OneToOneField
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
columns = self.column_classes(BookWithO2O)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is unique now
BookWithO2O.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
self.assertForeignKeyExists(BookWithO2O, "author_id", "schema_author")
def test_alter_field_fk_to_o2o(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(
counts,
{"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
)
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index on ForeignKey is replaced with a unique constraint for
# OneToOneField.
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
def test_alter_field_fk_keeps_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(
counts,
{"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
)
old_field = Book._meta.get_field("author")
# on_delete changed from CASCADE.
new_field = ForeignKey(Author, PROTECT)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index remains.
self.assertEqual(
counts,
{"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
)
def test_alter_field_o2o_to_fk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint on OneToOneField is replaced with an index for
# ForeignKey.
self.assertEqual(counts, {"fks": expected_fks, "uniques": 0, "indexes": 1})
def test_alter_field_o2o_keeps_unique(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
old_field = BookWithO2O._meta.get_field("author")
# on_delete changed from CASCADE.
new_field = OneToOneField(Author, PROTECT)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint remains.
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
@skipUnlessDBFeature("ignores_table_name_case")
def test_alter_db_table_case(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Alter the case of the table
old_table_name = Author._meta.db_table
with connection.schema_editor() as editor:
editor.alter_db_table(Author, old_table_name, old_table_name.upper())
def test_alter_implicit_id_to_explicit(self):
"""
Should be able to convert an implicit "id" field to an explicit "id"
primary key field.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# This will fail if DROP DEFAULT is inadvertently executed on this
# field which drops the id sequence, at least on PostgreSQL.
Author.objects.create(name="Foo")
Author.objects.create(name="Bar")
def test_alter_autofield_pk_to_bigautofield_pk_sequence_owner(self):
"""
Converting an implicit PK to BigAutoField(primary_key=True) should keep
a sequence owner on PostgreSQL.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
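        # Create a row with an explicit pk so the sequence is left out of
        # sync with the column's data.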
Author.objects.create(name="Foo", pk=1)
with connection.cursor() as cursor:
sequence_reset_sqls = connection.ops.sequence_reset_sql(
no_style(), [Author]
)
if sequence_reset_sqls:
cursor.execute(sequence_reset_sqls[0])
# Fail on PostgreSQL if sequence is missing an owner.
self.assertIsNotNone(Author.objects.create(name="Bar"))
def test_alter_autofield_pk_to_smallautofield_pk_sequence_owner(self):
"""
Converting an implicit PK to SmallAutoField(primary_key=True) should
keep a sequence owner on PostgreSQL.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = SmallAutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
Author.objects.create(name="Foo", pk=1)
with connection.cursor() as cursor:
sequence_reset_sqls = connection.ops.sequence_reset_sql(
no_style(), [Author]
)
if sequence_reset_sqls:
cursor.execute(sequence_reset_sqls[0])
# Fail on PostgreSQL if sequence is missing an owner.
self.assertIsNotNone(Author.objects.create(name="Bar"))
def test_alter_int_pk_to_autofield_pk(self):
"""
        Should be able to alter an IntegerField(primary_key=True) to
        AutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field("i")
new_field = AutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
        # A model class representing the updated table.
class IntegerPKToAutoField(Model):
i = AutoField(primary_key=True)
j = IntegerField(unique=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = IntegerPK._meta.db_table
# An id (i) is generated by the database.
obj = IntegerPKToAutoField.objects.create(j=1)
self.assertIsNotNone(obj.i)
def test_alter_int_pk_to_bigautofield_pk(self):
"""
        Should be able to alter an IntegerField(primary_key=True) to
        BigAutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field("i")
new_field = BigAutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
        # A model class representing the updated table.
class IntegerPKToBigAutoField(Model):
i = BigAutoField(primary_key=True)
j = IntegerField(unique=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = IntegerPK._meta.db_table
# An id (i) is generated by the database.
obj = IntegerPKToBigAutoField.objects.create(j=1)
self.assertIsNotNone(obj.i)
@isolate_apps("schema")
def test_alter_smallint_pk_to_smallautofield_pk(self):
"""
        Should be able to alter a SmallIntegerField(primary_key=True) to
        SmallAutoField(primary_key=True).
"""
class SmallIntegerPK(Model):
i = SmallIntegerField(primary_key=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(SmallIntegerPK)
self.isolated_local_models = [SmallIntegerPK]
old_field = SmallIntegerPK._meta.get_field("i")
new_field = SmallAutoField(primary_key=True)
new_field.model = SmallIntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True)
def test_alter_int_pk_to_int_unique(self):
"""
        Should be able to alter an IntegerField(primary_key=True) to
        IntegerField(unique=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
# Delete the old PK
old_field = IntegerPK._meta.get_field("i")
new_field = IntegerField(unique=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# The primary key constraint is gone. Result depends on database:
# 'id' for SQLite, None for others (must not be 'i').
self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ("id", None))
# Set up a model class as it currently stands. The original IntegerPK
# class is now out of date and some backends make use of the whole
# model class when modifying a field (such as sqlite3 when remaking a
# table) so an outdated model class leads to incorrect results.
class Transitional(Model):
i = IntegerField(unique=True)
j = IntegerField(unique=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = "INTEGERPK"
        # The model requires a new PK.
old_field = Transitional._meta.get_field("j")
new_field = IntegerField(primary_key=True)
new_field.model = Transitional
new_field.set_attributes_from_name("j")
with connection.schema_editor() as editor:
editor.alter_field(Transitional, old_field, new_field, strict=True)
        # Create a model class representing the updated table.
class IntegerUnique(Model):
i = IntegerField(unique=True)
j = IntegerField(primary_key=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = "INTEGERPK"
# Ensure unique constraint works.
IntegerUnique.objects.create(i=1, j=1)
with self.assertRaises(IntegrityError):
IntegerUnique.objects.create(i=1, j=2)
def test_rename(self):
"""
        Tests simple renaming of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertNotIn("display_name", columns)
# Alter the name field's name
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("display_name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertEqual(
columns["display_name"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertNotIn("name", columns)
@isolate_apps("schema")
def test_rename_referenced_field(self):
class Author(Model):
name = CharField(max_length=255, unique=True)
class Meta:
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE, to_field="name")
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_field(Author, Author._meta.get_field("name"), new_field)
# Ensure the foreign key reference was updated.
self.assertForeignKeyExists(Book, "author_id", "schema_author", "renamed")
@skipIfDBFeature("interprets_empty_strings_as_nulls")
def test_rename_keep_null_status(self):
"""
Renaming a field shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = TextField()
new_field.set_attributes_from_name("detail_info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns["detail_info"][0], "TextField")
self.assertNotIn("info", columns)
with self.assertRaises(IntegrityError):
NoteRename.objects.create(detail_info=None)
def _test_m2m_create(self, M2MFieldClass):
"""
Tests M2M fields on models during creation
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2M)
# Ensure there is now an m2m table there
columns = self.column_classes(
LocalBookWithM2M._meta.get_field("tags").remote_field.through
)
self.assertEqual(
columns["tagm2mtest_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
def test_m2m_create(self):
self._test_m2m_create(ManyToManyField)
def test_m2m_create_custom(self):
self._test_m2m_create(CustomManyToManyField)
def test_m2m_create_inherited(self):
self._test_m2m_create(InheritedManyToManyField)
def _test_m2m_create_through(self, M2MFieldClass):
"""
Tests M2M fields on models during creation with through models
"""
class LocalTagThrough(Model):
book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
class LocalBookWithM2MThrough(Model):
tags = M2MFieldClass(
"TagM2MTest", related_name="books", through=LocalTagThrough
)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalTagThrough, LocalBookWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalTagThrough)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2MThrough)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalTagThrough)
self.assertEqual(
columns["book_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertEqual(
columns["tag_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
def test_m2m_create_through(self):
self._test_m2m_create_through(ManyToManyField)
def test_m2m_create_through_custom(self):
self._test_m2m_create_through(CustomManyToManyField)
def test_m2m_create_through_inherited(self):
self._test_m2m_create_through(InheritedManyToManyField)
def test_m2m_through_remove(self):
class LocalAuthorNoteThrough(Model):
book = ForeignKey("schema.Author", CASCADE)
tag = ForeignKey("self", CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
class LocalNoteWithM2MThrough(Model):
authors = ManyToManyField("schema.Author", through=LocalAuthorNoteThrough)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorNoteThrough, LocalNoteWithM2MThrough]
# Create the tables.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalAuthorNoteThrough)
editor.create_model(LocalNoteWithM2MThrough)
# Remove the through parameter.
old_field = LocalNoteWithM2MThrough._meta.get_field("authors")
new_field = ManyToManyField("Author")
new_field.set_attributes_from_name("authors")
msg = (
f"Cannot alter field {old_field} into {new_field} - they are not "
f"compatible types (you cannot alter to or from M2M fields, or add or "
f"remove through= on M2M fields)"
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.alter_field(LocalNoteWithM2MThrough, old_field, new_field)
def _test_m2m(self, M2MFieldClass):
"""
Tests adding/removing M2M fields on models
"""
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorWithM2M)
editor.create_model(TagM2MTest)
# Create an M2M field
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors")
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
# Ensure there is now an m2m table there
columns = self.column_classes(new_field.remote_field.through)
self.assertEqual(
columns["tagm2mtest_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# "Alter" the field. This should not rename the DB table to itself.
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True)
# Remove the M2M table again
with connection.schema_editor() as editor:
editor.remove_field(LocalAuthorWithM2M, new_field)
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Make sure the model state is coherent with the table one now that
# we've removed the tags field.
opts = LocalAuthorWithM2M._meta
opts.local_many_to_many.remove(new_field)
del new_apps.all_models["schema"][
new_field.remote_field.through._meta.model_name
]
opts._expire_cache()
def test_m2m(self):
self._test_m2m(ManyToManyField)
def test_m2m_custom(self):
self._test_m2m(CustomManyToManyField)
def test_m2m_inherited(self):
self._test_m2m(InheritedManyToManyField)
def _test_m2m_through_alter(self, M2MFieldClass):
"""
Tests altering M2Ms with explicit through models (should no-op)
"""
class LocalAuthorTag(Model):
author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
class LocalAuthorWithM2MThrough(Model):
name = CharField(max_length=255)
tags = M2MFieldClass(
"schema.TagM2MTest", related_name="authors", through=LocalAuthorTag
)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorTag)
editor.create_model(LocalAuthorWithM2MThrough)
editor.create_model(TagM2MTest)
# Ensure the m2m table is there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
# "Alter" the field's blankness. This should not actually do anything.
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags")
new_field = M2MFieldClass(
"schema.TagM2MTest", related_name="authors", through=LocalAuthorTag
)
new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags")
with connection.schema_editor() as editor:
editor.alter_field(
LocalAuthorWithM2MThrough, old_field, new_field, strict=True
)
# Ensure the m2m table is still there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
def test_m2m_through_alter(self):
self._test_m2m_through_alter(ManyToManyField)
def test_m2m_through_alter_custom(self):
self._test_m2m_through_alter(CustomManyToManyField)
def test_m2m_through_alter_inherited(self):
self._test_m2m_through_alter(InheritedManyToManyField)
def _test_m2m_repoint(self, M2MFieldClass):
"""
Tests repointing M2M fields
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBookWithM2M)
editor.create_model(TagM2MTest)
editor.create_model(UniqueTest)
# Ensure the M2M exists and points to TagM2MTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(
LocalBookWithM2M._meta.get_field("tags").remote_field.through,
"tagm2mtest_id",
"schema_tagm2mtest",
)
# Repoint the M2M
old_field = LocalBookWithM2M._meta.get_field("tags")
new_field = M2MFieldClass(UniqueTest)
new_field.contribute_to_class(LocalBookWithM2M, "uniques")
with connection.schema_editor() as editor:
editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True)
# Ensure old M2M is gone
with self.assertRaises(DatabaseError):
self.column_classes(
LocalBookWithM2M._meta.get_field("tags").remote_field.through
)
        # Update the old model's state to match the repointed schema so
        # teardown works.
opts = LocalBookWithM2M._meta
opts.local_many_to_many.remove(old_field)
# Ensure the new M2M exists and points to UniqueTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(
new_field.remote_field.through, "uniquetest_id", "schema_uniquetest"
)
def test_m2m_repoint(self):
self._test_m2m_repoint(ManyToManyField)
def test_m2m_repoint_custom(self):
self._test_m2m_repoint(CustomManyToManyField)
def test_m2m_repoint_inherited(self):
self._test_m2m_repoint(InheritedManyToManyField)
@isolate_apps("schema")
def test_m2m_rename_field_in_target_model(self):
class LocalTagM2MTest(Model):
title = CharField(max_length=255)
class Meta:
app_label = "schema"
class LocalM2M(Model):
tags = ManyToManyField(LocalTagM2MTest)
class Meta:
app_label = "schema"
# Create the tables.
with connection.schema_editor() as editor:
editor.create_model(LocalM2M)
editor.create_model(LocalTagM2MTest)
self.isolated_local_models = [LocalM2M, LocalTagM2MTest]
        # Ensure the model's table is there (LocalM2M has only its id column).
        self.assertEqual(len(self.column_classes(LocalM2M)), 1)
# Alter a field in LocalTagM2MTest.
old_field = LocalTagM2MTest._meta.get_field("title")
new_field = CharField(max_length=254)
new_field.contribute_to_class(LocalTagM2MTest, "title1")
# @isolate_apps() and inner models are needed to have the model
# relations populated, otherwise this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True)
        # Ensure the model's table is still there.
        self.assertEqual(len(self.column_classes(LocalM2M)), 1)
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
def test_check_constraints(self):
"""
Tests creating/deleting CHECK constraints
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the constraint exists
constraints = self.get_constraints(Author._meta.db_table)
if not any(
details["columns"] == ["height"] and details["check"]
for details in constraints.values()
):
self.fail("No check constraint for height found")
# Alter the column to remove it
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
for details in constraints.values():
if details["columns"] == ["height"] and details["check"]:
self.fail("Check constraint for height found")
# Alter the column to re-add it
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
if not any(
details["columns"] == ["height"] and details["check"]
for details in constraints.values()
):
self.fail("No check constraint for height found")
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
@isolate_apps("schema")
def test_check_constraint_timedelta_param(self):
class DurationModel(Model):
duration = DurationField()
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(DurationModel)
self.isolated_local_models = [DurationModel]
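        # The timedelta in the condition must be embedded correctly in the
        # generated CHECK constraint SQL.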
constraint_name = "duration_gte_5_minutes"
constraint = CheckConstraint(
check=Q(duration__gt=datetime.timedelta(minutes=5)),
name=constraint_name,
)
DurationModel._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(DurationModel, constraint)
constraints = self.get_constraints(DurationModel._meta.db_table)
self.assertIn(constraint_name, constraints)
with self.assertRaises(IntegrityError), atomic():
DurationModel.objects.create(duration=datetime.timedelta(minutes=4))
DurationModel.objects.create(duration=datetime.timedelta(minutes=10))
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
def test_remove_field_check_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the custom check constraint
constraint = CheckConstraint(
check=Q(height__gte=0), name="author_height_gte_0_check"
)
custom_constraint_name = constraint.name
Author._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["height"]
and details["check"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Alter the column to remove field check
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["height"]
and details["check"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Alter the column to re-add field check
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["height"]
and details["check"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the check constraint
with connection.schema_editor() as editor:
Author._meta.constraints = []
editor.remove_constraint(Author, constraint)
def test_unique(self):
"""
Tests removing and adding unique constraints to a single column.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the field is unique to begin with
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be non-unique
old_field = Tag._meta.get_field("slug")
new_field = SlugField(unique=False)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
# Ensure the field is no longer unique
Tag.objects.create(title="foo", slug="foo")
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be unique
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
# Ensure the field is unique again
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Rename the field
new_field3 = SlugField(unique=True)
new_field3.set_attributes_from_name("slug2")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field2, new_field3, strict=True)
# Ensure the field is still unique
TagUniqueRename.objects.create(title="foo", slug2="foo")
with self.assertRaises(IntegrityError):
TagUniqueRename.objects.create(title="bar", slug2="foo")
Tag.objects.all().delete()
def test_unique_name_quoting(self):
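        # Rename the table to a dashed name so the generated constraint SQL
        # must quote identifiers to stay valid.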
old_table_name = TagUniqueRename._meta.db_table
try:
with connection.schema_editor() as editor:
editor.create_model(TagUniqueRename)
editor.alter_db_table(TagUniqueRename, old_table_name, "unique-table")
TagUniqueRename._meta.db_table = "unique-table"
# This fails if the unique index name isn't quoted.
editor.alter_unique_together(TagUniqueRename, [], (("title", "slug2"),))
finally:
with connection.schema_editor() as editor:
editor.delete_model(TagUniqueRename)
TagUniqueRename._meta.db_table = old_table_name
@isolate_apps("schema")
@skipUnlessDBFeature("supports_foreign_keys")
def test_unique_no_unnecessary_fk_drops(self):
"""
        If AlterField isn't selective about dropping foreign key constraints
        when modifying a field with a unique constraint, it incorrectly drops
        and recreates the Book.author foreign key even though that constraint
        doesn't restrict the field being changed (#29193).
"""
class Author(Model):
name = CharField(max_length=254, unique=True)
class Meta:
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.model = Author
new_field.set_attributes_from_name("name")
with self.assertLogs("django.db.backends.schema", "DEBUG") as cm:
with connection.schema_editor() as editor:
editor.alter_field(Author, Author._meta.get_field("name"), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
@isolate_apps("schema")
def test_unique_and_reverse_m2m(self):
"""
AlterField can modify a unique field when there's a reverse M2M
relation on the model.
"""
class Tag(Model):
title = CharField(max_length=255)
slug = SlugField(unique=True)
class Meta:
app_label = "schema"
class Book(Model):
tags = ManyToManyField(Tag, related_name="books")
class Meta:
app_label = "schema"
        self.isolated_local_models = [
            Book._meta.get_field("tags").remote_field.through,
        ]
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Book)
new_field = SlugField(max_length=75, unique=True)
new_field.model = Tag
new_field.set_attributes_from_name("slug")
with self.assertLogs("django.db.backends.schema", "DEBUG") as cm:
with connection.schema_editor() as editor:
editor.alter_field(Tag, Tag._meta.get_field("slug"), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
# Ensure that the field is still unique.
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_field_unique_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithUniqueName)
self.local_models = [AuthorWithUniqueName]
# Add the custom unique constraint
constraint = UniqueConstraint(fields=["name"], name="author_name_uniq")
custom_constraint_name = constraint.name
AuthorWithUniqueName._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(AuthorWithUniqueName, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Alter the column to remove field uniqueness
old_field = AuthorWithUniqueName._meta.get_field("name")
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True)
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Alter the column to re-add field uniqueness
new_field2 = AuthorWithUniqueName._meta.get_field("name")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True)
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the unique constraint
with connection.schema_editor() as editor:
AuthorWithUniqueName._meta.constraints = []
editor.remove_constraint(AuthorWithUniqueName, constraint)
def test_unique_together(self):
"""
Tests removing and adding unique_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
# Ensure the fields are unique to begin with
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2011, slug="foo")
UniqueTest.objects.create(year=2011, slug="bar")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter the model to its non-unique-together companion
with connection.schema_editor() as editor:
editor.alter_unique_together(
UniqueTest, UniqueTest._meta.unique_together, []
)
# Ensure the fields are no longer unique
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(
UniqueTest, [], UniqueTest._meta.unique_together
)
# Ensure the fields are unique again
UniqueTest.objects.create(year=2012, slug="foo")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
def test_unique_together_with_fk(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
        # Ensure there's no unique_together to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [["author", "title"]])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [["author", "title"]], [])
def test_unique_together_with_fk_with_existing_index(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key, where the foreign key is added after the model is
created.
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithoutAuthor)
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
editor.add_field(BookWithoutAuthor, new_field)
# Ensure the fields aren't unique to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [["author", "title"]])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [["author", "title"]], [])
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_unique_together_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithUniqueNameAndBirthday)
self.local_models = [AuthorWithUniqueNameAndBirthday]
# Add the custom unique constraint
constraint = UniqueConstraint(
fields=["name", "birthday"], name="author_name_birthday_uniq"
)
custom_constraint_name = constraint.name
AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(
AuthorWithUniqueNameAndBirthday._meta.db_table
)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Remove unique together
unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together
with connection.schema_editor() as editor:
editor.alter_unique_together(
AuthorWithUniqueNameAndBirthday, unique_together, []
)
constraints = self.get_constraints(
AuthorWithUniqueNameAndBirthday._meta.db_table
)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Re-add unique together
with connection.schema_editor() as editor:
editor.alter_unique_together(
AuthorWithUniqueNameAndBirthday, [], unique_together
)
constraints = self.get_constraints(
AuthorWithUniqueNameAndBirthday._meta.db_table
)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the unique constraint
with connection.schema_editor() as editor:
AuthorWithUniqueNameAndBirthday._meta.constraints = []
editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint)
def test_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(fields=["name"], name="name_uq")
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIs(sql.references_table(table), True)
self.assertIs(sql.references_column(table, "name"), True)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(Upper("name").desc(), name="func_upper_uq")
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ["DESC"])
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
# SQL contains a database function.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_composite_func_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
constraint = UniqueConstraint(
Upper("title"),
Lower("slug"),
name="func_upper_lower_unq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(BookWithSlug, constraint)
sql = constraint.create_sql(BookWithSlug, editor)
table = BookWithSlug._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
# SQL contains database functions.
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "slug"), True)
sql = str(sql)
self.assertIn("UPPER(%s)" % editor.quote_name("title"), sql)
self.assertIn("LOWER(%s)" % editor.quote_name("slug"), sql)
self.assertLess(sql.index("UPPER"), sql.index("LOWER"))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(BookWithSlug, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_unique_constraint_field_and_expression(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
F("height").desc(),
"uuid",
Lower("name").asc(),
name="func_f_lower_field_unq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ["DESC", "ASC", "ASC"])
constraints = self.get_constraints(table)
self.assertIs(constraints[constraint.name]["unique"], True)
self.assertEqual(len(constraints[constraint.name]["columns"]), 3)
self.assertEqual(constraints[constraint.name]["columns"][1], "uuid")
# SQL contains database functions and columns.
self.assertIs(sql.references_column(table, "height"), True)
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "uuid"), True)
self.assertIn("LOWER(%s)" % editor.quote_name("name"), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_partial_indexes")
def test_func_unique_constraint_partial(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
Upper("name"),
name="func_upper_cond_weight_uq",
condition=Q(weight__isnull=False),
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql))
self.assertIn(
"WHERE %s IS NOT NULL" % editor.quote_name("weight"),
str(sql),
)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_covering_indexes")
def test_func_unique_constraint_covering(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
Upper("name"),
name="func_upper_covering_uq",
include=["weight", "height"],
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
self.assertEqual(
constraints[constraint.name]["columns"],
[None, "weight", "height"],
)
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "weight"), True)
self.assertIs(sql.references_column(table, "height"), True)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql))
self.assertIn(
"INCLUDE (%s, %s)"
% (
editor.quote_name("weight"),
editor.quote_name("height"),
),
str(sql),
)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_lookups(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
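        # Register Lower/Abs as lookups so the constraint can reference them
        # through the F("field__lookup") syntax.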
with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
constraint = UniqueConstraint(
F("name__lower"),
F("weight__abs"),
name="func_lower_abs_lookup_uq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
# SQL contains columns.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "weight"), True)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_collate(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
constraint = UniqueConstraint(
Collate(F("title"), collation=collation).desc(),
Collate("slug", collation=collation),
name="func_collate_uq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(BookWithSlug, constraint)
sql = constraint.create_sql(BookWithSlug, editor)
table = BookWithSlug._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ["DESC", "ASC"])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "slug"), True)
self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(BookWithSlug, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipIfDBFeature("supports_expression_indexes")
def test_func_unique_constraint_unsupported(self):
# UniqueConstraint is ignored on databases that don't support indexes on
# expressions.
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(F("name"), name="func_name_uq")
with connection.schema_editor() as editor, self.assertNumQueries(0):
self.assertIsNone(editor.add_constraint(Author, constraint))
self.assertIsNone(editor.remove_constraint(Author, constraint))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_nonexistent_field(self):
constraint = UniqueConstraint(Lower("nonexistent"), name="func_nonexistent_uq")
msg = (
"Cannot resolve keyword 'nonexistent' into field. Choices are: "
"height, id, name, uuid, weight"
)
with self.assertRaisesMessage(FieldError, msg):
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_nondeterministic(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(Random(), name="func_random_uq")
with connection.schema_editor() as editor:
with self.assertRaises(DatabaseError):
editor.add_constraint(Author, constraint)
def test_index_together(self):
"""
Tests removing and adding index_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
        # Ensure there's no index on the slug/title columns first
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c["columns"] == ["slug", "title"]
),
False,
)
# Alter the model to add an index
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [], [("slug", "title")])
# Ensure there is now an index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c["columns"] == ["slug", "title"]
),
True,
)
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [("slug", "title")], [])
# Ensure there's no index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c["columns"] == ["slug", "title"]
),
False,
)
def test_index_together_with_fk(self):
"""
Tests removing and adding index_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
        # Ensure there's no index_together to begin with
self.assertEqual(Book._meta.index_together, ())
        # Add the index_together
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [], [["author", "title"]])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [["author", "title"]], [])
def test_create_index_together(self):
"""
Tests creating models with index_together already defined
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(TagIndexed)
# Ensure there is an index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tagindexed").values()
if c["columns"] == ["slug", "title"]
),
True,
)
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_index_together_does_not_remove_meta_indexes(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedNameAndBirthday)
self.local_models = [AuthorWithIndexedNameAndBirthday]
# Add the custom index
index = Index(fields=["name", "birthday"], name="author_name_birthday_idx")
custom_index_name = index.name
AuthorWithIndexedNameAndBirthday._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedNameAndBirthday, index)
# Ensure the indexes exist
constraints = self.get_constraints(
AuthorWithIndexedNameAndBirthday._meta.db_table
)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["index"]
and name != custom_index_name
]
self.assertEqual(len(other_constraints), 1)
# Remove index together
index_together = AuthorWithIndexedNameAndBirthday._meta.index_together
with connection.schema_editor() as editor:
editor.alter_index_together(
AuthorWithIndexedNameAndBirthday, index_together, []
)
constraints = self.get_constraints(
AuthorWithIndexedNameAndBirthday._meta.db_table
)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["index"]
and name != custom_index_name
]
self.assertEqual(len(other_constraints), 0)
# Re-add index together
with connection.schema_editor() as editor:
editor.alter_index_together(
AuthorWithIndexedNameAndBirthday, [], index_together
)
constraints = self.get_constraints(
AuthorWithIndexedNameAndBirthday._meta.db_table
)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["index"]
and name != custom_index_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the index
with connection.schema_editor() as editor:
AuthorWithIndexedNameAndBirthday._meta.indexes = []
editor.remove_index(AuthorWithIndexedNameAndBirthday, index)
@isolate_apps("schema")
def test_db_table(self):
"""
Tests renaming of the table
"""
class Author(Model):
name = CharField(max_length=255)
class Meta:
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "schema"
# Create the table and one referring it.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there to begin with
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
# Alter the table
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_db_table(Author, "schema_author", "schema_otherauthor")
Author._meta.db_table = "schema_otherauthor"
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
# Ensure the foreign key reference was updated
self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor")
# Alter the table again
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_db_table(Author, "schema_otherauthor", "schema_author")
# Ensure the table is still there
Author._meta.db_table = "schema_author"
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
def test_add_remove_index(self):
"""
Tests index addition and removal
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
        # Ensure the table is there and the name column has no index
        self.assertNotIn("name", self.get_indexes(Author._meta.db_table))
        # Add the index
        index = Index(fields=["name"], name="author_name_idx")
with connection.schema_editor() as editor:
editor.add_index(Author, index)
self.assertIn("name", self.get_indexes(Author._meta.db_table))
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn("name", self.get_indexes(Author._meta.db_table))
def test_remove_db_index_doesnt_remove_custom_indexes(self):
"""
Changing db_index to False doesn't remove indexes from Meta.indexes.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedName)
self.local_models = [AuthorWithIndexedName]
# Ensure the table has its index
self.assertIn("name", self.get_indexes(AuthorWithIndexedName._meta.db_table))
# Add the custom index
index = Index(fields=["-name"], name="author_name_idx")
author_index_name = index.name
with connection.schema_editor() as editor:
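            # Compute the name Django auto-generates for the implicit
            # db_index=True index so it can be checked before and after.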
db_index_name = editor._create_index_name(
table_name=AuthorWithIndexedName._meta.db_table,
column_names=("name",),
)
try:
AuthorWithIndexedName._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedName, index)
old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertIn(author_index_name, old_constraints)
self.assertIn(db_index_name, old_constraints)
# Change name field to db_index=False
old_field = AuthorWithIndexedName._meta.get_field("name")
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorWithIndexedName, old_field, new_field, strict=True
)
new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertNotIn(db_index_name, new_constraints)
# The index from Meta.indexes is still in the database.
self.assertIn(author_index_name, new_constraints)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(AuthorWithIndexedName, index)
finally:
AuthorWithIndexedName._meta.indexes = []
def test_order_index(self):
"""
        Indexes defined with ordering (ASC/DESC) on columns.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
        # The name column doesn't have an index to begin with
        self.assertNotIn("name", self.get_indexes(Author._meta.db_table))
index_name = "author_name_idx"
# Add the index
index = Index(fields=["name", "-weight"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Author, index)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(Author._meta.db_table, index_name, ["ASC", "DESC"])
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
def test_indexes(self):
"""
Tests creation/altering of indexes
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there and has the right index
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to remove the index
old_field = Book._meta.get_field("title")
new_field = CharField(max_length=100, db_index=False)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the table is there and has no index
self.assertNotIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to re-add the index
new_field2 = Book._meta.get_field("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, new_field, new_field2, strict=True)
# Ensure the table is there and has the index again
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
        # Add a unique column, verify that it creates an implicit index
new_field3 = BookWithSlug._meta.get_field("slug")
with connection.schema_editor() as editor:
editor.add_field(Book, new_field3)
self.assertIn(
"slug",
self.get_uniques(Book._meta.db_table),
)
# Remove the unique, check the index goes with it
new_field4 = CharField(max_length=20, unique=False)
new_field4.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True)
self.assertNotIn(
"slug",
self.get_uniques(Book._meta.db_table),
)
def test_text_field_with_db_index(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorTextFieldWithIndex)
# The text_field index is present if the database supports it.
assertion = (
self.assertIn
if connection.features.supports_index_on_text_field
else self.assertNotIn
)
assertion(
"text_field", self.get_indexes(AuthorTextFieldWithIndex._meta.db_table)
)
def _index_expressions_wrappers(self):
index_expression = IndexExpression()
index_expression.set_wrapper_classes(connection)
return ", ".join(
[
wrapper_cls.__qualname__
for wrapper_cls in index_expression.wrapper_classes
]
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_multiple_wrapper_references(self):
index = Index(OrderBy(F("name").desc(), descending=True), name="name")
msg = (
"Multiple references to %s can't be used in an indexed expression."
% self._index_expressions_wrappers()
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.add_index(Author, index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_invalid_topmost_expressions(self):
index = Index(Upper(F("name").desc()), name="name")
msg = (
"%s must be topmost expressions in an indexed expression."
% self._index_expressions_wrappers()
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.add_index(Author, index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Lower("name").desc(), name="func_lower_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC"])
# SQL contains a database function.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("LOWER(%s)" % editor.quote_name("name"), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_f(self):
with connection.schema_editor() as editor:
editor.create_model(Tag)
index = Index("slug", F("title").desc(), name="func_f_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Tag, index)
sql = index.create_sql(Tag, editor)
table = Tag._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(Tag._meta.db_table, index.name, ["ASC", "DESC"])
# SQL contains columns.
self.assertIs(sql.references_column(table, "slug"), True)
self.assertIs(sql.references_column(table, "title"), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Tag, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_lookups(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
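            # Registering Lower/Abs as lookups lets F("name__lower") and
            # F("weight__abs") resolve inside the index expression.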
index = Index(
F("name__lower"),
F("weight__abs"),
name="func_lower_abs_lookup_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains columns.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "weight"), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_composite_func_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Lower("name"), Upper("name"), name="func_lower_upper_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains database functions.
self.assertIs(sql.references_column(table, "name"), True)
sql = str(sql)
self.assertIn("LOWER(%s)" % editor.quote_name("name"), sql)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), sql)
self.assertLess(sql.index("LOWER"), sql.index("UPPER"))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_composite_func_index_field_and_expression(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
index = Index(
F("author").desc(),
Lower("title").asc(),
"pub_date",
name="func_f_lower_field_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Book, index)
sql = index.create_sql(Book, editor)
table = Book._meta.db_table
constraints = self.get_constraints(table)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC", "ASC", "ASC"])
self.assertEqual(len(constraints[index.name]["columns"]), 3)
self.assertEqual(constraints[index.name]["columns"][2], "pub_date")
# SQL contains database functions and columns.
self.assertIs(sql.references_column(table, "author_id"), True)
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "pub_date"), True)
self.assertIn("LOWER(%s)" % editor.quote_name("title"), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
@isolate_apps("schema")
def test_func_index_f_decimalfield(self):
class Node(Model):
value = DecimalField(max_digits=5, decimal_places=2)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Node)
index = Index(F("value"), name="func_f_decimalfield_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Node, index)
sql = index.create_sql(Node, editor)
table = Node._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "value"), True)
# SQL doesn't contain casting.
self.assertNotIn("CAST", str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Node, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_cast(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Cast("weight", FloatField()), name="func_cast_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "weight"), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_collate(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
index = Index(
Collate(F("title"), collation=collation).desc(),
Collate("slug", collation=collation),
name="func_collate_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(BookWithSlug, index)
sql = index.create_sql(BookWithSlug, editor)
table = Book._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC", "ASC"])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "slug"), True)
self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
@skipIfDBFeature("collate_as_index_expression")
def test_func_index_collate_f_ordered(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(
Collate(F("name").desc(), collation=collation),
name="func_collate_f_desc_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC"])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_calc(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(F("height") / (F("weight") + Value(5)), name="func_calc_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains columns and expressions.
self.assertIs(sql.references_column(table, "height"), True)
self.assertIs(sql.references_column(table, "weight"), True)
sql = str(sql)
self.assertIs(
sql.index(editor.quote_name("height"))
< sql.index("/")
< sql.index(editor.quote_name("weight"))
< sql.index("+")
< sql.index("5"),
True,
)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_json_field")
@isolate_apps("schema")
def test_func_index_json_key_transform(self):
class JSONModel(Model):
field = JSONField()
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(JSONModel)
self.isolated_local_models = [JSONModel]
index = Index("field__some_key", name="func_json_key_idx")
with connection.schema_editor() as editor:
editor.add_index(JSONModel, index)
sql = index.create_sql(JSONModel, editor)
table = JSONModel._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "field"), True)
with connection.schema_editor() as editor:
editor.remove_index(JSONModel, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_json_field")
@isolate_apps("schema")
def test_func_index_json_key_transform_cast(self):
class JSONModel(Model):
field = JSONField()
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(JSONModel)
self.isolated_local_models = [JSONModel]
index = Index(
Cast(KeyTextTransform("some_key", "field"), IntegerField()),
name="func_json_key_cast_idx",
)
with connection.schema_editor() as editor:
editor.add_index(JSONModel, index)
sql = index.create_sql(JSONModel, editor)
table = JSONModel._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "field"), True)
with connection.schema_editor() as editor:
editor.remove_index(JSONModel, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipIfDBFeature("supports_expression_indexes")
def test_func_index_unsupported(self):
# Index is ignored on databases that don't support indexes on
# expressions.
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(F("name"), name="random_idx")
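        # add_index()/remove_index() are no-ops and issue no queries when
        # expression indexes are unsupported.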
with connection.schema_editor() as editor, self.assertNumQueries(0):
self.assertIsNone(editor.add_index(Author, index))
self.assertIsNone(editor.remove_index(Author, index))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_nonexistent_field(self):
index = Index(Lower("nonexistent"), name="func_nonexistent_idx")
msg = (
"Cannot resolve keyword 'nonexistent' into field. Choices are: "
"height, id, name, uuid, weight"
)
with self.assertRaisesMessage(FieldError, msg):
with connection.schema_editor() as editor:
editor.add_index(Author, index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_nondeterministic(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Random(), name="func_random_idx")
with connection.schema_editor() as editor:
with self.assertRaises(DatabaseError):
editor.add_index(Author, index)
def test_primary_key(self):
"""
Tests altering of the primary key
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the table is there and has the right PK
self.assertEqual(self.get_primary_key(Tag._meta.db_table), "id")
# Alter to change the PK
id_field = Tag._meta.get_field("id")
old_field = Tag._meta.get_field("slug")
new_field = SlugField(primary_key=True)
new_field.set_attributes_from_name("slug")
new_field.model = Tag
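        # The old auto-created "id" PK must be dropped before another column
        # can take over as primary key.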
with connection.schema_editor() as editor:
editor.remove_field(Tag, id_field)
editor.alter_field(Tag, old_field, new_field)
# Ensure the PK changed
self.assertNotIn(
"id",
self.get_indexes(Tag._meta.db_table),
)
self.assertEqual(self.get_primary_key(Tag._meta.db_table), "slug")
def test_context_manager_exit(self):
"""
Ensures transaction is correctly closed when an error occurs
inside a SchemaEditor context.
"""
class SomeError(Exception):
pass
try:
with connection.schema_editor():
raise SomeError
except SomeError:
self.assertFalse(connection.in_atomic_block)
@skipIfDBFeature("can_rollback_ddl")
def test_unsupported_transactional_ddl_disallowed(self):
message = (
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
with atomic(), connection.schema_editor() as editor:
with self.assertRaisesMessage(TransactionManagementError, message):
editor.execute(
editor.sql_create_table % {"table": "foo", "definition": ""}
)
@skipUnlessDBFeature("supports_foreign_keys", "indexes_foreign_keys")
def test_foreign_key_index_long_names_regression(self):
"""
Regression test for #21497.
        Only affects databases that support foreign keys.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Find the properly shortened column name
column_name = connection.ops.quote_name(
"author_foreign_key_with_really_long_field_name_id"
)
column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase
# Ensure the table is there and has an index on the column
self.assertIn(
column_name,
self.get_indexes(BookWithLongName._meta.db_table),
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_add_foreign_key_long_names(self):
"""
Regression test for #23009.
        Only affects databases that support foreign keys.
"""
# Create the initial tables
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
        # Add a second FK; before the fix this would fail due to the long
        # reference name.
new_field = ForeignKey(
AuthorWithEvenLongerName, CASCADE, related_name="something"
)
new_field.set_attributes_from_name(
"author_other_really_long_named_i_mean_so_long_fk"
)
with connection.schema_editor() as editor:
editor.add_field(BookWithLongName, new_field)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_foreign_keys")
def test_add_foreign_key_quoted_db_table(self):
class Author(Model):
class Meta:
db_table = '"table_author_double_quoted"'
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
self.isolated_local_models = [Author]
if connection.vendor == "mysql":
self.assertForeignKeyExists(
Book, "author_id", '"table_author_double_quoted"'
)
else:
self.assertForeignKeyExists(Book, "author_id", "table_author_double_quoted")
def test_add_foreign_object(self):
with connection.schema_editor() as editor:
editor.create_model(BookForeignObj)
self.local_models = [BookForeignObj]
new_field = ForeignObject(
Author, on_delete=CASCADE, from_fields=["author_id"], to_fields=["id"]
)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.add_field(BookForeignObj, new_field)
def test_creation_deletion_reserved_names(self):
"""
        Tries creating a model's table, and then deleting it, when the table
        is named after an SQL reserved word.
"""
# Create the table
with connection.schema_editor() as editor:
try:
editor.create_model(Thing)
except OperationalError as e:
self.fail(
"Errors when applying initial migration for a model "
"with a table named after an SQL reserved word: %s" % e
)
# The table is there
list(Thing.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Thing)
# The table is gone
with self.assertRaises(DatabaseError):
list(Thing.objects.all())
def test_remove_constraints_capital_letters(self):
"""
#23065 - Constraint names must be quoted if they contain capital letters.
"""
def get_field(*args, field_class=IntegerField, **kwargs):
kwargs["db_column"] = "CamelCase"
field = field_class(*args, **kwargs)
field.set_attributes_from_name("CamelCase")
return field
model = Author
field = get_field()
table = model._meta.db_table
column = field.column
identifier_converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
editor.create_model(model)
editor.add_field(model, field)
constraint_name = "CamelCaseIndex"
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_index
% {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"using": "",
"columns": editor.quote_name(column),
"extra": "",
"condition": "",
"include": "",
}
)
self.assertIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
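            # Altering away from db_index=True must drop the index whose
            # CamelCase name requires quoting.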
editor.alter_field(model, get_field(db_index=True), field, strict=True)
self.assertNotIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
constraint_name = "CamelCaseUniqConstraint"
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(editor._create_unique_sql(model, [field], constraint_name))
self.assertIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
editor.alter_field(model, get_field(unique=True), field, strict=True)
self.assertNotIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
if editor.sql_create_fk:
constraint_name = "CamelCaseFKConstraint"
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_fk
% {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"column": editor.quote_name(column),
"to_table": editor.quote_name(table),
"to_column": editor.quote_name(model._meta.auto_field.column),
"deferrable": connection.ops.deferrable_sql(),
}
)
self.assertIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
editor.alter_field(
model,
get_field(Author, CASCADE, field_class=ForeignKey),
field,
strict=True,
)
self.assertNotIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
def test_add_field_use_effective_default(self):
"""
#23987 - effective_default() should be used as the field default when
adding a new field.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name="Anonymous1")
        # Add a new CharField to ensure the default from effective_default()
        # is used.
new_field = CharField(max_length=15, blank=True)
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(
item[0],
None if connection.features.interprets_empty_strings_as_nulls else "",
)
def test_add_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name="Anonymous1")
# Add new CharField with a default
new_field = CharField(max_length=15, blank=True, default="surname default")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], "surname default")
# And that the default is no longer set in the database.
field = next(
f
for f in connection.introspection.get_table_description(
cursor, "schema_author"
)
if f.name == "surname"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_add_field_default_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add new nullable CharField with a default.
new_field = CharField(max_length=15, blank=True, null=True, default="surname")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
Author.objects.create(name="Anonymous1")
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertIsNone(item[0])
field = next(
f
for f in connection.introspection.get_table_description(
cursor,
"schema_author",
)
if f.name == "surname"
)
# Field is still nullable.
self.assertTrue(field.null_ok)
# The database default is no longer set.
if connection.features.can_introspect_default:
self.assertIn(field.default, ["NULL", None])
def test_add_textfield_default_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add new nullable TextField with a default.
new_field = TextField(blank=True, null=True, default="text")
new_field.set_attributes_from_name("description")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
Author.objects.create(name="Anonymous1")
with connection.cursor() as cursor:
cursor.execute("SELECT description FROM schema_author;")
item = cursor.fetchall()[0]
self.assertIsNone(item[0])
field = next(
f
for f in connection.introspection.get_table_description(
cursor,
"schema_author",
)
if f.name == "description"
)
# Field is still nullable.
self.assertTrue(field.null_ok)
# The database default is no longer set.
if connection.features.can_introspect_default:
self.assertIn(field.default, ["NULL", None])
def test_alter_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name="Anonymous1")
self.assertIsNone(Author.objects.get().height)
old_field = Author._meta.get_field("height")
# The default from the new field is used in updating existing rows.
new_field = IntegerField(blank=True, default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(Author.objects.get().height, 42)
# The database default should be removed.
with connection.cursor() as cursor:
field = next(
f
for f in connection.introspection.get_table_description(
cursor, "schema_author"
)
if f.name == "height"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_alter_field_default_doesnt_perform_queries(self):
"""
        No queries are performed if a field's default changes and the field
        isn't changing from null to non-null.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_default = old_field.default * 2
new_field = PositiveIntegerField(null=True, blank=True, default=new_default)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(
AuthorWithDefaultHeight, old_field, new_field, strict=True
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_field_fk_attributes_noop(self):
"""
No queries are performed when changing field attributes that don't
affect the schema.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
old_field = Book._meta.get_field("author")
new_field = ForeignKey(
Author,
blank=True,
editable=False,
error_messages={"invalid": "error message"},
help_text="help text",
limit_choices_to={"limit": "choice"},
on_delete=PROTECT,
related_name="related_name",
related_query_name="related_query_name",
validators=[lambda x: x],
verbose_name="verbose name",
)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Book, old_field, new_field, strict=True)
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Book, new_field, old_field, strict=True)
def test_alter_field_choices_noop(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("name")
new_field = CharField(
choices=(("Jane", "Jane"), ("Joe", "Joe")),
max_length=255,
)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Author, old_field, new_field, strict=True)
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Author, new_field, old_field, strict=True)
def test_add_textfield_unhashable_default(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name="Anonymous1")
# Create a field that has an unhashable default
new_field = TextField(default={})
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_add_indexed_charfield(self):
field = CharField(max_length=255, db_index=True)
field.set_attributes_from_name("nom_de_plume")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
        # Should create two indexes; one of them for the LIKE operator.
self.assertEqual(
self.get_constraints_for_column(Author, "nom_de_plume"),
[
"schema_author_nom_de_plume_7570a851",
"schema_author_nom_de_plume_7570a851_like",
],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_add_unique_charfield(self):
field = CharField(max_length=255, unique=True)
field.set_attributes_from_name("nom_de_plume")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
        # Should create two indexes; one of them for the LIKE operator.
self.assertEqual(
self.get_constraints_for_column(Author, "nom_de_plume"),
[
"schema_author_nom_de_plume_7570a851_like",
"schema_author_nom_de_plume_key",
],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_index_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_index=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, "name"),
["schema_author_name_1fbc5617", "schema_author_name_1fbc5617_like"],
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
# Alter to add unique=True and create 2 indexes.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, "name"),
["schema_author_name_1fbc5617_like", "schema_author_name_1fbc5617_uniq"],
)
# Remove unique=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_index_to_textfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Note)
self.assertEqual(self.get_constraints_for_column(Note, "info"), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Note._meta.get_field("info")
new_field = TextField(db_index=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Note, "info"),
["schema_note_info_4b0ea695", "schema_note_info_4b0ea695_like"],
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Note, "info"), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield_with_db_index(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field("title")
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
)
# Alter to remove unique=True (should drop unique index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_remove_unique_and_db_index_from_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field("title")
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
)
# Alter to remove both unique=True and db_index=True (should drop all indexes)
new_field2 = CharField(max_length=100)
new_field2.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"), []
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_swap_unique_and_db_index_with_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
# Alter to set unique=True and remove db_index=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field("title")
new_field = CharField(max_length=100, unique=True)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
)
# Alter to set db_index=True and remove unique=True (should restore index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_db_index_to_charfield_with_unique(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Tag)
self.assertEqual(
self.get_constraints_for_column(Tag, "slug"),
["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"],
)
# Alter to add db_index=True
old_field = Tag._meta.get_field("slug")
new_field = SlugField(db_index=True, unique=True)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, "slug"),
["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"],
)
# Alter to remove db_index=True
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, "slug"),
["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"],
)
def test_alter_field_add_index_to_integerfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, "weight"), [])
# Alter to add db_index=True and create index.
old_field = Author._meta.get_field("weight")
new_field = IntegerField(null=True, db_index=True)
new_field.set_attributes_from_name("weight")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, "weight"),
["schema_author_weight_587740f9"],
)
# Remove db_index=True to drop index.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, "weight"), [])
def test_alter_pk_with_self_referential_field(self):
"""
Changing the primary key field name of a model with a self-referential
foreign key (#26384).
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field("node_id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(Node, old_field, new_field, strict=True)
self.assertForeignKeyExists(Node, "parent_id", Node._meta.db_table)
@mock.patch("django.db.backends.base.schema.datetime")
@mock.patch("django.db.backends.base.schema.timezone")
def test_add_datefield_and_datetimefield_use_effective_default(
self, mocked_datetime, mocked_tz
):
"""
effective_default() should be used for DateField, DateTimeField, and
TimeField if auto_now or auto_now_add is set (#25005).
"""
now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1)
now_tz = datetime.datetime(
month=1, day=1, year=2000, hour=1, minute=1, tzinfo=datetime.timezone.utc
)
mocked_datetime.now = mock.MagicMock(return_value=now)
mocked_tz.now = mock.MagicMock(return_value=now_tz)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
        # Ensure the auto_now/auto_now_add columns don't exist yet.
columns = self.column_classes(Author)
self.assertNotIn("dob_auto_now", columns)
self.assertNotIn("dob_auto_now_add", columns)
self.assertNotIn("dtob_auto_now", columns)
self.assertNotIn("dtob_auto_now_add", columns)
self.assertNotIn("tob_auto_now", columns)
self.assertNotIn("tob_auto_now_add", columns)
# Create a row
Author.objects.create(name="Anonymous1")
# Ensure fields were added with the correct defaults
dob_auto_now = DateField(auto_now=True)
dob_auto_now.set_attributes_from_name("dob_auto_now")
self.check_added_field_default(
editor,
Author,
dob_auto_now,
"dob_auto_now",
now.date(),
cast_function=lambda x: x.date(),
)
dob_auto_now_add = DateField(auto_now_add=True)
dob_auto_now_add.set_attributes_from_name("dob_auto_now_add")
self.check_added_field_default(
editor,
Author,
dob_auto_now_add,
"dob_auto_now_add",
now.date(),
cast_function=lambda x: x.date(),
)
dtob_auto_now = DateTimeField(auto_now=True)
dtob_auto_now.set_attributes_from_name("dtob_auto_now")
self.check_added_field_default(
editor,
Author,
dtob_auto_now,
"dtob_auto_now",
now,
)
dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True)
dt_tm_of_birth_auto_now_add.set_attributes_from_name("dtob_auto_now_add")
self.check_added_field_default(
editor,
Author,
dt_tm_of_birth_auto_now_add,
"dtob_auto_now_add",
now,
)
tob_auto_now = TimeField(auto_now=True)
tob_auto_now.set_attributes_from_name("tob_auto_now")
self.check_added_field_default(
editor,
Author,
tob_auto_now,
"tob_auto_now",
now.time(),
cast_function=lambda x: x.time(),
)
tob_auto_now_add = TimeField(auto_now_add=True)
tob_auto_now_add.set_attributes_from_name("tob_auto_now_add")
self.check_added_field_default(
editor,
Author,
tob_auto_now_add,
"tob_auto_now_add",
now.time(),
cast_function=lambda x: x.time(),
)
def test_namespaced_db_table_create_index_name(self):
"""
Table names are stripped of their namespace/schema before being used to
generate index names.
"""
with connection.schema_editor() as editor:
max_name_length = connection.ops.max_name_length() or 200
namespace = "n" * max_name_length
table_name = "t" * max_name_length
namespaced_table_name = '"%s"."%s"' % (namespace, table_name)
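            # Stripping the namespace first means both calls generate the
            # same index name despite the overlong qualified name.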
self.assertEqual(
editor._create_index_name(table_name, []),
editor._create_index_name(namespaced_table_name, []),
)
@unittest.skipUnless(
connection.vendor == "oracle", "Oracle specific db_table syntax"
)
def test_creation_with_db_table_double_quotes(self):
oracle_user = connection.creation._test_database_user()
class Student(Model):
name = CharField(max_length=30)
class Meta:
app_label = "schema"
apps = new_apps
db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user
class Document(Model):
name = CharField(max_length=30)
students = ManyToManyField(Student)
class Meta:
app_label = "schema"
apps = new_apps
db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user
self.isolated_local_models = [Student, Document]
with connection.schema_editor() as editor:
editor.create_model(Student)
editor.create_model(Document)
doc = Document.objects.create(name="Test Name")
student = Student.objects.create(name="Some man")
doc.students.add(student)
@isolate_apps("schema")
@unittest.skipUnless(
connection.vendor == "postgresql", "PostgreSQL specific db_table syntax."
)
def test_namespaced_db_table_foreign_key_reference(self):
with connection.cursor() as cursor:
cursor.execute("CREATE SCHEMA django_schema_tests")
def delete_schema():
with connection.cursor() as cursor:
cursor.execute("DROP SCHEMA django_schema_tests CASCADE")
self.addCleanup(delete_schema)
class Author(Model):
class Meta:
app_label = "schema"
class Book(Model):
class Meta:
app_label = "schema"
db_table = '"django_schema_tests"."schema_book"'
author = ForeignKey(Author, CASCADE)
author.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.add_field(Book, author)
def test_rename_table_renames_deferred_sql_references(self):
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.alter_db_table(Author, "schema_author", "schema_renamed_author")
editor.alter_db_table(Author, "schema_book", "schema_renamed_book")
try:
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_table("schema_author"), False)
self.assertIs(statement.references_table("schema_book"), False)
finally:
editor.alter_db_table(Author, "schema_renamed_author", "schema_author")
editor.alter_db_table(Author, "schema_renamed_book", "schema_book")
def test_rename_column_renames_deferred_sql_references(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
old_title = Book._meta.get_field("title")
new_title = CharField(max_length=100, db_index=True)
new_title.set_attributes_from_name("renamed_title")
editor.alter_field(Book, old_title, new_title)
old_author = Book._meta.get_field("author")
new_author = ForeignKey(Author, CASCADE)
new_author.set_attributes_from_name("renamed_author")
editor.alter_field(Book, old_author, new_author)
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_column("book", "title"), False)
self.assertIs(statement.references_column("book", "author_id"), False)
@isolate_apps("schema")
def test_referenced_field_without_constraint_rename_inside_atomic_block(self):
"""
        Foreign keys without a database-level constraint don't prevent the
        field they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = "schema"
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field="field", db_constraint=False)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
with connection.schema_editor(atomic=True) as editor:
editor.alter_field(Foo, Foo._meta.get_field("field"), new_field)
@isolate_apps("schema")
def test_referenced_table_without_constraint_rename_inside_atomic_block(self):
"""
        Foreign keys without a database-level constraint don't prevent the
        table they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = "schema"
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field="field", db_constraint=False)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
with connection.schema_editor(atomic=True) as editor:
editor.alter_db_table(Foo, Foo._meta.db_table, "renamed_table")
Foo._meta.db_table = "renamed_table"
@isolate_apps("schema")
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_db_collation_charfield(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
class Foo(Model):
field = CharField(max_length=255, db_collation=collation)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo]
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.assertEqual(
self.get_column_collation(Foo._meta.db_table, "field"),
collation,
)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_collation_on_textfield")
def test_db_collation_textfield(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
class Foo(Model):
field = TextField(db_collation=collation)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo]
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.assertEqual(
self.get_column_collation(Foo._meta.db_table, "field"),
collation,
)
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_add_field_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Author)
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("alias")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns["alias"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(columns["alias"][1][8], collation)
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_alter_field_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_collation(Author._meta.db_table, "name"),
collation,
)
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertIsNone(self.get_column_collation(Author._meta.db_table, "name"))
@skipUnlessDBFeature(
"supports_collation_on_charfield", "supports_collation_on_textfield"
)
def test_alter_field_type_and_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("info")
new_field.model = Note
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(
columns["info"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(columns["info"][1][8], collation)
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns["info"][0], "TextField")
self.assertIsNone(columns["info"][1][8])
@skipUnlessDBFeature(
"supports_collation_on_charfield",
"supports_non_deterministic_collations",
)
def test_ci_cs_db_collation(self):
cs_collation = connection.features.test_collations.get("cs")
ci_collation = connection.features.test_collations.get("ci")
try:
if connection.vendor == "mysql":
cs_collation = "latin1_general_cs"
elif connection.vendor == "postgresql":
cs_collation = "en-x-icu"
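                # PostgreSQL has no built-in case-insensitive collation;
                # create a non-deterministic ICU collation for the test.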
with connection.cursor() as cursor:
cursor.execute(
"CREATE COLLATION IF NOT EXISTS case_insensitive "
"(provider = icu, locale = 'und-u-ks-level2', "
"deterministic = false)"
)
ci_collation = "case_insensitive"
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
# Case-insensitive collation.
old_field = Author._meta.get_field("name")
new_field_ci = CharField(max_length=255, db_collation=ci_collation)
new_field_ci.set_attributes_from_name("name")
new_field_ci.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field_ci, strict=True)
Author.objects.create(name="ANDREW")
self.assertIs(Author.objects.filter(name="Andrew").exists(), True)
# Case-sensitive collation.
new_field_cs = CharField(max_length=255, db_collation=cs_collation)
new_field_cs.set_attributes_from_name("name")
new_field_cs.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field_ci, new_field_cs, strict=True)
self.assertIs(Author.objects.filter(name="Andrew").exists(), False)
finally:
if connection.vendor == "postgresql":
with connection.cursor() as cursor:
cursor.execute("DROP COLLATION IF EXISTS case_insensitive")
|
442a8f6629b741fd12efb698df6bb64681fa100cb33035d881fc177ae6e2f469 | import asyncio
import os
import sys
from unittest import mock, skipIf
from asgiref.sync import async_to_sync
from django.core.cache import DEFAULT_CACHE_ALIAS, caches
from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
from django.http import HttpResponse
from django.test import SimpleTestCase
from django.utils.asyncio import async_unsafe
from django.views.generic.base import View
from .models import SimpleModel
@skipIf(
sys.platform == "win32" and (3, 8, 0) < sys.version_info < (3, 8, 1),
"https://bugs.python.org/issue38563",
)
class CacheTest(SimpleTestCase):
def test_caches_local(self):
@async_to_sync
async def async_cache():
return caches[DEFAULT_CACHE_ALIAS]
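        # Cache connections are shared between async contexts rather than
        # re-created per context.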
cache_1 = async_cache()
cache_2 = async_cache()
self.assertIs(cache_1, cache_2)
@skipIf(
sys.platform == "win32" and (3, 8, 0) < sys.version_info < (3, 8, 1),
"https://bugs.python.org/issue38563",
)
class DatabaseConnectionTest(SimpleTestCase):
"""A database connection cannot be used in an async context."""
async def test_get_async_connection(self):
with self.assertRaises(SynchronousOnlyOperation):
list(SimpleModel.objects.all())
@skipIf(
sys.platform == "win32" and (3, 8, 0) < sys.version_info < (3, 8, 1),
"https://bugs.python.org/issue38563",
)
class AsyncUnsafeTest(SimpleTestCase):
"""
    The async_unsafe decorator should work correctly and return the correct
    message.
"""
@async_unsafe
def dangerous_method(self):
return True
async def test_async_unsafe(self):
# async_unsafe decorator catches bad access and returns the right
# message.
msg = (
"You cannot call this from an async context - use a thread or "
"sync_to_async."
)
with self.assertRaisesMessage(SynchronousOnlyOperation, msg):
self.dangerous_method()
@mock.patch.dict(os.environ, {"DJANGO_ALLOW_ASYNC_UNSAFE": "true"})
@async_to_sync # mock.patch() is not async-aware.
async def test_async_unsafe_suppressed(self):
# Decorator doesn't trigger check when the environment variable to
# suppress it is set.
try:
self.dangerous_method()
except SynchronousOnlyOperation:
self.fail("SynchronousOnlyOperation should not be raised.")
class SyncView(View):
def get(self, request, *args, **kwargs):
return HttpResponse("Hello (sync) world!")
class AsyncView(View):
async def get(self, request, *args, **kwargs):
return HttpResponse("Hello (async) world!")
class ViewTests(SimpleTestCase):
def test_views_are_correctly_marked(self):
tests = [
(SyncView, False),
(AsyncView, True),
]
for view_cls, is_async in tests:
with self.subTest(view_cls=view_cls, is_async=is_async):
self.assertIs(view_cls.view_is_async, is_async)
callback = view_cls.as_view()
self.assertIs(asyncio.iscoroutinefunction(callback), is_async)
def test_mixed_views_raise_error(self):
class MixedView(View):
def get(self, request, *args, **kwargs):
return HttpResponse("Hello (mixed) world!")
async def post(self, request, *args, **kwargs):
return HttpResponse("Hello (mixed) world!")
msg = (
f"{MixedView.__qualname__} HTTP handlers must either be all sync or all "
"async."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
MixedView.as_view()
def test_options_handler_responds_correctly(self):
tests = [
(SyncView, False),
(AsyncView, True),
]
for view_cls, is_coroutine in tests:
with self.subTest(view_cls=view_cls, is_coroutine=is_coroutine):
instance = view_cls()
response = instance.options(None)
self.assertIs(
asyncio.iscoroutine(response),
is_coroutine,
)
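                # An async handler returns a coroutine that must be run to
                # obtain the actual HttpResponse.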
if is_coroutine:
response = asyncio.run(response)
self.assertIsInstance(response, HttpResponse)
def test_base_view_class_is_sync(self):
"""
        View, and by extension any subclass that doesn't define handlers, is
        sync.
"""
self.assertIs(View.view_is_async, False)
|
796df99d3517250b04b8af031527a878d437ff384e50fe6e48f5206e26b23d79 | from pathlib import Path
from django.dispatch import receiver
from django.template import engines
from django.template.backends.django import DjangoTemplates
from django.utils._os import to_path
from django.utils.autoreload import autoreload_started, file_changed, is_django_path
def get_template_directories():
# Iterate through each template backend and find
# any template_loader that has a 'get_dirs' method.
# Collect the directories, filtering out Django templates.
cwd = Path.cwd()
items = set()
for backend in engines.all():
if not isinstance(backend, DjangoTemplates):
continue
items.update(cwd / to_path(dir) for dir in backend.engine.dirs if dir)
for loader in backend.engine.template_loaders:
if not hasattr(loader, "get_dirs"):
continue
items.update(
cwd / to_path(directory)
for directory in loader.get_dirs()
if directory and not is_django_path(directory)
)
return items
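# Illustrative sketch (assuming a project with TEMPLATES[0]["DIRS"] set to
# ["templates"]): get_template_directories() would include
# Path.cwd() / "templates" plus any directory reported by loaders that
# implement get_dirs(), excluding Django's own template directories.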
def reset_loaders():
for backend in engines.all():
if not isinstance(backend, DjangoTemplates):
continue
for loader in backend.engine.template_loaders:
loader.reset()
@receiver(autoreload_started, dispatch_uid="template_loaders_watch_changes")
def watch_for_template_changes(sender, **kwargs):
for directory in get_template_directories():
sender.watch_dir(directory, "**/*")
@receiver(file_changed, dispatch_uid="template_loaders_file_changed")
def template_changed(sender, file_path, **kwargs):
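    # Python file changes are handled by the autoreloader itself; template
    # loaders only need resetting when a template changes.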
if file_path.suffix == ".py":
return
for template_dir in get_template_directories():
if template_dir in file_path.parents:
reset_loaders()
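            # Returning True tells the autoreloader the event was handled,
            # so it skips the server restart.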
return True
|
5f30ebba81b71c5ecbb7b6d52cf03b6084fc1b96e441654e894f9feacd6dcfd7 | """
Create SQL statements for QuerySets.
The code in here encapsulates all of the SQL construction so that QuerySets
themselves do not have to (and could be backed by things other than SQL
databases). The abstraction barrier only works one way: this module has to know
all about the internals of models in order to get the information it needs.
"""
import copy
import difflib
import functools
import sys
from collections import Counter, namedtuple
from collections.abc import Iterator, Mapping
from itertools import chain, count, product
from string import ascii_uppercase
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db import DEFAULT_DB_ALIAS, NotSupportedError, connections
from django.db.models.aggregates import Count
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import (
BaseExpression,
Col,
Exists,
F,
OuterRef,
Ref,
ResolvedOuterRef,
)
from django.db.models.fields import Field
from django.db.models.fields.related_lookups import MultiColSource
from django.db.models.lookups import Lookup
from django.db.models.query_utils import (
Q,
check_rel_lookup_compatibility,
refs_expression,
)
from django.db.models.sql.constants import INNER, LOUTER, ORDER_DIR, SINGLE
from django.db.models.sql.datastructures import BaseTable, Empty, Join, MultiJoin
from django.db.models.sql.where import AND, OR, ExtraWhere, NothingNode, WhereNode
from django.utils.functional import cached_property
from django.utils.regex_helper import _lazy_re_compile
from django.utils.tree import Node
__all__ = ["Query", "RawQuery"]
# Quotation marks ('"`[]), whitespace characters, semicolons, or inline
# SQL comments are forbidden in column aliases.
FORBIDDEN_ALIAS_PATTERN = _lazy_re_compile(r"['`\"\]\[;\s]|--|/\*|\*/")
# Inspired by
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
EXPLAIN_OPTIONS_PATTERN = _lazy_re_compile(r"[\w\-]+")
def get_field_names_from_opts(opts):
if opts is None:
return set()
return set(
chain.from_iterable(
(f.name, f.attname) if f.concrete else (f.name,) for f in opts.get_fields()
)
)
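# Illustrative example (editor's addition): for a hypothetical model with a
# concrete ForeignKey named "author", the result includes both "author"
# (f.name) and "author_id" (f.attname); non-concrete fields such as reverse
# relations contribute only their name.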
def get_children_from_q(q):
for child in q.children:
if isinstance(child, Node):
yield from get_children_from_q(child)
else:
yield child
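# Illustrative sketch (editor's addition): get_children_from_q() flattens a
# nested Q tree into its leaf conditions. For keyword filters the leaves are
# (lookup, value) tuples, so roughly:
#
#   q = Q(name="a") | (Q(age__gt=1) & Q(age__lt=9))
#   list(get_children_from_q(q))
#   # [("name", "a"), ("age__gt", 1), ("age__lt", 9)]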
JoinInfo = namedtuple(
"JoinInfo",
("final_field", "targets", "opts", "joins", "path", "transform_function"),
)
class RawQuery:
"""A single raw SQL query."""
def __init__(self, sql, using, params=()):
self.params = params
self.sql = sql
self.using = using
self.cursor = None
# Mirror some properties of a normal query so that
# the compiler can be used to process results.
self.low_mark, self.high_mark = 0, None # Used for offset/limit
self.extra_select = {}
self.annotation_select = {}
def chain(self, using):
return self.clone(using)
def clone(self, using):
return RawQuery(self.sql, using, params=self.params)
def get_columns(self):
if self.cursor is None:
self._execute_query()
converter = connections[self.using].introspection.identifier_converter
return [converter(column_meta[0]) for column_meta in self.cursor.description]
def __iter__(self):
# Always execute a new query for a new iterator.
# This could be optimized with a cache at the expense of RAM.
self._execute_query()
if not connections[self.using].features.can_use_chunked_reads:
# If the database can't use chunked reads we need to make sure we
# evaluate the entire query up front.
result = list(self.cursor)
else:
result = self.cursor
return iter(result)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
@property
def params_type(self):
if self.params is None:
return None
return dict if isinstance(self.params, Mapping) else tuple
def __str__(self):
if self.params_type is None:
return self.sql
return self.sql % self.params_type(self.params)
def _execute_query(self):
connection = connections[self.using]
# Adapt parameters to the database, as much as possible considering
# that the target type isn't known. See #17755.
params_type = self.params_type
adapter = connection.ops.adapt_unknown_value
if params_type is tuple:
params = tuple(adapter(val) for val in self.params)
elif params_type is dict:
params = {key: adapter(val) for key, val in self.params.items()}
elif params_type is None:
params = None
else:
raise RuntimeError("Unexpected params type: %s" % params_type)
self.cursor = connection.cursor()
self.cursor.execute(self.sql, params)
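# Illustrative sketch (editor's addition): RawQuery is the machinery behind
# Manager.raw(). A minimal manual use, assuming a configured "default" alias
# and an existing myapp_person table, might look like:
#
#   query = RawQuery("SELECT id, name FROM myapp_person", using="default")
#   columns = query.get_columns()  # e.g. ["id", "name"]
#   rows = list(query)             # executes the SQL and yields raw rows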
ExplainInfo = namedtuple("ExplainInfo", ("format", "options"))
class Query(BaseExpression):
"""A single SQL query."""
alias_prefix = "T"
empty_result_set_value = None
subq_aliases = frozenset([alias_prefix])
compiler = "SQLCompiler"
base_table_class = BaseTable
join_class = Join
default_cols = True
default_ordering = True
standard_ordering = True
filter_is_sticky = False
subquery = False
# SQL-related attributes.
# Select and related select clauses are expressions to use in the SELECT
# clause of the query. The select is used for cases where we want to set up
# the select clause to contain other than default fields (values(),
# subqueries...). Note that annotations go to annotations dictionary.
select = ()
# The group_by attribute can have one of the following forms:
# - None: no group by at all in the query
# - A tuple of expressions: group by (at least) those expressions.
# String refs are also allowed for now.
# - True: group by all select fields of the model
# See compiler.get_group_by() for details.
group_by = None
order_by = ()
low_mark = 0 # Used for offset/limit.
high_mark = None # Used for offset/limit.
distinct = False
distinct_fields = ()
select_for_update = False
select_for_update_nowait = False
select_for_update_skip_locked = False
select_for_update_of = ()
select_for_no_key_update = False
select_related = False
    # Arbitrary limit for select_related to prevent infinite recursion.
max_depth = 5
# Holds the selects defined by a call to values() or values_list()
# excluding annotation_select and extra_select.
values_select = ()
# SQL annotation-related attributes.
annotation_select_mask = None
_annotation_select_cache = None
# Set combination attributes.
combinator = None
combinator_all = False
combined_queries = ()
# These are for extensions. The contents are more or less appended verbatim
# to the appropriate clause.
extra_select_mask = None
_extra_select_cache = None
extra_tables = ()
extra_order_by = ()
    # A tuple containing a set of model field names and either True, if these
    # are the fields to defer, or False if these are the only fields to load.
deferred_loading = (frozenset(), True)
explain_info = None
def __init__(self, model, alias_cols=True):
self.model = model
self.alias_refcount = {}
# alias_map is the most important data structure regarding joins.
# It's used for recording which joins exist in the query and what
# types they are. The key is the alias of the joined table (possibly
# the table name) and the value is a Join-like object (see
# sql.datastructures.Join for more information).
self.alias_map = {}
# Whether to provide alias to columns during reference resolving.
self.alias_cols = alias_cols
# Sometimes the query contains references to aliases in outer queries (as
# a result of split_exclude). Correct alias quoting needs to know these
# aliases too.
# Map external tables to whether they are aliased.
self.external_aliases = {}
self.table_map = {} # Maps table names to list of aliases.
self.used_aliases = set()
self.where = WhereNode()
# Maps alias -> Annotation Expression.
self.annotations = {}
# These are for extensions. The contents are more or less appended
# verbatim to the appropriate clause.
self.extra = {} # Maps col_alias -> (col_sql, params).
self._filtered_relations = {}
@property
def output_field(self):
if len(self.select) == 1:
select = self.select[0]
return getattr(select, "target", None) or select.field
elif len(self.annotation_select) == 1:
return next(iter(self.annotation_select.values())).output_field
@property
def has_select_fields(self):
return bool(
self.select or self.annotation_select_mask or self.extra_select_mask
)
@cached_property
def base_table(self):
for alias in self.alias_map:
return alias
def __str__(self):
"""
Return the query as a string of SQL with the parameter values
substituted in (use sql_with_params() to see the unsubstituted string).
Parameter values won't necessarily be quoted correctly, since that is
done by the database interface at execution time.
"""
sql, params = self.sql_with_params()
return sql % params
def sql_with_params(self):
"""
Return the query as an SQL string and the parameters that will be
substituted into the query.
"""
return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
def __deepcopy__(self, memo):
"""Limit the amount of work when a Query is deepcopied."""
result = self.clone()
memo[id(self)] = result
return result
def get_compiler(self, using=None, connection=None, elide_empty=True):
if using is None and connection is None:
raise ValueError("Need either using or connection")
if using:
connection = connections[using]
return connection.ops.compiler(self.compiler)(
self, connection, using, elide_empty
)
def get_meta(self):
"""
Return the Options instance (the model._meta) from which to start
processing. Normally, this is self.model._meta, but it can be changed
by subclasses.
"""
if self.model:
return self.model._meta
def clone(self):
"""
        Return a copy of the current Query. A lightweight alternative
        to deepcopy().
"""
obj = Empty()
obj.__class__ = self.__class__
# Copy references to everything.
obj.__dict__ = self.__dict__.copy()
# Clone attributes that can't use shallow copy.
obj.alias_refcount = self.alias_refcount.copy()
obj.alias_map = self.alias_map.copy()
obj.external_aliases = self.external_aliases.copy()
obj.table_map = self.table_map.copy()
obj.where = self.where.clone()
obj.annotations = self.annotations.copy()
if self.annotation_select_mask is not None:
obj.annotation_select_mask = self.annotation_select_mask.copy()
if self.combined_queries:
obj.combined_queries = tuple(
[query.clone() for query in self.combined_queries]
)
# _annotation_select_cache cannot be copied, as doing so breaks the
# (necessary) state in which both annotations and
# _annotation_select_cache point to the same underlying objects.
# It will get re-populated in the cloned queryset the next time it's
# used.
obj._annotation_select_cache = None
obj.extra = self.extra.copy()
if self.extra_select_mask is not None:
obj.extra_select_mask = self.extra_select_mask.copy()
if self._extra_select_cache is not None:
obj._extra_select_cache = self._extra_select_cache.copy()
if self.select_related is not False:
# Use deepcopy because select_related stores fields in nested
# dicts.
obj.select_related = copy.deepcopy(obj.select_related)
if "subq_aliases" in self.__dict__:
obj.subq_aliases = self.subq_aliases.copy()
obj.used_aliases = self.used_aliases.copy()
obj._filtered_relations = self._filtered_relations.copy()
# Clear the cached_property, if it exists.
obj.__dict__.pop("base_table", None)
return obj
def chain(self, klass=None):
"""
Return a copy of the current Query that's ready for another operation.
The klass argument changes the type of the Query, e.g. UpdateQuery.
"""
obj = self.clone()
if klass and obj.__class__ != klass:
obj.__class__ = klass
if not obj.filter_is_sticky:
obj.used_aliases = set()
obj.filter_is_sticky = False
if hasattr(obj, "_setup_query"):
obj._setup_query()
return obj
def relabeled_clone(self, change_map):
clone = self.clone()
clone.change_aliases(change_map)
return clone
def _get_col(self, target, field, alias):
if not self.alias_cols:
alias = None
return target.get_col(alias, field)
def rewrite_cols(self, annotation, col_cnt):
# We must make sure the inner query has the referred columns in it.
# If we are aggregating over an annotation, then Django uses Ref()
# instances to note this. However, if we are annotating over a column
        # of a related model, then it might be that the column isn't part of the
# SELECT clause of the inner query, and we must manually make sure
# the column is selected. An example case is:
# .aggregate(Sum('author__awards'))
# Resolving this expression results in a join to author, but there
# is no guarantee the awards column of author is in the select clause
# of the query. Thus we must manually add the column to the inner
# query.
orig_exprs = annotation.get_source_expressions()
new_exprs = []
for expr in orig_exprs:
# FIXME: These conditions are fairly arbitrary. Identify a better
# method of having expressions decide which code path they should
# take.
if isinstance(expr, Ref):
                # It's already a Ref to a subquery (see resolve_ref() for
                # details).
new_exprs.append(expr)
elif isinstance(expr, (WhereNode, Lookup)):
# Decompose the subexpressions further. The code here is
# copied from the else clause, but this condition must appear
# before the contains_aggregate/is_summary condition below.
new_expr, col_cnt = self.rewrite_cols(expr, col_cnt)
new_exprs.append(new_expr)
else:
# Reuse aliases of expressions already selected in subquery.
for col_alias, selected_annotation in self.annotation_select.items():
if selected_annotation is expr:
new_expr = Ref(col_alias, expr)
break
else:
                    # An expression that is not selected in the subquery.
if isinstance(expr, Col) or (
expr.contains_aggregate and not expr.is_summary
):
# Reference column or another aggregate. Select it
# under a non-conflicting alias.
col_cnt += 1
col_alias = "__col%d" % col_cnt
self.annotations[col_alias] = expr
self.append_annotation_mask([col_alias])
new_expr = Ref(col_alias, expr)
else:
# Some other expression not referencing database values
# directly. Its subexpression might contain Cols.
new_expr, col_cnt = self.rewrite_cols(expr, col_cnt)
new_exprs.append(new_expr)
annotation.set_source_expressions(new_exprs)
return annotation, col_cnt
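    # Illustrative note (editor's addition): when resolving something like
    # .aggregate(total=Sum("author__awards")), the referenced column may not
    # be in the subquery's SELECT clause; rewrite_cols() then selects it
    # under a generated alias such as "__col1" and rewrites the outer
    # aggregate to use Ref("__col1", ...) instead of the raw column.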
def get_aggregation(self, using, added_aggregate_names):
"""
Return the dictionary with the values of the existing aggregations.
"""
if not self.annotation_select:
return {}
existing_annotations = [
annotation
for alias, annotation in self.annotations.items()
if alias not in added_aggregate_names
]
# Decide if we need to use a subquery.
#
# Existing annotations would cause incorrect results as get_aggregation()
# must produce just one result and thus must not use GROUP BY. But we
# aren't smart enough to remove the existing annotations from the
# query, so those would force us to use GROUP BY.
#
# If the query has limit or distinct, or uses set operations, then
# those operations must be done in a subquery so that the query
# aggregates on the limit and/or distinct results instead of applying
# the distinct and limit after the aggregation.
if (
isinstance(self.group_by, tuple)
or self.is_sliced
or existing_annotations
or self.distinct
or self.combinator
):
from django.db.models.sql.subqueries import AggregateQuery
inner_query = self.clone()
inner_query.subquery = True
outer_query = AggregateQuery(self.model, inner_query)
inner_query.select_for_update = False
inner_query.select_related = False
inner_query.set_annotation_mask(self.annotation_select)
# Queries with distinct_fields need ordering and when a limit is
            # applied we must take the slice from the ordered query. Otherwise
            # no ordering is needed.
inner_query.clear_ordering(force=False)
if not inner_query.distinct:
# If the inner query uses default select and it has some
# aggregate annotations, then we must make sure the inner
# query is grouped by the main model's primary key. However,
# clearing the select clause can alter results if distinct is
# used.
has_existing_aggregate_annotations = any(
annotation
for annotation in existing_annotations
if getattr(annotation, "contains_aggregate", True)
)
if inner_query.default_cols and has_existing_aggregate_annotations:
inner_query.group_by = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
inner_query.default_cols = False
relabels = {t: "subquery" for t in inner_query.alias_map}
relabels[None] = "subquery"
# Remove any aggregates marked for reduction from the subquery
# and move them to the outer AggregateQuery.
col_cnt = 0
for alias, expression in list(inner_query.annotation_select.items()):
annotation_select_mask = inner_query.annotation_select_mask
if expression.is_summary:
expression, col_cnt = inner_query.rewrite_cols(expression, col_cnt)
outer_query.annotations[alias] = expression.relabeled_clone(
relabels
)
del inner_query.annotations[alias]
annotation_select_mask.remove(alias)
            # Make sure the annotation_select won't use cached results.
inner_query.set_annotation_mask(inner_query.annotation_select_mask)
if (
inner_query.select == ()
and not inner_query.default_cols
and not inner_query.annotation_select_mask
):
# In case of Model.objects[0:3].count(), there would be no
# field selected in the inner query, yet we must use a subquery.
# So, make sure at least one field is selected.
inner_query.select = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
else:
outer_query = self
self.select = ()
self.default_cols = False
self.extra = {}
empty_set_result = [
expression.empty_result_set_value
for expression in outer_query.annotation_select.values()
]
elide_empty = not any(result is NotImplemented for result in empty_set_result)
outer_query.clear_ordering(force=True)
outer_query.clear_limits()
outer_query.select_for_update = False
outer_query.select_related = False
compiler = outer_query.get_compiler(using, elide_empty=elide_empty)
result = compiler.execute_sql(SINGLE)
if result is None:
result = empty_set_result
converters = compiler.get_converters(outer_query.annotation_select.values())
result = next(compiler.apply_converters((result,), converters))
return dict(zip(outer_query.annotation_select, result))
def get_count(self, using):
"""
Perform a COUNT() query using the current filter constraints.
"""
obj = self.clone()
obj.add_annotation(Count("*"), alias="__count", is_summary=True)
return obj.get_aggregation(using, ["__count"])["__count"]
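    # Illustrative note (editor's addition): QuerySet.count() delegates here,
    # so Book.objects.filter(pages__gt=100).count() (hypothetical model)
    # clones this query, adds Count("*") under the "__count" alias, and reads
    # back that single aggregate value.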
def has_filters(self):
return self.where
def exists(self, using, limit=True):
q = self.clone()
if not q.distinct:
if q.group_by is True:
q.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
q.set_group_by(allow_aliases=False)
q.clear_select_clause()
if q.combined_queries and q.combinator == "union":
limit_combined = connections[
using
].features.supports_slicing_ordering_in_compound
q.combined_queries = tuple(
combined_query.exists(using, limit=limit_combined)
for combined_query in q.combined_queries
)
q.clear_ordering(force=True)
if limit:
q.set_limits(high=1)
q.add_extra({"a": 1}, None, None, None, None, None)
q.set_extra_mask(["a"])
return q
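    # Illustrative note (editor's addition): QuerySet.exists() builds on this
    # via has_results() below. The generated SQL is roughly
    #   SELECT 1 AS "a" FROM ... WHERE ... LIMIT 1
    # since the select clause is cleared and the literal extra column "a" is
    # the only thing selected.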
def has_results(self, using):
q = self.exists(using)
compiler = q.get_compiler(using=using)
return compiler.has_results()
def explain(self, using, format=None, **options):
q = self.clone()
for option_name in options:
if (
not EXPLAIN_OPTIONS_PATTERN.fullmatch(option_name)
or "--" in option_name
):
raise ValueError(f"Invalid option name: {option_name!r}.")
q.explain_info = ExplainInfo(format, options)
compiler = q.get_compiler(using=using)
return "\n".join(compiler.explain_query())
def combine(self, rhs, connector):
"""
Merge the 'rhs' query into the current one (with any 'rhs' effects
being applied *after* (that is, "to the right of") anything in the
current query. 'rhs' is not modified during a call to this function.
The 'connector' parameter describes how to connect filters from the
'rhs' query.
"""
if self.model != rhs.model:
raise TypeError("Cannot combine queries on two different base models.")
if self.is_sliced:
raise TypeError("Cannot combine queries once a slice has been taken.")
if self.distinct != rhs.distinct:
raise TypeError("Cannot combine a unique query with a non-unique query.")
if self.distinct_fields != rhs.distinct_fields:
raise TypeError("Cannot combine queries with different distinct fields.")
        # If lhs and rhs share the same alias prefix, it is possible to have
        # conflicting alias changes like T4 -> T5, T5 -> T6, which might end up
        # as T4 -> T6 while combining two querysets. To prevent this, change
        # the alias prefix of the rhs and update current aliases accordingly,
# except if the alias is the base table since it must be present in the
# query on both sides.
initial_alias = self.get_initial_alias()
rhs.bump_prefix(self, exclude={initial_alias})
# Work out how to relabel the rhs aliases, if necessary.
change_map = {}
conjunction = connector == AND
# Determine which existing joins can be reused. When combining the
# query with AND we must recreate all joins for m2m filters. When
# combining with OR we can reuse joins. The reason is that in AND
# case a single row can't fulfill a condition like:
# revrel__col=1 & revrel__col=2
# But, there might be two different related rows matching this
# condition. In OR case a single True is enough, so single row is
# enough, too.
#
# Note that we will be creating duplicate joins for non-m2m joins in
# the AND case. The results will be correct but this creates too many
# joins. This is something that could be fixed later on.
reuse = set() if conjunction else set(self.alias_map)
joinpromoter = JoinPromoter(connector, 2, False)
joinpromoter.add_votes(
j for j in self.alias_map if self.alias_map[j].join_type == INNER
)
rhs_votes = set()
# Now, add the joins from rhs query into the new query (skipping base
# table).
rhs_tables = list(rhs.alias_map)[1:]
for alias in rhs_tables:
join = rhs.alias_map[alias]
# If the left side of the join was already relabeled, use the
# updated alias.
join = join.relabeled_clone(change_map)
new_alias = self.join(join, reuse=reuse)
if join.join_type == INNER:
rhs_votes.add(new_alias)
# We can't reuse the same join again in the query. If we have two
# distinct joins for the same connection in rhs query, then the
# combined query must have two joins, too.
reuse.discard(new_alias)
if alias != new_alias:
change_map[alias] = new_alias
if not rhs.alias_refcount[alias]:
# The alias was unused in the rhs query. Unref it so that it
# will be unused in the new query, too. We have to add and
# unref the alias so that join promotion has information of
# the join type for the unused alias.
self.unref_alias(new_alias)
joinpromoter.add_votes(rhs_votes)
joinpromoter.update_join_types(self)
# Combine subqueries aliases to ensure aliases relabelling properly
# handle subqueries when combining where and select clauses.
self.subq_aliases |= rhs.subq_aliases
# Now relabel a copy of the rhs where-clause and add it to the current
# one.
w = rhs.where.clone()
w.relabel_aliases(change_map)
self.where.add(w, connector)
# Selection columns and extra extensions are those provided by 'rhs'.
if rhs.select:
self.set_select([col.relabeled_clone(change_map) for col in rhs.select])
else:
self.select = ()
if connector == OR:
# It would be nice to be able to handle this, but the queries don't
# really make sense (or return consistent value sets). Not worth
# the extra complexity when you can write a real query instead.
if self.extra and rhs.extra:
raise ValueError(
"When merging querysets using 'or', you cannot have "
"extra(select=...) on both sides."
)
self.extra.update(rhs.extra)
extra_select_mask = set()
if self.extra_select_mask is not None:
extra_select_mask.update(self.extra_select_mask)
if rhs.extra_select_mask is not None:
extra_select_mask.update(rhs.extra_select_mask)
if extra_select_mask:
self.set_extra_mask(extra_select_mask)
self.extra_tables += rhs.extra_tables
# Ordering uses the 'rhs' ordering, unless it has none, in which case
# the current ordering is used.
self.order_by = rhs.order_by or self.order_by
self.extra_order_by = rhs.extra_order_by or self.extra_order_by
def deferred_to_data(self, target):
"""
Convert the self.deferred_loading data structure to an alternate data
structure, describing the field that *will* be loaded. This is used to
compute the columns to select from the database and also by the
QuerySet class to work out which fields are being initialized on each
model. Models that have all their fields included aren't mentioned in
the result, only those that have field restrictions in place.
The "target" parameter is the instance that is populated (in place).
"""
field_names, defer = self.deferred_loading
if not field_names:
return
orig_opts = self.get_meta()
seen = {}
must_include = {orig_opts.concrete_model: {orig_opts.pk}}
for field_name in field_names:
parts = field_name.split(LOOKUP_SEP)
cur_model = self.model._meta.concrete_model
opts = orig_opts
for name in parts[:-1]:
old_model = cur_model
if name in self._filtered_relations:
name = self._filtered_relations[name].relation_name
source = opts.get_field(name)
if is_reverse_o2o(source):
cur_model = source.related_model
else:
cur_model = source.remote_field.model
opts = cur_model._meta
# Even if we're "just passing through" this model, we must add
# both the current model's pk and the related reference field
# (if it's not a reverse relation) to the things we select.
if not is_reverse_o2o(source):
must_include[old_model].add(source)
add_to_dict(must_include, cur_model, opts.pk)
field = opts.get_field(parts[-1])
is_reverse_object = field.auto_created and not field.concrete
model = field.related_model if is_reverse_object else field.model
model = model._meta.concrete_model
if model == opts.model:
model = cur_model
if not is_reverse_o2o(field):
add_to_dict(seen, model, field)
if defer:
# We need to load all fields for each model, except those that
# appear in "seen" (for all models that appear in "seen"). The only
# slight complexity here is handling fields that exist on parent
# models.
workset = {}
for model, values in seen.items():
for field in model._meta.local_fields:
if field not in values:
m = field.model._meta.concrete_model
add_to_dict(workset, m, field)
for model, values in must_include.items():
# If we haven't included a model in workset, we don't add the
# corresponding must_include fields for that model, since an
# empty set means "include all fields". That's why there's no
# "else" branch here.
if model in workset:
workset[model].update(values)
for model, fields in workset.items():
target[model] = {f.attname for f in fields}
else:
for model, values in must_include.items():
if model in seen:
seen[model].update(values)
else:
# As we've passed through this model, but not explicitly
# included any fields, we have to make sure it's mentioned
# so that only the "must include" fields are pulled in.
seen[model] = values
# Now ensure that every model in the inheritance chain is mentioned
# in the parent list. Again, it must be mentioned to ensure that
# only "must include" fields are pulled in.
for model in orig_opts.get_parent_list():
seen.setdefault(model, set())
for model, fields in seen.items():
target[model] = {f.attname for f in fields}
def table_alias(self, table_name, create=False, filtered_relation=None):
"""
Return a table alias for the given table_name and whether this is a
new alias or not.
If 'create' is true, a new alias is always created. Otherwise, the
most recently created alias for the table (if one exists) is reused.
"""
alias_list = self.table_map.get(table_name)
if not create and alias_list:
alias = alias_list[0]
self.alias_refcount[alias] += 1
return alias, False
# Create a new alias for this table.
if alias_list:
alias = "%s%d" % (self.alias_prefix, len(self.alias_map) + 1)
alias_list.append(alias)
else:
# The first occurrence of a table uses the table name directly.
alias = (
filtered_relation.alias if filtered_relation is not None else table_name
)
self.table_map[table_name] = [alias]
self.alias_refcount[alias] = 1
return alias, True
def ref_alias(self, alias):
"""Increases the reference count for this alias."""
self.alias_refcount[alias] += 1
def unref_alias(self, alias, amount=1):
"""Decreases the reference count for this alias."""
self.alias_refcount[alias] -= amount
def promote_joins(self, aliases):
"""
        Recursively promote the join type of the given aliases and their
        children to an outer join. A join is only promoted if it is nullable
        or the parent join is an outer join.
        The children promotion is done to avoid join chains that contain a
        LOUTER b INNER c. So, if we currently have a INNER b INNER c and a->b
        is promoted, then we must also promote b->c automatically, or
        otherwise the promotion of a->b doesn't actually change anything in
        the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type is None:
# This is the base table (first FROM entry) - this table
# isn't really joined at all in the query, so we should not
# alter its join type.
continue
# Only the first alias (skipped above) should have None join_type
assert self.alias_map[alias].join_type is not None
parent_alias = self.alias_map[alias].parent_alias
parent_louter = (
parent_alias and self.alias_map[parent_alias].join_type == LOUTER
)
already_louter = self.alias_map[alias].join_type == LOUTER
if (self.alias_map[alias].nullable or parent_louter) and not already_louter:
self.alias_map[alias] = self.alias_map[alias].promote()
# Join type of 'alias' changed, so re-examine all aliases that
# refer to this one.
aliases.extend(
join
for join in self.alias_map
if self.alias_map[join].parent_alias == alias
and join not in aliases
)
def demote_joins(self, aliases):
"""
Change join type from LOUTER to INNER for all joins in aliases.
Similarly to promote_joins(), this method must ensure no join chains
containing first an outer, then an inner join are generated. If we
        are demoting the b->c join in chain a LOUTER b LOUTER c then we must
        demote a->b automatically, or otherwise the demotion of b->c doesn't
        actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type == LOUTER:
self.alias_map[alias] = self.alias_map[alias].demote()
parent_alias = self.alias_map[alias].parent_alias
if self.alias_map[parent_alias].join_type == INNER:
aliases.append(parent_alias)
def reset_refcounts(self, to_counts):
"""
Reset reference counts for aliases so that they match the value passed
in `to_counts`.
"""
for alias, cur_refcount in self.alias_refcount.copy().items():
unref_amount = cur_refcount - to_counts.get(alias, 0)
self.unref_alias(alias, unref_amount)
def change_aliases(self, change_map):
"""
Change the aliases in change_map (which maps old-alias -> new-alias),
relabelling any references to them in select columns and the where
clause.
"""
# If keys and values of change_map were to intersect, an alias might be
# updated twice (e.g. T4 -> T5, T5 -> T6, so also T4 -> T6) depending
# on their order in change_map.
assert set(change_map).isdisjoint(change_map.values())
# 1. Update references in "select" (normal columns plus aliases),
# "group by" and "where".
self.where.relabel_aliases(change_map)
if isinstance(self.group_by, tuple):
self.group_by = tuple(
[col.relabeled_clone(change_map) for col in self.group_by]
)
self.select = tuple([col.relabeled_clone(change_map) for col in self.select])
self.annotations = self.annotations and {
key: col.relabeled_clone(change_map)
for key, col in self.annotations.items()
}
# 2. Rename the alias in the internal table/alias datastructures.
for old_alias, new_alias in change_map.items():
if old_alias not in self.alias_map:
continue
alias_data = self.alias_map[old_alias].relabeled_clone(change_map)
self.alias_map[new_alias] = alias_data
self.alias_refcount[new_alias] = self.alias_refcount[old_alias]
del self.alias_refcount[old_alias]
del self.alias_map[old_alias]
table_aliases = self.table_map[alias_data.table_name]
for pos, alias in enumerate(table_aliases):
if alias == old_alias:
table_aliases[pos] = new_alias
break
self.external_aliases = {
# Table is aliased or it's being changed and thus is aliased.
change_map.get(alias, alias): (aliased or alias in change_map)
for alias, aliased in self.external_aliases.items()
}
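    # Illustrative example (editor's addition): change_aliases({"T4": "T5"})
    # relabels any Col references in the select, group by, where, and
    # annotation clauses, moves the "T4" entries of alias_map/alias_refcount
    # to "T5", and updates the matching slot in table_map. The assert above
    # rules out chained renames such as T4 -> T5 -> T6.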
def bump_prefix(self, other_query, exclude=None):
"""
        Change the alias prefix to the next letter in the alphabet so that
        the other query's aliases and this query's aliases will not conflict.
        Even tables that previously had no alias will get an alias after this
        call. To prevent changing aliases, use the exclude parameter.
"""
def prefix_gen():
"""
Generate a sequence of characters in alphabetical order:
-> 'A', 'B', 'C', ...
When the alphabet is finished, the sequence will continue with the
Cartesian product:
-> 'AA', 'AB', 'AC', ...
"""
alphabet = ascii_uppercase
prefix = chr(ord(self.alias_prefix) + 1)
yield prefix
for n in count(1):
seq = alphabet[alphabet.index(prefix) :] if prefix else alphabet
for s in product(seq, repeat=n):
yield "".join(s)
prefix = None
if self.alias_prefix != other_query.alias_prefix:
# No clashes between self and outer query should be possible.
return
# Explicitly avoid infinite loop. The constant divider is based on how
# much depth recursive subquery references add to the stack. This value
# might need to be adjusted when adding or removing function calls from
# the code path in charge of performing these operations.
local_recursion_limit = sys.getrecursionlimit() // 16
for pos, prefix in enumerate(prefix_gen()):
if prefix not in self.subq_aliases:
self.alias_prefix = prefix
break
if pos > local_recursion_limit:
raise RecursionError(
"Maximum recursion depth exceeded: too many subqueries."
)
self.subq_aliases = self.subq_aliases.union([self.alias_prefix])
other_query.subq_aliases = other_query.subq_aliases.union(self.subq_aliases)
if exclude is None:
exclude = {}
self.change_aliases(
{
alias: "%s%d" % (self.alias_prefix, pos)
for pos, alias in enumerate(self.alias_map)
if alias not in exclude
}
)
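    # Illustrative note (editor's addition): with the default prefix "T",
    # prefix_gen() yields "U", then "U" through "Z", then two-letter
    # combinations "AA", "AB", ... Once a free prefix is found, existing
    # aliases are relabeled positionally to "U0", "U1", ..., skipping
    # anything in 'exclude'.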
def get_initial_alias(self):
"""
Return the first alias for this query, after increasing its reference
count.
"""
if self.alias_map:
alias = self.base_table
self.ref_alias(alias)
elif self.model:
alias = self.join(self.base_table_class(self.get_meta().db_table, None))
else:
alias = None
return alias
def count_active_tables(self):
"""
Return the number of tables in this query with a non-zero reference
count. After execution, the reference counts are zeroed, so tables
added in compiler will not be seen by this method.
"""
return len([1 for count in self.alias_refcount.values() if count])
def join(self, join, reuse=None, reuse_with_filtered_relation=False):
"""
Return an alias for the 'join', either reusing an existing alias for
that join or creating a new one. 'join' is either a base_table_class or
join_class.
The 'reuse' parameter can be either None which means all joins are
reusable, or it can be a set containing the aliases that can be reused.
The 'reuse_with_filtered_relation' parameter is used when computing
FilteredRelation instances.
A join is always created as LOUTER if the lhs alias is LOUTER to make
sure chains like t1 LOUTER t2 INNER t3 aren't generated. All new
joins are created as LOUTER if the join is nullable.
"""
if reuse_with_filtered_relation and reuse:
reuse_aliases = [
a for a, j in self.alias_map.items() if a in reuse and j.equals(join)
]
else:
reuse_aliases = [
a
for a, j in self.alias_map.items()
if (reuse is None or a in reuse) and j == join
]
if reuse_aliases:
if join.table_alias in reuse_aliases:
reuse_alias = join.table_alias
else:
# Reuse the most recent alias of the joined table
# (a many-to-many relation may be joined multiple times).
reuse_alias = reuse_aliases[-1]
self.ref_alias(reuse_alias)
return reuse_alias
# No reuse is possible, so we need a new alias.
alias, _ = self.table_alias(
join.table_name, create=True, filtered_relation=join.filtered_relation
)
if join.join_type:
if self.alias_map[join.parent_alias].join_type == LOUTER or join.nullable:
join_type = LOUTER
else:
join_type = INNER
join.join_type = join_type
join.table_alias = alias
self.alias_map[alias] = join
return alias
def join_parent_model(self, opts, model, alias, seen):
"""
Make sure the given 'model' is joined in the query. If 'model' isn't
        a parent of 'opts' or if it is None, this method is a no-op.
The 'alias' is the root alias for starting the join, 'seen' is a dict
of model -> alias of existing joins. It must also contain a mapping
of None -> some alias. This will be returned in the no-op case.
"""
if model in seen:
return seen[model]
chain = opts.get_base_chain(model)
if not chain:
return alias
curr_opts = opts
for int_model in chain:
if int_model in seen:
curr_opts = int_model._meta
alias = seen[int_model]
continue
            # Proxy models have elements in their base chain with no
            # parents; assign the new options object and skip to the
            # next base in that case.
if not curr_opts.parents[int_model]:
curr_opts = int_model._meta
continue
link_field = curr_opts.get_ancestor_link(int_model)
join_info = self.setup_joins([link_field.name], curr_opts, alias)
curr_opts = int_model._meta
alias = seen[int_model] = join_info.joins[-1]
return alias or seen[None]
def check_alias(self, alias):
if FORBIDDEN_ALIAS_PATTERN.search(alias):
raise ValueError(
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
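    # Illustrative example (editor's addition): an alias like "total_pages"
    # passes, while values such as 'bad"alias', "alias; --", or "a/*b*/c"
    # raise ValueError. This is what keeps user-supplied annotation aliases
    # out of SQL-injection territory.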
def add_annotation(self, annotation, alias, is_summary=False, select=True):
"""Add a single annotation expression to the Query."""
self.check_alias(alias)
annotation = annotation.resolve_expression(
self, allow_joins=True, reuse=None, summarize=is_summary
)
if select:
self.append_annotation_mask([alias])
else:
self.set_annotation_mask(set(self.annotation_select).difference({alias}))
self.annotations[alias] = annotation
def resolve_expression(self, query, *args, **kwargs):
clone = self.clone()
# Subqueries need to use a different set of aliases than the outer query.
clone.bump_prefix(query)
clone.subquery = True
clone.where.resolve_expression(query, *args, **kwargs)
# Resolve combined queries.
if clone.combinator:
clone.combined_queries = tuple(
[
combined_query.resolve_expression(query, *args, **kwargs)
for combined_query in clone.combined_queries
]
)
for key, value in clone.annotations.items():
resolved = value.resolve_expression(query, *args, **kwargs)
if hasattr(resolved, "external_aliases"):
resolved.external_aliases.update(clone.external_aliases)
clone.annotations[key] = resolved
# Outer query's aliases are considered external.
for alias, table in query.alias_map.items():
clone.external_aliases[alias] = (
isinstance(table, Join)
and table.join_field.related_model._meta.db_table != alias
) or (
isinstance(table, BaseTable) and table.table_name != table.table_alias
)
return clone
def get_external_cols(self):
exprs = chain(self.annotations.values(), self.where.children)
return [
col
for col in self._gen_cols(exprs, include_external=True)
if col.alias in self.external_aliases
]
def get_group_by_cols(self, alias=None):
if alias:
return [Ref(alias, self)]
external_cols = self.get_external_cols()
if any(col.possibly_multivalued for col in external_cols):
return [self]
return external_cols
def as_sql(self, compiler, connection):
# Some backends (e.g. Oracle) raise an error when a subquery contains
        # an unnecessary ORDER BY clause.
if (
self.subquery
and not connection.features.ignores_unnecessary_order_by_in_subqueries
):
self.clear_ordering(force=False)
sql, params = self.get_compiler(connection=connection).as_sql()
if self.subquery:
sql = "(%s)" % sql
return sql, params
def resolve_lookup_value(self, value, can_reuse, allow_joins):
if hasattr(value, "resolve_expression"):
value = value.resolve_expression(
self,
reuse=can_reuse,
allow_joins=allow_joins,
)
elif isinstance(value, (list, tuple)):
# The items of the iterable may be expressions and therefore need
# to be resolved independently.
values = (
self.resolve_lookup_value(sub_value, can_reuse, allow_joins)
for sub_value in value
)
type_ = type(value)
if hasattr(type_, "_make"): # namedtuple
return type_(*values)
return type_(values)
return value
def solve_lookup_type(self, lookup):
"""
Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains').
"""
lookup_splitted = lookup.split(LOOKUP_SEP)
if self.annotations:
expression, expression_lookups = refs_expression(
lookup_splitted, self.annotations
)
if expression:
return expression_lookups, (), expression
_, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta())
field_parts = lookup_splitted[0 : len(lookup_splitted) - len(lookup_parts)]
if len(lookup_parts) > 1 and not field_parts:
raise FieldError(
                'Invalid lookup "%s" for model "%s".'
% (lookup, self.get_meta().model.__name__)
)
return lookup_parts, field_parts, False
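    # Illustrative example (editor's addition): assuming a model with an
    # "author" relation, solve_lookup_type("author__name__icontains") returns
    # (["icontains"], ["author", "name"], False); if the first part instead
    # matched an annotation, the annotation expression itself is returned as
    # the third element.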
def check_query_object_type(self, value, opts, field):
"""
Check whether the object passed while querying is of the correct type.
If not, raise a ValueError specifying the wrong object.
"""
if hasattr(value, "_meta"):
if not check_rel_lookup_compatibility(value._meta.model, opts, field):
raise ValueError(
'Cannot query "%s": Must be "%s" instance.'
% (value, opts.object_name)
)
def check_related_objects(self, field, value, opts):
"""Check the type of object passed to query relations."""
if field.is_relation:
# Check that the field and the queryset use the same model in a
# query like .filter(author=Author.objects.all()). For example, the
# opts would be Author's (from the author field) and value.model
# would be Author.objects.all() queryset's .model (Author also).
# The field is the related field on the lhs side.
if (
isinstance(value, Query)
and not value.has_select_fields
and not check_rel_lookup_compatibility(value.model, opts, field)
):
raise ValueError(
'Cannot use QuerySet for "%s": Use a QuerySet for "%s".'
% (value.model._meta.object_name, opts.object_name)
)
elif hasattr(value, "_meta"):
self.check_query_object_type(value, opts, field)
elif hasattr(value, "__iter__"):
for v in value:
self.check_query_object_type(v, opts, field)
def check_filterable(self, expression):
"""Raise an error if expression cannot be used in a WHERE clause."""
if hasattr(expression, "resolve_expression") and not getattr(
expression, "filterable", True
):
raise NotSupportedError(
expression.__class__.__name__ + " is disallowed in the filter "
"clause."
)
if hasattr(expression, "get_source_expressions"):
for expr in expression.get_source_expressions():
self.check_filterable(expr)
def build_lookup(self, lookups, lhs, rhs):
"""
Try to extract transforms and lookup from given lhs.
The lhs value is something that works like SQLExpression.
The rhs value is what the lookup is going to compare against.
The lookups is a list of names to extract using get_lookup()
and get_transform().
"""
# __exact is the default lookup if one isn't given.
*transforms, lookup_name = lookups or ["exact"]
for name in transforms:
lhs = self.try_transform(lhs, name)
# First try get_lookup() so that the lookup takes precedence if the lhs
# supports both transform and lookup for the name.
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
if lhs.field.is_relation:
raise FieldError(
"Related Field got invalid lookup: {}".format(lookup_name)
)
# A lookup wasn't found. Try to interpret the name as a transform
# and do an Exact lookup against it.
lhs = self.try_transform(lhs, lookup_name)
lookup_name = "exact"
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
return
lookup = lookup_class(lhs, rhs)
        # Interpret '__exact=None' as the SQL 'IS NULL'; otherwise, reject all
# uses of None as a query value unless the lookup supports it.
if lookup.rhs is None and not lookup.can_use_none_as_rhs:
if lookup_name not in ("exact", "iexact"):
raise ValueError("Cannot use None as a query value")
return lhs.get_lookup("isnull")(lhs, True)
# For Oracle '' is equivalent to null. The check must be done at this
# stage because join promotion can't be done in the compiler. Using
# DEFAULT_DB_ALIAS isn't nice but it's the best that can be done here.
# A similar thing is done in is_nullable(), too.
if (
lookup_name == "exact"
and lookup.rhs == ""
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
):
return lhs.get_lookup("isnull")(lhs, True)
return lookup
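    # Illustrative example (editor's addition): for a DateTimeField column,
    # build_lookup(["date", "gt"], col, some_date) first applies the "date"
    # transform via try_transform() and then resolves "gt" as the lookup
    # class, mirroring the ORM syntax created__date__gt=some_date.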
def try_transform(self, lhs, name):
"""
Helper method for build_lookup(). Try to fetch and initialize
a transform for name parameter from lhs.
"""
transform_class = lhs.get_transform(name)
if transform_class:
return transform_class(lhs)
else:
output_field = lhs.output_field.__class__
suggested_lookups = difflib.get_close_matches(
name, output_field.get_lookups()
)
if suggested_lookups:
suggestion = ", perhaps you meant %s?" % " or ".join(suggested_lookups)
else:
suggestion = "."
raise FieldError(
"Unsupported lookup '%s' for %s or join on the field not "
"permitted%s" % (name, output_field.__name__, suggestion)
)
def build_filter(
self,
filter_expr,
branch_negated=False,
current_negated=False,
can_reuse=None,
allow_joins=True,
split_subq=True,
reuse_with_filtered_relation=False,
check_filterable=True,
):
"""
Build a WhereNode for a single filter clause but don't add it
to this Query. Query.add_q() will then add this filter to the where
Node.
The 'branch_negated' tells us if the current branch contains any
negations. This will be used to determine if subqueries are needed.
The 'current_negated' is used to determine if the current filter is
negated or not and this will be used to determine if IS NULL filtering
is needed.
The difference between current_negated and branch_negated is that
branch_negated is set on first negation, but current_negated is
flipped for each negation.
        Note that add_filter will not do any negating itself; that is done
        higher up in the code by add_q().
The 'can_reuse' is a set of reusable joins for multijoins.
If 'reuse_with_filtered_relation' is True, then only joins in can_reuse
will be reused.
The method will create a filter clause that can be added to the current
query. However, if the filter isn't added to the query then the caller
is responsible for unreffing the joins used.
"""
if isinstance(filter_expr, dict):
raise FieldError("Cannot parse keyword query as dict")
if isinstance(filter_expr, Q):
return self._add_q(
filter_expr,
branch_negated=branch_negated,
current_negated=current_negated,
used_aliases=can_reuse,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
)
if hasattr(filter_expr, "resolve_expression"):
if not getattr(filter_expr, "conditional", False):
raise TypeError("Cannot filter against a non-conditional expression.")
condition = filter_expr.resolve_expression(self, allow_joins=allow_joins)
if not isinstance(condition, Lookup):
condition = self.build_lookup(["exact"], condition, True)
return WhereNode([condition], connector=AND), []
arg, value = filter_expr
if not arg:
raise FieldError("Cannot parse keyword query %r" % arg)
lookups, parts, reffed_expression = self.solve_lookup_type(arg)
if check_filterable:
self.check_filterable(reffed_expression)
if not allow_joins and len(parts) > 1:
raise FieldError("Joined field references are not permitted in this query")
pre_joins = self.alias_refcount.copy()
value = self.resolve_lookup_value(value, can_reuse, allow_joins)
used_joins = {
k for k, v in self.alias_refcount.items() if v > pre_joins.get(k, 0)
}
if check_filterable:
self.check_filterable(value)
if reffed_expression:
condition = self.build_lookup(lookups, reffed_expression, value)
return WhereNode([condition], connector=AND), []
opts = self.get_meta()
alias = self.get_initial_alias()
allow_many = not branch_negated or not split_subq
try:
join_info = self.setup_joins(
parts,
opts,
alias,
can_reuse=can_reuse,
allow_many=allow_many,
reuse_with_filtered_relation=reuse_with_filtered_relation,
)
# Prevent iterator from being consumed by check_related_objects()
if isinstance(value, Iterator):
value = list(value)
self.check_related_objects(join_info.final_field, value, join_info.opts)
# split_exclude() needs to know which joins were generated for the
# lookup parts
self._lookup_joins = join_info.joins
except MultiJoin as e:
return self.split_exclude(filter_expr, can_reuse, e.names_with_path)
# Update used_joins before trimming since they are reused to determine
# which joins could be later promoted to INNER.
used_joins.update(join_info.joins)
targets, alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if can_reuse is not None:
can_reuse.update(join_list)
if join_info.final_field.is_relation:
# No support for transforms for relational fields
num_lookups = len(lookups)
if num_lookups > 1:
raise FieldError(
"Related Field got invalid lookup: {}".format(lookups[0])
)
if len(targets) == 1:
col = self._get_col(targets[0], join_info.final_field, alias)
else:
col = MultiColSource(
alias, targets, join_info.targets, join_info.final_field
)
else:
col = self._get_col(targets[0], join_info.final_field, alias)
condition = self.build_lookup(lookups, col, value)
lookup_type = condition.lookup_name
clause = WhereNode([condition], connector=AND)
require_outer = (
lookup_type == "isnull" and condition.rhs is True and not current_negated
)
if (
current_negated
and (lookup_type != "isnull" or condition.rhs is False)
and condition.rhs is not None
):
require_outer = True
if lookup_type != "isnull":
# The condition added here will be SQL like this:
# NOT (col IS NOT NULL), where the first NOT is added in
# upper layers of code. The reason for addition is that if col
# is null, then col != someval will result in SQL "unknown"
# which isn't the same as in Python. The Python None handling
# is wanted, and it can be gotten by
# (col IS NULL OR col != someval)
# <=>
# NOT (col IS NOT NULL AND col = someval).
if (
self.is_nullable(targets[0])
or self.alias_map[join_list[-1]].join_type == LOUTER
):
lookup_class = targets[0].get_lookup("isnull")
col = self._get_col(targets[0], join_info.targets[0], alias)
clause.add(lookup_class(col, False), AND)
# If someval is a nullable column, someval IS NOT NULL is
# added.
if isinstance(value, Col) and self.is_nullable(value.target):
lookup_class = value.target.get_lookup("isnull")
clause.add(lookup_class(value, False), AND)
return clause, used_joins if not require_outer else ()
def add_filter(self, filter_lhs, filter_rhs):
self.add_q(Q((filter_lhs, filter_rhs)))
def add_q(self, q_object):
"""
A preprocessor for the internal _add_q(). Responsible for doing final
join promotion.
"""
# For join promotion this case is doing an AND for the added q_object
# and existing conditions. So, any existing inner join forces the join
# type to remain inner. Existing outer joins can however be demoted.
        # (Consider the case where rel_a is LOUTER and rel_a__col=1 is added -
        # if rel_a doesn't produce any rows, then the whole condition must
        # fail. So, demotion is OK.)
existing_inner = {
a for a in self.alias_map if self.alias_map[a].join_type == INNER
}
clause, _ = self._add_q(q_object, self.used_aliases)
if clause:
self.where.add(clause, AND)
self.demote_joins(existing_inner)
def build_where(self, filter_expr):
return self.build_filter(filter_expr, allow_joins=False)[0]
def clear_where(self):
self.where = WhereNode()
def _add_q(
self,
q_object,
used_aliases,
branch_negated=False,
current_negated=False,
allow_joins=True,
split_subq=True,
check_filterable=True,
):
"""Add a Q-object to the current filter."""
connector = q_object.connector
current_negated = current_negated ^ q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = WhereNode(connector=connector, negated=q_object.negated)
joinpromoter = JoinPromoter(
q_object.connector, len(q_object.children), current_negated
)
for child in q_object.children:
child_clause, needed_inner = self.build_filter(
child,
can_reuse=used_aliases,
branch_negated=branch_negated,
current_negated=current_negated,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
)
joinpromoter.add_votes(needed_inner)
if child_clause:
target_clause.add(child_clause, connector)
needed_inner = joinpromoter.update_join_types(self)
return target_clause, needed_inner
def build_filtered_relation_q(
self, q_object, reuse, branch_negated=False, current_negated=False
):
"""Add a FilteredRelation object to the current filter."""
connector = q_object.connector
current_negated ^= q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = WhereNode(connector=connector, negated=q_object.negated)
for child in q_object.children:
if isinstance(child, Node):
child_clause = self.build_filtered_relation_q(
child,
reuse=reuse,
branch_negated=branch_negated,
current_negated=current_negated,
)
else:
child_clause, _ = self.build_filter(
child,
can_reuse=reuse,
branch_negated=branch_negated,
current_negated=current_negated,
allow_joins=True,
split_subq=False,
reuse_with_filtered_relation=True,
)
target_clause.add(child_clause, connector)
return target_clause
def add_filtered_relation(self, filtered_relation, alias):
filtered_relation.alias = alias
lookups = dict(get_children_from_q(filtered_relation.condition))
relation_lookup_parts, relation_field_parts, _ = self.solve_lookup_type(
filtered_relation.relation_name
)
if relation_lookup_parts:
raise ValueError(
"FilteredRelation's relation_name cannot contain lookups "
"(got %r)." % filtered_relation.relation_name
)
        for lookup in lookups:
lookup_parts, lookup_field_parts, _ = self.solve_lookup_type(lookup)
shift = 2 if not lookup_parts else 1
lookup_field_path = lookup_field_parts[:-shift]
for idx, lookup_field_part in enumerate(lookup_field_path):
if len(relation_field_parts) > idx:
if relation_field_parts[idx] != lookup_field_part:
raise ValueError(
"FilteredRelation's condition doesn't support "
"relations outside the %r (got %r)."
% (filtered_relation.relation_name, lookup)
)
else:
raise ValueError(
"FilteredRelation's condition doesn't support nested "
"relations deeper than the relation_name (got %r for "
"%r)." % (lookup, filtered_relation.relation_name)
)
self._filtered_relations[filtered_relation.alias] = filtered_relation
def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False):
"""
        Walk the list of names and turn them into PathInfo tuples. A single
name in 'names' can generate multiple PathInfos (m2m, for example).
'names' is the path of names to travel, 'opts' is the model Options we
start the name resolving from, 'allow_many' is as for setup_joins().
If fail_on_missing is set to True, then a name that can't be resolved
will generate a FieldError.
Return a list of PathInfo tuples. In addition return the final field
(the last used join field) and target (which is a field guaranteed to
contain the same value as the final field). Finally, return those names
that weren't found (which are likely transforms and the final lookup).
"""
path, names_with_path = [], []
for pos, name in enumerate(names):
cur_names_with_path = (name, [])
if name == "pk":
name = opts.pk.name
field = None
filtered_relation = None
try:
if opts is None:
raise FieldDoesNotExist
field = opts.get_field(name)
except FieldDoesNotExist:
if name in self.annotation_select:
field = self.annotation_select[name].output_field
elif name in self._filtered_relations and pos == 0:
filtered_relation = self._filtered_relations[name]
if LOOKUP_SEP in filtered_relation.relation_name:
parts = filtered_relation.relation_name.split(LOOKUP_SEP)
filtered_relation_path, field, _, _ = self.names_to_path(
parts,
opts,
allow_many,
fail_on_missing,
)
path.extend(filtered_relation_path[:-1])
else:
field = opts.get_field(filtered_relation.relation_name)
if field is not None:
# Fields that contain one-to-many relations with a generic
# model (like a GenericForeignKey) cannot generate reverse
# relations and therefore cannot be used for reverse querying.
if field.is_relation and not field.related_model:
raise FieldError(
"Field %r does not generate an automatic reverse "
"relation and therefore cannot be used for reverse "
"querying. If it is a GenericForeignKey, consider "
"adding a GenericRelation." % name
)
try:
model = field.model._meta.concrete_model
except AttributeError:
# QuerySet.annotate() may introduce fields that aren't
# attached to a model.
model = None
else:
# We didn't find the current field, so move position back
# one step.
pos -= 1
if pos == -1 or fail_on_missing:
available = sorted(
[
*get_field_names_from_opts(opts),
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword '%s' into field. "
"Choices are: %s" % (name, ", ".join(available))
)
break
# Check if we need any joins for concrete inheritance cases (the
# field lives in parent, but we are currently in one of its
# children)
if opts is not None and model is not opts.model:
path_to_parent = opts.get_path_to_parent(model)
if path_to_parent:
path.extend(path_to_parent)
cur_names_with_path[1].extend(path_to_parent)
opts = path_to_parent[-1].to_opts
if hasattr(field, "path_infos"):
if filtered_relation:
pathinfos = field.get_path_info(filtered_relation)
else:
pathinfos = field.path_infos
if not allow_many:
for inner_pos, p in enumerate(pathinfos):
if p.m2m:
cur_names_with_path[1].extend(pathinfos[0 : inner_pos + 1])
names_with_path.append(cur_names_with_path)
raise MultiJoin(pos + 1, names_with_path)
last = pathinfos[-1]
path.extend(pathinfos)
final_field = last.join_field
opts = last.to_opts
targets = last.target_fields
cur_names_with_path[1].extend(pathinfos)
names_with_path.append(cur_names_with_path)
else:
# Local non-relational field.
final_field = field
targets = (field,)
if fail_on_missing and pos + 1 != len(names):
raise FieldError(
"Cannot resolve keyword %r into field. Join on '%s'"
" not permitted." % (names[pos + 1], name)
)
break
return path, final_field, targets, names[pos + 1 :]
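    # Illustrative example (editor's addition): names_to_path(
    # ["author", "name"], opts) would extend the path with the PathInfos for
    # the "author" relation, resolve "name" as a local field on the related
    # model, and return any trailing unresolved names (likely transforms or
    # the final lookup) as the fourth value.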
def setup_joins(
self,
names,
opts,
alias,
can_reuse=None,
allow_many=True,
reuse_with_filtered_relation=False,
):
"""
Compute the necessary table joins for the passage through the fields
given in 'names'. 'opts' is the Options class for the current model
(which gives the table we are starting from), 'alias' is the alias for
the table to start the joining from.
        The 'can_reuse' defines the reverse foreign key joins we can reuse. It
        can be None, in which case all joins are reusable, or a set of aliases
        that can be reused. Note that non-reverse foreign keys are always
        reusable when using setup_joins().
        The 'reuse_with_filtered_relation' flag can be used to force reuse of
        the 'can_reuse' aliases even for joins that involve a filtered
        relation.
If 'allow_many' is False, then any reverse foreign key seen will
generate a MultiJoin exception.
Return the final field involved in the joins, the target field (used
for any 'where' constraint), the final 'opts' value, the joins, the
field path traveled to generate the joins, and a transform function
that takes a field and alias and is equivalent to `field.get_col(alias)`
in the simple case but wraps field transforms if they were included in
names.
The target field is the field containing the concrete value. Final
field can be something different, for example foreign key pointing to
that value. Final field is needed for example in some value
conversions (convert 'obj' in fk__id=obj to pk val using the foreign
key field for example).
"""
joins = [alias]
# The transform can't be applied yet, as joins must be trimmed later.
# To avoid making every caller of this method look up transforms
# directly, compute transforms here and create a partial that converts
# fields to the appropriate wrapped version.
def final_transformer(field, alias):
if not self.alias_cols:
alias = None
return field.get_col(alias)
# Try resolving all the names as fields first. If there's an error,
# treat trailing names as lookups until a field can be resolved.
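        # E.g. (illustrative) for names ['author', 'name', 'lower'] the pivot
        # loop first tries all three names as fields, which fails; the
        # ['author', 'name'] prefix then resolves, leaving 'lower' to be
        # treated as a transform below.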
last_field_exception = None
for pivot in range(len(names), 0, -1):
try:
path, final_field, targets, rest = self.names_to_path(
names[:pivot],
opts,
allow_many,
fail_on_missing=True,
)
except FieldError as exc:
if pivot == 1:
# The first item cannot be a lookup, so it's safe
# to raise the field error here.
raise
else:
last_field_exception = exc
else:
# The transforms are the remaining items that couldn't be
# resolved into fields.
transforms = names[pivot:]
break
for name in transforms:
def transform(field, alias, *, name, previous):
try:
wrapped = previous(field, alias)
return self.try_transform(wrapped, name)
except FieldError:
# FieldError is raised if the transform doesn't exist.
if isinstance(final_field, Field) and last_field_exception:
raise last_field_exception
else:
raise
final_transformer = functools.partial(
transform, name=name, previous=final_transformer
)
# Then, add the path to the query's joins. Note that we can't trim
# joins at this stage - we will need the information about join type
# of the trimmed joins.
for join in path:
if join.filtered_relation:
filtered_relation = join.filtered_relation.clone()
table_alias = filtered_relation.alias
else:
filtered_relation = None
table_alias = None
opts = join.to_opts
if join.direct:
nullable = self.is_nullable(join.join_field)
else:
nullable = True
connection = self.join_class(
opts.db_table,
alias,
table_alias,
INNER,
join.join_field,
nullable,
filtered_relation=filtered_relation,
)
reuse = can_reuse if join.m2m or reuse_with_filtered_relation else None
alias = self.join(
connection,
reuse=reuse,
reuse_with_filtered_relation=reuse_with_filtered_relation,
)
joins.append(alias)
if filtered_relation:
filtered_relation.path = joins[:]
return JoinInfo(final_field, targets, opts, joins, path, final_transformer)
def trim_joins(self, targets, joins, path):
"""
The 'target' parameter is the final field being joined to, 'joins'
is the full list of join aliases. The 'path' contain the PathInfos
used to create the joins.
Return the final target field and table alias and the new active
joins.
Always trim any direct join if the target column is already in the
previous table. Can't trim reverse joins as it's unknown if there's
anything on the other side of the join.
"""
joins = joins[:]
for pos, info in enumerate(reversed(path)):
if len(joins) == 1 or not info.direct:
break
if info.filtered_relation:
break
join_targets = {t.column for t in info.join_field.foreign_related_fields}
cur_targets = {t.column for t in targets}
if not cur_targets.issubset(join_targets):
break
targets_dict = {
r[1].column: r[0]
for r in info.join_field.related_fields
if r[1].column in cur_targets
}
targets = tuple(targets_dict[t.column] for t in targets)
self.unref_alias(joins.pop())
return targets, joins[-1], joins
@classmethod
def _gen_cols(cls, exprs, include_external=False):
for expr in exprs:
if isinstance(expr, Col):
yield expr
elif include_external and callable(
getattr(expr, "get_external_cols", None)
):
yield from expr.get_external_cols()
elif hasattr(expr, "get_source_expressions"):
yield from cls._gen_cols(
expr.get_source_expressions(),
include_external=include_external,
)
@classmethod
def _gen_col_aliases(cls, exprs):
yield from (expr.alias for expr in cls._gen_cols(exprs))
def resolve_ref(self, name, allow_joins=True, reuse=None, summarize=False):
annotation = self.annotations.get(name)
if annotation is not None:
if not allow_joins:
for alias in self._gen_col_aliases([annotation]):
if isinstance(self.alias_map[alias], Join):
raise FieldError(
"Joined field references are not permitted in this query"
)
if summarize:
# Summarize currently means we are doing an aggregate() query
# which is executed as a wrapped subquery if any of the
# aggregate() elements reference an existing annotation. In
# that case we need to return a Ref to the subquery's annotation.
if name not in self.annotation_select:
raise FieldError(
"Cannot aggregate over the '%s' alias. Use annotate() "
"to promote it." % name
)
return Ref(name, self.annotation_select[name])
else:
return annotation
else:
field_list = name.split(LOOKUP_SEP)
annotation = self.annotations.get(field_list[0])
if annotation is not None:
for transform in field_list[1:]:
annotation = self.try_transform(annotation, transform)
return annotation
join_info = self.setup_joins(
field_list, self.get_meta(), self.get_initial_alias(), can_reuse=reuse
)
targets, final_alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if not allow_joins and len(join_list) > 1:
raise FieldError(
"Joined field references are not permitted in this query"
)
if len(targets) > 1:
raise FieldError(
"Referencing multicolumn fields with F() objects isn't supported"
)
# Verify that the last lookup in name is a field or a transform:
# transform_function() raises FieldError if not.
transform = join_info.transform_function(targets[0], final_alias)
if reuse is not None:
reuse.update(join_list)
return transform
def split_exclude(self, filter_expr, can_reuse, names_with_path):
"""
When doing an exclude against any kind of N-to-many relation, we need
to use a subquery. This method constructs the nested query, given the
original exclude filter (filter_expr) and the portion up to the first
N-to-many relation field.
For example, if the origin filter is ~Q(child__name='foo'), filter_expr
is ('child__name', 'foo') and can_reuse is a set of joins usable for
filters in the original query.
We will turn this into equivalent of:
WHERE NOT EXISTS(
SELECT 1
FROM child
WHERE name = 'foo' AND child.parent_id = parent.id
LIMIT 1
)
"""
# Generate the inner query.
query = self.__class__(self.model)
query._filtered_relations = self._filtered_relations
filter_lhs, filter_rhs = filter_expr
if isinstance(filter_rhs, OuterRef):
filter_rhs = OuterRef(filter_rhs)
elif isinstance(filter_rhs, F):
filter_rhs = OuterRef(filter_rhs.name)
query.add_filter(filter_lhs, filter_rhs)
query.clear_ordering(force=True)
# Try to have as simple as possible subquery -> trim leading joins from
# the subquery.
trimmed_prefix, contains_louter = query.trim_start(names_with_path)
col = query.select[0]
select_field = col.target
alias = col.alias
if alias in can_reuse:
pk = select_field.model._meta.pk
# Need to add a restriction so that outer query's filters are in effect for
# the subquery, too.
query.bump_prefix(self)
lookup_class = select_field.get_lookup("exact")
# Note that the query.select[0].alias is different from alias
# due to bump_prefix above.
lookup = lookup_class(pk.get_col(query.select[0].alias), pk.get_col(alias))
query.where.add(lookup, AND)
query.external_aliases[alias] = True
lookup_class = select_field.get_lookup("exact")
lookup = lookup_class(col, ResolvedOuterRef(trimmed_prefix))
query.where.add(lookup, AND)
condition, needed_inner = self.build_filter(Exists(query))
if contains_louter:
or_null_condition, _ = self.build_filter(
("%s__isnull" % trimmed_prefix, True),
current_negated=True,
branch_negated=True,
can_reuse=can_reuse,
)
condition.add(or_null_condition, OR)
# Note that the end result will be:
# (outercol NOT IN innerq AND outercol IS NOT NULL) OR outercol IS NULL.
# This might look crazy but due to how IN works, this seems to be
# correct. If the IS NOT NULL check is removed then outercol NOT
# IN will return UNKNOWN. If the IS NULL check is removed, then if
# outercol IS NULL we will not match the row.
return condition, needed_inner
def set_empty(self):
self.where.add(NothingNode(), AND)
for query in self.combined_queries:
query.set_empty()
def is_empty(self):
return any(isinstance(c, NothingNode) for c in self.where.children)
def set_limits(self, low=None, high=None):
"""
Adjust the limits on the rows retrieved. Use low/high to set these,
as it makes it more Pythonic to read and write. When the SQL query is
created, convert them to the appropriate offset and limit values.
Apply any limits passed in here to the existing constraints. Add low
to the current low value and clamp both to any existing high value.
"""
if high is not None:
if self.high_mark is not None:
self.high_mark = min(self.high_mark, self.low_mark + high)
else:
self.high_mark = self.low_mark + high
if low is not None:
if self.high_mark is not None:
self.low_mark = min(self.high_mark, self.low_mark + low)
else:
self.low_mark = self.low_mark + low
if self.low_mark == self.high_mark:
self.set_empty()
def clear_limits(self):
"""Clear any existing limits."""
self.low_mark, self.high_mark = 0, None
@property
def is_sliced(self):
return self.low_mark != 0 or self.high_mark is not None
def has_limit_one(self):
return self.high_mark is not None and (self.high_mark - self.low_mark) == 1
def can_filter(self):
"""
Return True if adding filters to this instance is still possible.
Typically, this means no limits or offsets have been put on the results.
"""
return not self.is_sliced
def clear_select_clause(self):
"""Remove all fields from SELECT clause."""
self.select = ()
self.default_cols = False
self.select_related = False
self.set_extra_mask(())
self.set_annotation_mask(())
def clear_select_fields(self):
"""
Clear the list of fields to select (but not extra_select columns).
Some queryset types completely replace any existing list of select
columns.
"""
self.select = ()
self.values_select = ()
def add_select_col(self, col, name):
self.select += (col,)
self.values_select += (name,)
def set_select(self, cols):
self.default_cols = False
self.select = tuple(cols)
def add_distinct_fields(self, *field_names):
"""
Add and resolve the given fields to the query's "distinct on" clause.
"""
self.distinct_fields = field_names
self.distinct = True
def add_fields(self, field_names, allow_m2m=True):
"""
Add the given (model) fields to the select set. Add the field names in
the order specified.
"""
alias = self.get_initial_alias()
opts = self.get_meta()
try:
cols = []
for name in field_names:
# Join promotion note - we must not remove any rows here, so
# if there is no existing joins, use outer join.
join_info = self.setup_joins(
name.split(LOOKUP_SEP), opts, alias, allow_many=allow_m2m
)
targets, final_alias, joins = self.trim_joins(
join_info.targets,
join_info.joins,
join_info.path,
)
for target in targets:
cols.append(join_info.transform_function(target, final_alias))
if cols:
self.set_select(cols)
except MultiJoin:
raise FieldError("Invalid field name: '%s'" % name)
except FieldError:
if LOOKUP_SEP in name:
# For lookups spanning over relationships, show the error
# from the model on which the lookup failed.
raise
elif name in self.annotations:
raise FieldError(
"Cannot select the '%s' alias. Use annotate() to promote "
"it." % name
)
else:
names = sorted(
[
*get_field_names_from_opts(opts),
*self.extra,
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword %r into field. "
"Choices are: %s" % (name, ", ".join(names))
)
def add_ordering(self, *ordering):
"""
Add items from the 'ordering' sequence to the query's "order by"
clause. These items are either field names (not column names) --
possibly with a direction prefix ('-' or '?') -- or OrderBy
expressions.
If 'ordering' is empty, clear all ordering from the query.
"""
errors = []
for item in ordering:
if isinstance(item, str):
if item == "?":
continue
if item.startswith("-"):
item = item[1:]
if item in self.annotations:
continue
if self.extra and item in self.extra:
continue
# names_to_path() validates the lookup. A descriptive
                # FieldError will be raised if it's not.
self.names_to_path(item.split(LOOKUP_SEP), self.model._meta)
elif not hasattr(item, "resolve_expression"):
errors.append(item)
if getattr(item, "contains_aggregate", False):
raise FieldError(
"Using an aggregate in order_by() without also including "
"it in annotate() is not allowed: %s" % item
)
if errors:
raise FieldError("Invalid order_by arguments: %s" % errors)
if ordering:
self.order_by += ordering
else:
self.default_ordering = False
def clear_ordering(self, force=False, clear_default=True):
"""
Remove any ordering settings if the current query allows it without
side effects, set 'force' to True to clear the ordering regardless.
If 'clear_default' is True, there will be no ordering in the resulting
query (not even the model's default).
"""
if not force and (
self.is_sliced or self.distinct_fields or self.select_for_update
):
return
self.order_by = ()
self.extra_order_by = ()
if clear_default:
self.default_ordering = False
def set_group_by(self, allow_aliases=True):
"""
Expand the GROUP BY clause required by the query.
This will usually be the set of all non-aggregate fields in the
return data. If the database backend supports grouping by the
primary key, and the query would be equivalent, the optimization
will be made automatically.
"""
# Column names from JOINs to check collisions with aliases.
if allow_aliases:
column_names = set()
seen_models = set()
for join in list(self.alias_map.values())[1:]: # Skip base table.
model = join.join_field.related_model
if model not in seen_models:
column_names.update(
{field.column for field in model._meta.local_concrete_fields}
)
seen_models.add(model)
group_by = list(self.select)
if self.annotation_select:
for alias, annotation in self.annotation_select.items():
if not allow_aliases or alias in column_names:
alias = None
group_by_cols = annotation.get_group_by_cols(alias=alias)
group_by.extend(group_by_cols)
self.group_by = tuple(group_by)
def add_select_related(self, fields):
"""
Set up the select_related data structure so that we only select
certain related models (as opposed to all models, when
self.select_related=True).
"""
if isinstance(self.select_related, bool):
field_dict = {}
else:
field_dict = self.select_related
for field in fields:
d = field_dict
for part in field.split(LOOKUP_SEP):
d = d.setdefault(part, {})
self.select_related = field_dict
def add_extra(self, select, select_params, where, params, tables, order_by):
"""
Add data to the various extra_* attributes for user-created additions
to the query.
"""
if select:
# We need to pair any placeholder markers in the 'select'
# dictionary with their parameters in 'select_params' so that
# subsequent updates to the select dictionary also adjust the
# parameters appropriately.
select_pairs = {}
if select_params:
param_iter = iter(select_params)
else:
param_iter = iter([])
for name, entry in select.items():
self.check_alias(name)
entry = str(entry)
entry_params = []
pos = entry.find("%s")
while pos != -1:
if pos == 0 or entry[pos - 1] != "%":
entry_params.append(next(param_iter))
pos = entry.find("%s", pos + 2)
select_pairs[name] = (entry, entry_params)
self.extra.update(select_pairs)
if where or params:
self.where.add(ExtraWhere(where, params), AND)
if tables:
self.extra_tables += tuple(tables)
if order_by:
self.extra_order_by = order_by
def clear_deferred_loading(self):
"""Remove any fields from the deferred loading set."""
self.deferred_loading = (frozenset(), True)
def add_deferred_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
exclude from loading from the database when automatic column selection
is done. Add the new field names to any existing field names that
are deferred (or removed from any existing field names that are marked
as the only ones for immediate loading).
"""
# Fields on related models are stored in the literal double-underscore
        # format, so that we can use a set data structure. We do the foo__bar
# splitting and handling when computing the SQL column names (as part of
# get_columns()).
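        # Illustrative: after only('a', 'b') the state is ({'a', 'b'}, False);
        # a following defer('b') removes 'b' from the immediate-load set,
        # leaving ({'a'}, False), while defer('c') on a fresh query yields
        # ({'c'}, True).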
existing, defer = self.deferred_loading
if defer:
# Add to existing deferred names.
self.deferred_loading = existing.union(field_names), True
else:
# Remove names from the set of any existing "immediate load" names.
if new_existing := existing.difference(field_names):
self.deferred_loading = new_existing, False
else:
self.clear_deferred_loading()
if new_only := set(field_names).difference(existing):
self.deferred_loading = new_only, True
def add_immediate_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
retrieve when the SQL is executed ("immediate loading" fields). The
field names replace any existing immediate loading field names. If
there are field names already specified for deferred loading, remove
those names from the new field_names before storing the new names
for immediate loading. (That is, immediate loading overrides any
existing immediate values, but respects existing deferrals.)
"""
existing, defer = self.deferred_loading
field_names = set(field_names)
if "pk" in field_names:
field_names.remove("pk")
field_names.add(self.get_meta().pk.name)
if defer:
# Remove any existing deferred names from the current set before
# setting the new names.
self.deferred_loading = field_names.difference(existing), False
else:
# Replace any existing "immediate load" field names.
self.deferred_loading = frozenset(field_names), False
def set_annotation_mask(self, names):
"""Set the mask of annotations that will be returned by the SELECT."""
if names is None:
self.annotation_select_mask = None
else:
self.annotation_select_mask = set(names)
self._annotation_select_cache = None
def append_annotation_mask(self, names):
if self.annotation_select_mask is not None:
self.set_annotation_mask(self.annotation_select_mask.union(names))
def set_extra_mask(self, names):
"""
Set the mask of extra select items that will be returned by SELECT.
Don't remove them from the Query since they might be used later.
"""
if names is None:
self.extra_select_mask = None
else:
self.extra_select_mask = set(names)
self._extra_select_cache = None
def set_values(self, fields):
self.select_related = False
self.clear_deferred_loading()
self.clear_select_fields()
if fields:
field_names = []
extra_names = []
annotation_names = []
if not self.extra and not self.annotations:
# Shortcut - if there are no extra or annotations, then
# the values() clause must be just field names.
field_names = list(fields)
else:
self.default_cols = False
for f in fields:
if f in self.extra_select:
extra_names.append(f)
elif f in self.annotation_select:
annotation_names.append(f)
else:
field_names.append(f)
self.set_extra_mask(extra_names)
self.set_annotation_mask(annotation_names)
selected = frozenset(field_names + extra_names + annotation_names)
else:
field_names = [f.attname for f in self.model._meta.concrete_fields]
selected = frozenset(field_names)
# Selected annotations must be known before setting the GROUP BY
# clause.
if self.group_by is True:
self.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
self.set_group_by(allow_aliases=False)
self.clear_select_fields()
elif self.group_by:
# Resolve GROUP BY annotation references if they are not part of
# the selected fields anymore.
group_by = []
for expr in self.group_by:
if isinstance(expr, Ref) and expr.refs not in selected:
expr = self.annotations[expr.refs]
group_by.append(expr)
self.group_by = tuple(group_by)
self.values_select = tuple(field_names)
self.add_fields(field_names, True)
@property
def annotation_select(self):
"""
Return the dictionary of aggregate columns that are not masked and
should be used in the SELECT clause. Cache this result for performance.
"""
if self._annotation_select_cache is not None:
return self._annotation_select_cache
elif not self.annotations:
return {}
elif self.annotation_select_mask is not None:
self._annotation_select_cache = {
k: v
for k, v in self.annotations.items()
if k in self.annotation_select_mask
}
return self._annotation_select_cache
else:
return self.annotations
@property
def extra_select(self):
if self._extra_select_cache is not None:
return self._extra_select_cache
if not self.extra:
return {}
elif self.extra_select_mask is not None:
self._extra_select_cache = {
k: v for k, v in self.extra.items() if k in self.extra_select_mask
}
return self._extra_select_cache
else:
return self.extra
def trim_start(self, names_with_path):
"""
Trim joins from the start of the join path. The candidates for trim
are the PathInfos in names_with_path structure that are m2m joins.
Also set the select column so the start matches the join.
This method is meant to be used for generating the subquery joins &
cols in split_exclude().
Return a lookup usable for doing outerq.filter(lookup=self) and a
boolean indicating if the joins in the prefix contain a LEFT OUTER join.
_"""
all_paths = []
for _, paths in names_with_path:
all_paths.extend(paths)
contains_louter = False
# Trim and operate only on tables that were generated for
# the lookup part of the query. That is, avoid trimming
# joins generated for F() expressions.
lookup_tables = [
t for t in self.alias_map if t in self._lookup_joins or t == self.base_table
]
for trimmed_paths, path in enumerate(all_paths):
if path.m2m:
break
if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type == LOUTER:
contains_louter = True
alias = lookup_tables[trimmed_paths]
self.unref_alias(alias)
        # The path.join_field is a Rel, let's get the other side's field.
join_field = path.join_field.field
# Build the filter prefix.
paths_in_prefix = trimmed_paths
trimmed_prefix = []
for name, path in names_with_path:
if paths_in_prefix - len(path) < 0:
break
trimmed_prefix.append(name)
paths_in_prefix -= len(path)
trimmed_prefix.append(join_field.foreign_related_fields[0].name)
trimmed_prefix = LOOKUP_SEP.join(trimmed_prefix)
        # Let's still see if we can trim the first join from the inner query
# (that is, self). We can't do this for:
# - LEFT JOINs because we would miss those rows that have nothing on
# the outer side,
# - INNER JOINs from filtered relations because we would miss their
# filters.
first_join = self.alias_map[lookup_tables[trimmed_paths + 1]]
if first_join.join_type != LOUTER and not first_join.filtered_relation:
select_fields = [r[0] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths + 1]
self.unref_alias(lookup_tables[trimmed_paths])
extra_restriction = join_field.get_extra_restriction(
None, lookup_tables[trimmed_paths + 1]
)
if extra_restriction:
self.where.add(extra_restriction, AND)
else:
# TODO: It might be possible to trim more joins from the start of the
# inner query if it happens to have a longer join chain containing the
            # values in select_fields. Let's punt this one for now.
select_fields = [r[1] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths]
# The found starting point is likely a join_class instead of a
# base_table_class reference. But the first entry in the query's FROM
# clause must not be a JOIN.
for table in self.alias_map:
if self.alias_refcount[table] > 0:
self.alias_map[table] = self.base_table_class(
self.alias_map[table].table_name,
table,
)
break
self.set_select([f.get_col(select_alias) for f in select_fields])
return trimmed_prefix, contains_louter
def is_nullable(self, field):
"""
Check if the given field should be treated as nullable.
Some backends treat '' as null and Django treats such fields as
nullable for those backends. In such situations field.null can be
False even if we should treat the field as nullable.
"""
# We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have
# (nor should it have) knowledge of which connection is going to be
# used. The proper fix would be to defer all decisions where
# is_nullable() is needed to the compiler stage, but that is not easy
# to do currently.
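        # E.g. on Oracle, where interprets_empty_strings_as_nulls is True, a
        # CharField(null=False) is still treated as nullable here because ''
        # is stored and read back as NULL on that backend.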
return field.null or (
field.empty_strings_allowed
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
)
def get_order_dir(field, default="ASC"):
"""
Return the field name and direction for an order specification. For
example, '-foo' is returned as ('foo', 'DESC').
The 'default' param is used to indicate which way no prefix (or a '+'
prefix) should sort. The '-' prefix always sorts the opposite way.
"""
dirn = ORDER_DIR[default]
if field[0] == "-":
return field[1:], dirn[1]
return field, dirn[0]
def add_to_dict(data, key, value):
"""
Add "value" to the set of values for "key", whether or not "key" already
exists.
"""
if key in data:
data[key].add(value)
else:
data[key] = {value}
def is_reverse_o2o(field):
"""
Check if the given field is reverse-o2o. The field is expected to be some
sort of relation field or related object.
"""
return field.is_relation and field.one_to_one and not field.concrete
class JoinPromoter:
"""
A class to abstract away join promotion problems for complex filter
conditions.
"""
def __init__(self, connector, num_children, negated):
self.connector = connector
self.negated = negated
if self.negated:
if connector == AND:
self.effective_connector = OR
else:
self.effective_connector = AND
else:
self.effective_connector = self.connector
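        # By De Morgan's laws, NOT (a AND b) == (NOT a) OR (NOT b), so a
        # negated AND must promote joins the way an OR would, and vice versa.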
self.num_children = num_children
# Maps of table alias to how many times it is seen as required for
# inner and/or outer joins.
self.votes = Counter()
def __repr__(self):
return (
f"{self.__class__.__qualname__}(connector={self.connector!r}, "
f"num_children={self.num_children!r}, negated={self.negated!r})"
)
def add_votes(self, votes):
"""
Add single vote per item to self.votes. Parameter can be any
iterable.
"""
self.votes.update(votes)
def update_join_types(self, query):
"""
Change join types so that the generated query is as efficient as
possible, but still correct. So, change as many joins as possible
to INNER, but don't make OUTER joins INNER if that could remove
results from the query.
"""
to_promote = set()
to_demote = set()
# The effective_connector is used so that NOT (a AND b) is treated
# similarly to (a OR b) for join promotion.
for table, votes in self.votes.items():
# We must use outer joins in OR case when the join isn't contained
# in all of the joins. Otherwise the INNER JOIN itself could remove
# valid results. Consider the case where a model with rel_a and
# rel_b relations is queried with rel_a__col=1 | rel_b__col=2. Now,
            # if the rel_a join doesn't produce any rows (for example, a
            # reverse foreign key with no related rows or a null value in a
            # direct foreign key), and
# there is a matching row in rel_b with col=2, then an INNER join
# to rel_a would remove a valid match from the query. So, we need
# to promote any existing INNER to LOUTER (it is possible this
# promotion in turn will be demoted later on).
if self.effective_connector == "OR" and votes < self.num_children:
to_promote.add(table)
# If connector is AND and there is a filter that can match only
# when there is a joinable row, then use INNER. For example, in
# rel_a__col=1 & rel_b__col=2, if either of the rels produce NULL
# as join output, then the col=1 or col=2 can't match (as
# NULL=anything is always false).
# For the OR case, if all children voted for a join to be inner,
# then we can use INNER for the join. For example:
# (rel_a__col__icontains=Alex | rel_a__col__icontains=Russell)
# then if rel_a doesn't produce any rows, the whole condition
# can't match. Hence we can safely use INNER join.
if self.effective_connector == "AND" or (
self.effective_connector == "OR" and votes == self.num_children
):
to_demote.add(table)
# Finally, what happens in cases where we have:
# (rel_a__col=1|rel_b__col=2) & rel_a__col__gte=0
# Now, we first generate the OR clause, and promote joins for it
# in the first if branch above. Both rel_a and rel_b are promoted
# to LOUTER joins. After that we do the AND case. The OR case
# voted no inner joins but the rel_a__col__gte=0 votes inner join
# for rel_a. We demote it back to INNER join (in AND case a single
# vote is enough). The demotion is OK, if rel_a doesn't produce
# rows, then the rel_a__col__gte=0 clause can't be true, and thus
# the whole clause must be false. So, it is safe to use INNER
# join.
# Note that in this example we could just as well have the __gte
# clause and the OR clause swapped. Or we could replace the __gte
# clause with an OR clause containing rel_a__col=1|rel_a__col=2,
# and again we could safely demote to INNER.
query.promote_joins(to_promote)
query.demote_joins(to_demote)
return to_demote
7ec28b7a19736b4501f5476aaeca763e66ac5bffd091b79be9327d6a88825bec
import collections
import json
import re
from functools import partial
from itertools import chain
from django.core.exceptions import EmptyResultSet, FieldError
from django.db import DatabaseError, NotSupportedError
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import F, OrderBy, RawSQL, Ref, Value
from django.db.models.functions import Cast, Random
from django.db.models.query_utils import select_related_descend
from django.db.models.sql.constants import (
CURSOR,
GET_ITERATOR_CHUNK_SIZE,
MULTI,
NO_RESULTS,
ORDER_DIR,
SINGLE,
)
from django.db.models.sql.query import Query, get_order_dir
from django.db.transaction import TransactionManagementError
from django.utils.functional import cached_property
from django.utils.hashable import make_hashable
from django.utils.regex_helper import _lazy_re_compile
class SQLCompiler:
# Multiline ordering SQL clause may appear from RawSQL.
ordering_parts = _lazy_re_compile(
r"^(.*)\s(?:ASC|DESC).*",
re.MULTILINE | re.DOTALL,
)
def __init__(self, query, connection, using, elide_empty=True):
self.query = query
self.connection = connection
self.using = using
# Some queries, e.g. coalesced aggregation, need to be executed even if
# they would return an empty result set.
self.elide_empty = elide_empty
self.quote_cache = {"*": "*"}
        # The select, klass_info, and annotations are needed by
        # QuerySet.iterator(); these are set as a side-effect of executing
        # the query. Note that we calculate
# separately a list of extra select columns needed for grammatical correctness
# of the query, but these columns are not included in self.select.
self.select = None
self.annotation_col_map = None
self.klass_info = None
self._meta_ordering = None
def __repr__(self):
return (
f"<{self.__class__.__qualname__} "
f"model={self.query.model.__qualname__} "
f"connection={self.connection!r} using={self.using!r}>"
)
def setup_query(self):
if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map):
self.query.get_initial_alias()
self.select, self.klass_info, self.annotation_col_map = self.get_select()
self.col_count = len(self.select)
def pre_sql_setup(self):
"""
Do any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
"""
self.setup_query()
order_by = self.get_order_by()
self.where, self.having = self.query.where.split_having()
extra_select = self.get_extra_select(order_by, self.select)
self.has_extra_select = bool(extra_select)
group_by = self.get_group_by(self.select + extra_select, order_by)
return extra_select, order_by, group_by
def get_group_by(self, select, order_by):
"""
Return a list of 2-tuples of form (sql, params).
The logic of what exactly the GROUP BY clause contains is hard
to describe in other words than "if it passes the test suite,
then it is correct".
"""
# Some examples:
# SomeModel.objects.annotate(Count('somecol'))
# GROUP BY: all fields of the model
#
# SomeModel.objects.values('name').annotate(Count('somecol'))
# GROUP BY: name
#
# SomeModel.objects.annotate(Count('somecol')).values('name')
# GROUP BY: all cols of the model
#
# SomeModel.objects.values('name', 'pk')
# .annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# In fact, the self.query.group_by is the minimal set to GROUP BY. It
# can't be ever restricted to a smaller set, but additional columns in
# HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately
# the end result is that it is impossible to force the query to have
# a chosen GROUP BY clause - you can almost do this by using the form:
# .values(*wanted_cols).annotate(AnAggregate())
# but any later annotations, extra selects, values calls that
        # refer to some column outside of the wanted_cols, order_by, or even
# filter calls can alter the GROUP BY clause.
# The query.group_by is either None (no GROUP BY at all), True
# (group by select fields), or a list of expressions to be added
# to the group by.
if self.query.group_by is None:
return []
expressions = []
if self.query.group_by is not True:
# If the group by is set to a list (by .values() call most likely),
# then we need to add everything in it to the GROUP BY clause.
# Backwards compatibility hack for setting query.group_by. Remove
# when we have public API way of forcing the GROUP BY clause.
# Converts string references to expressions.
for expr in self.query.group_by:
if not hasattr(expr, "as_sql"):
expressions.append(self.query.resolve_ref(expr))
else:
expressions.append(expr)
# Note that even if the group_by is set, it is only the minimal
# set to group by. So, we need to add cols in select, order_by, and
# having into the select in any case.
ref_sources = {expr.source for expr in expressions if isinstance(expr, Ref)}
for expr, _, _ in select:
# Skip members of the select clause that are already included
# by reference.
if expr in ref_sources:
continue
cols = expr.get_group_by_cols()
for col in cols:
expressions.append(col)
if not self._meta_ordering:
for expr, (sql, params, is_ref) in order_by:
# Skip references to the SELECT clause, as all expressions in
# the SELECT clause are already part of the GROUP BY.
if not is_ref:
expressions.extend(expr.get_group_by_cols())
having_group_by = self.having.get_group_by_cols() if self.having else ()
for expr in having_group_by:
expressions.append(expr)
result = []
seen = set()
expressions = self.collapse_group_by(expressions, having_group_by)
for expr in expressions:
sql, params = self.compile(expr)
sql, params = expr.select_format(self, sql, params)
params_hash = make_hashable(params)
if (sql, params_hash) not in seen:
result.append((sql, params))
seen.add((sql, params_hash))
return result
def collapse_group_by(self, expressions, having):
# If the DB can group by primary key, then group by the primary key of
# query's main model. Note that for PostgreSQL the GROUP BY clause must
# include the primary key of every table, but for MySQL it is enough to
# have the main table's primary key.
if self.connection.features.allows_group_by_pk:
# Determine if the main model's primary key is in the query.
pk = None
for expr in expressions:
# Is this a reference to query's base table primary key? If the
# expression isn't a Col-like, then skip the expression.
if (
getattr(expr, "target", None) == self.query.model._meta.pk
and getattr(expr, "alias", None) == self.query.base_table
):
pk = expr
break
# If the main model's primary key is in the query, group by that
# field, HAVING expressions, and expressions associated with tables
# that don't have a primary key included in the grouped columns.
if pk:
pk_aliases = {
expr.alias
for expr in expressions
if hasattr(expr, "target") and expr.target.primary_key
}
expressions = [pk] + [
expr
for expr in expressions
if expr in having
or (
getattr(expr, "alias", None) is not None
and expr.alias not in pk_aliases
)
]
elif self.connection.features.allows_group_by_selected_pks:
# Filter out all expressions associated with a table's primary key
# present in the grouped columns. This is done by identifying all
# tables that have their primary key included in the grouped
# columns and removing non-primary key columns referring to them.
# Unmanaged models are excluded because they could be representing
# database views on which the optimization might not be allowed.
pks = {
expr
for expr in expressions
if (
hasattr(expr, "target")
and expr.target.primary_key
and self.connection.features.allows_group_by_selected_pks_on_model(
expr.target.model
)
)
}
aliases = {expr.alias for expr in pks}
expressions = [
expr
for expr in expressions
if expr in pks or getattr(expr, "alias", None) not in aliases
]
return expressions
def get_select(self):
"""
Return three values:
- a list of 3-tuples of (expression, (sql, params), alias)
- a klass_info structure,
- a dictionary of annotations
The (sql, params) is what the expression will produce, and alias is the
"AS alias" for the column (possibly None).
The klass_info structure contains the following information:
- The base model of the query.
- Which columns for that model are present in the query (by
position of the select clause).
        - related_klass_infos: [f, klass_info] to descend into
        The annotations return value is a dictionary of
        {'attname': column position} entries.
"""
select = []
klass_info = None
annotations = {}
select_idx = 0
for alias, (sql, params) in self.query.extra_select.items():
annotations[alias] = select_idx
select.append((RawSQL(sql, params), alias))
select_idx += 1
assert not (self.query.select and self.query.default_cols)
if self.query.default_cols:
cols = self.get_default_columns()
else:
# self.query.select is a special case. These columns never go to
# any model.
cols = self.query.select
if cols:
select_list = []
for col in cols:
select_list.append(select_idx)
select.append((col, None))
select_idx += 1
klass_info = {
"model": self.query.model,
"select_fields": select_list,
}
for alias, annotation in self.query.annotation_select.items():
annotations[alias] = select_idx
select.append((annotation, alias))
select_idx += 1
if self.query.select_related:
related_klass_infos = self.get_related_selections(select)
klass_info["related_klass_infos"] = related_klass_infos
def get_select_from_parent(klass_info):
for ki in klass_info["related_klass_infos"]:
if ki["from_parent"]:
ki["select_fields"] = (
klass_info["select_fields"] + ki["select_fields"]
)
get_select_from_parent(ki)
get_select_from_parent(klass_info)
ret = []
for col, alias in select:
try:
sql, params = self.compile(col)
except EmptyResultSet:
empty_result_set_value = getattr(
col, "empty_result_set_value", NotImplemented
)
if empty_result_set_value is NotImplemented:
# Select a predicate that's always False.
sql, params = "0", ()
else:
sql, params = self.compile(Value(empty_result_set_value))
else:
sql, params = col.select_format(self, sql, params)
ret.append((col, (sql, params), alias))
return ret, klass_info, annotations
def _order_by_pairs(self):
if self.query.extra_order_by:
ordering = self.query.extra_order_by
elif not self.query.default_ordering:
ordering = self.query.order_by
elif self.query.order_by:
ordering = self.query.order_by
elif (meta := self.query.get_meta()) and meta.ordering:
ordering = meta.ordering
self._meta_ordering = ordering
else:
ordering = []
if self.query.standard_ordering:
default_order, _ = ORDER_DIR["ASC"]
else:
default_order, _ = ORDER_DIR["DESC"]
for field in ordering:
if hasattr(field, "resolve_expression"):
if isinstance(field, Value):
# output_field must be resolved for constants.
field = Cast(field, field.output_field)
if not isinstance(field, OrderBy):
field = field.asc()
if not self.query.standard_ordering:
field = field.copy()
field.reverse_ordering()
yield field, False
continue
if field == "?": # random
yield OrderBy(Random()), False
continue
col, order = get_order_dir(field, default_order)
descending = order == "DESC"
if col in self.query.annotation_select:
# Reference to expression in SELECT clause
yield (
OrderBy(
Ref(col, self.query.annotation_select[col]),
descending=descending,
),
True,
)
continue
if col in self.query.annotations:
# References to an expression which is masked out of the SELECT
# clause.
if self.query.combinator and self.select:
                    # Don't use the resolved annotation because other
                    # combined queries might define it differently.
expr = F(col)
else:
expr = self.query.annotations[col]
if isinstance(expr, Value):
# output_field must be resolved for constants.
expr = Cast(expr, expr.output_field)
yield OrderBy(expr, descending=descending), False
continue
if "." in field:
# This came in through an extra(order_by=...) addition. Pass it
# on verbatim.
table, col = col.split(".", 1)
yield (
OrderBy(
RawSQL(
"%s.%s" % (self.quote_name_unless_alias(table), col), []
),
descending=descending,
),
False,
)
continue
if self.query.extra and col in self.query.extra:
if col in self.query.extra_select:
yield (
OrderBy(
Ref(col, RawSQL(*self.query.extra[col])),
descending=descending,
),
True,
)
else:
yield (
OrderBy(RawSQL(*self.query.extra[col]), descending=descending),
False,
)
else:
if self.query.combinator and self.select:
                    # Don't use the first model's field because other
                    # combined queries might define it differently.
yield OrderBy(F(col), descending=descending), False
else:
# 'col' is of the form 'field' or 'field1__field2' or
# '-field1__field2__field', etc.
yield from self.find_ordering_name(
field,
self.query.get_meta(),
default_order=default_order,
)
def get_order_by(self):
"""
Return a list of 2-tuples of the form (expr, (sql, params, is_ref)) for
the ORDER BY clause.
The order_by clause can alter the select clause (for example it can add
aliases to clauses that do not yet have one, or it can add totally new
select clauses).
"""
result = []
seen = set()
for expr, is_ref in self._order_by_pairs():
resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None)
if self.query.combinator and self.select:
src = resolved.get_source_expressions()[0]
expr_src = expr.get_source_expressions()[0]
# Relabel order by columns to raw numbers if this is a combined
# query; necessary since the columns can't be referenced by the
# fully qualified name and the simple column names may collide.
for idx, (sel_expr, _, col_alias) in enumerate(self.select):
if is_ref and col_alias == src.refs:
src = src.source
elif col_alias and not (
isinstance(expr_src, F) and col_alias == expr_src.name
):
continue
if src == sel_expr:
resolved.set_source_expressions([RawSQL("%d" % (idx + 1), ())])
break
else:
if col_alias:
raise DatabaseError(
"ORDER BY term does not match any column in the result set."
)
# Add column used in ORDER BY clause to the selected
# columns and to each combined query.
order_by_idx = len(self.query.select) + 1
col_name = f"__orderbycol{order_by_idx}"
for q in self.query.combined_queries:
q.add_annotation(expr_src, col_name)
self.query.add_select_col(resolved, col_name)
resolved.set_source_expressions([RawSQL(f"{order_by_idx}", ())])
sql, params = self.compile(resolved)
# Don't add the same column twice, but the order direction is
# not taken into account so we strip it. When this entire method
# is refactored into expressions, then we can check each part as we
# generate it.
without_ordering = self.ordering_parts.search(sql)[1]
params_hash = make_hashable(params)
if (without_ordering, params_hash) in seen:
continue
seen.add((without_ordering, params_hash))
result.append((resolved, (sql, params, is_ref)))
return result
def get_extra_select(self, order_by, select):
extra_select = []
if self.query.distinct and not self.query.distinct_fields:
select_sql = [t[1] for t in select]
for expr, (sql, params, is_ref) in order_by:
without_ordering = self.ordering_parts.search(sql)[1]
if not is_ref and (without_ordering, params) not in select_sql:
extra_select.append((expr, (without_ordering, params), None))
return extra_select
def quote_name_unless_alias(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
quoted strings specially (e.g. PostgreSQL).
"""
if name in self.quote_cache:
return self.quote_cache[name]
if (
(name in self.query.alias_map and name not in self.query.table_map)
or name in self.query.extra_select
or (
self.query.external_aliases.get(name)
and name not in self.query.table_map
)
):
self.quote_cache[name] = name
return name
r = self.connection.ops.quote_name(name)
self.quote_cache[name] = r
return r
def compile(self, node):
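        # Vendor dispatch (sketch): a node defining, e.g.,
        # as_postgresql(compiler, connection) takes precedence over its
        # generic as_sql() when self.connection.vendor == 'postgresql'.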
vendor_impl = getattr(node, "as_" + self.connection.vendor, None)
if vendor_impl:
sql, params = vendor_impl(self, self.connection)
else:
sql, params = node.as_sql(self, self.connection)
return sql, params
def get_combinator_sql(self, combinator, all):
features = self.connection.features
compilers = [
query.get_compiler(self.using, self.connection, self.elide_empty)
for query in self.query.combined_queries
if not query.is_empty()
]
if not features.supports_slicing_ordering_in_compound:
for query, compiler in zip(self.query.combined_queries, compilers):
if query.low_mark or query.high_mark:
raise DatabaseError(
"LIMIT/OFFSET not allowed in subqueries of compound statements."
)
if compiler.get_order_by():
raise DatabaseError(
"ORDER BY not allowed in subqueries of compound statements."
)
parts = ()
for compiler in compilers:
try:
# If the columns list is limited, then all combined queries
# must have the same columns list. Set the selects defined on
# the query on all combined queries, if not already set.
if not compiler.query.values_select and self.query.values_select:
compiler.query = compiler.query.clone()
compiler.query.set_values(
(
*self.query.extra_select,
*self.query.values_select,
*self.query.annotation_select,
)
)
part_sql, part_args = compiler.as_sql()
if compiler.query.combinator:
# Wrap in a subquery if wrapping in parentheses isn't
# supported.
if not features.supports_parentheses_in_compound:
part_sql = "SELECT * FROM ({})".format(part_sql)
# Add parentheses when combining with compound query if not
# already added for all compound queries.
elif (
self.query.subquery
or not features.supports_slicing_ordering_in_compound
):
part_sql = "({})".format(part_sql)
parts += ((part_sql, part_args),)
except EmptyResultSet:
# Omit the empty queryset with UNION and with DIFFERENCE if the
# first queryset is nonempty.
if combinator == "union" or (combinator == "difference" and parts):
continue
raise
if not parts:
raise EmptyResultSet
combinator_sql = self.connection.ops.set_operators[combinator]
if all and combinator == "union":
combinator_sql += " ALL"
braces = "{}"
if not self.query.subquery and features.supports_slicing_ordering_in_compound:
braces = "({})"
sql_parts, args_parts = zip(
*((braces.format(sql), args) for sql, args in parts)
)
result = [" {} ".format(combinator_sql).join(sql_parts)]
params = []
for part in args_parts:
params.extend(part)
return result, params
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
If 'with_limits' is False, any limit/offset information is not included
in the query.
"""
refcounts_before = self.query.alias_refcount.copy()
try:
extra_select, order_by, group_by = self.pre_sql_setup()
for_update_part = None
# Is a LIMIT/OFFSET clause needed?
with_limit_offset = with_limits and (
self.query.high_mark is not None or self.query.low_mark
)
combinator = self.query.combinator
features = self.connection.features
if combinator:
if not getattr(features, "supports_select_{}".format(combinator)):
raise NotSupportedError(
"{} is not supported on this database backend.".format(
combinator
)
)
result, params = self.get_combinator_sql(
combinator, self.query.combinator_all
)
else:
distinct_fields, distinct_params = self.get_distinct()
# This must come after 'select', 'ordering', and 'distinct'
# (see docstring of get_from_clause() for details).
from_, f_params = self.get_from_clause()
try:
where, w_params = (
self.compile(self.where) if self.where is not None else ("", [])
)
except EmptyResultSet:
if self.elide_empty:
raise
# Use a predicate that's always False.
where, w_params = "0 = 1", []
having, h_params = (
self.compile(self.having) if self.having is not None else ("", [])
)
result = ["SELECT"]
params = []
if self.query.distinct:
distinct_result, distinct_params = self.connection.ops.distinct_sql(
distinct_fields,
distinct_params,
)
result += distinct_result
params += distinct_params
out_cols = []
col_idx = 1
for _, (s_sql, s_params), alias in self.select + extra_select:
if alias:
s_sql = "%s AS %s" % (
s_sql,
self.connection.ops.quote_name(alias),
)
elif with_col_aliases:
s_sql = "%s AS %s" % (
s_sql,
self.connection.ops.quote_name("col%d" % col_idx),
)
col_idx += 1
params.extend(s_params)
out_cols.append(s_sql)
result += [", ".join(out_cols)]
if from_:
result += ["FROM", *from_]
elif self.connection.features.bare_select_suffix:
result += [self.connection.features.bare_select_suffix]
params.extend(f_params)
if self.query.select_for_update and features.has_select_for_update:
if self.connection.get_autocommit():
raise TransactionManagementError(
"select_for_update cannot be used outside of a transaction."
)
if (
with_limit_offset
and not features.supports_select_for_update_with_limit
):
raise NotSupportedError(
"LIMIT/OFFSET is not supported with "
"select_for_update on this database backend."
)
nowait = self.query.select_for_update_nowait
skip_locked = self.query.select_for_update_skip_locked
of = self.query.select_for_update_of
no_key = self.query.select_for_no_key_update
# If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the
# backend doesn't support it, raise NotSupportedError to
# prevent a possible deadlock.
if nowait and not features.has_select_for_update_nowait:
raise NotSupportedError(
"NOWAIT is not supported on this database backend."
)
elif skip_locked and not features.has_select_for_update_skip_locked:
raise NotSupportedError(
"SKIP LOCKED is not supported on this database backend."
)
elif of and not features.has_select_for_update_of:
raise NotSupportedError(
"FOR UPDATE OF is not supported on this database backend."
)
elif no_key and not features.has_select_for_no_key_update:
raise NotSupportedError(
"FOR NO KEY UPDATE is not supported on this "
"database backend."
)
for_update_part = self.connection.ops.for_update_sql(
nowait=nowait,
skip_locked=skip_locked,
of=self.get_select_for_update_of_arguments(),
no_key=no_key,
)
if for_update_part and features.for_update_after_from:
result.append(for_update_part)
if where:
result.append("WHERE %s" % where)
params.extend(w_params)
grouping = []
for g_sql, g_params in group_by:
grouping.append(g_sql)
params.extend(g_params)
if grouping:
if distinct_fields:
raise NotImplementedError(
"annotate() + distinct(fields) is not implemented."
)
order_by = order_by or self.connection.ops.force_no_ordering()
result.append("GROUP BY %s" % ", ".join(grouping))
if self._meta_ordering:
order_by = None
if having:
result.append("HAVING %s" % having)
params.extend(h_params)
if self.query.explain_info:
result.insert(
0,
self.connection.ops.explain_query_prefix(
self.query.explain_info.format,
**self.query.explain_info.options,
),
)
if order_by:
ordering = []
for _, (o_sql, o_params, _) in order_by:
ordering.append(o_sql)
params.extend(o_params)
result.append("ORDER BY %s" % ", ".join(ordering))
if with_limit_offset:
result.append(
self.connection.ops.limit_offset_sql(
self.query.low_mark, self.query.high_mark
)
)
if for_update_part and not features.for_update_after_from:
result.append(for_update_part)
if self.query.subquery and extra_select:
# If the query is used as a subquery, the extra selects would
# result in more columns than the left-hand side expression is
# expecting. This can happen when a subquery uses a combination
# of order_by() and distinct(), forcing the ordering expressions
# to be selected as well. Wrap the query in another subquery
# to exclude extraneous selects.
sub_selects = []
sub_params = []
for index, (select, _, alias) in enumerate(self.select, start=1):
if not alias and with_col_aliases:
alias = "col%d" % index
if alias:
sub_selects.append(
"%s.%s"
% (
self.connection.ops.quote_name("subquery"),
self.connection.ops.quote_name(alias),
)
)
else:
select_clone = select.relabeled_clone(
{select.alias: "subquery"}
)
subselect, subparams = select_clone.as_sql(
self, self.connection
)
sub_selects.append(subselect)
sub_params.extend(subparams)
return "SELECT %s FROM (%s) subquery" % (
", ".join(sub_selects),
" ".join(result),
), tuple(sub_params + params)
return " ".join(result), tuple(params)
finally:
# Finally do cleanup - get rid of the joins we created above.
self.query.reset_refcounts(refcounts_before)
def get_default_columns(self, start_alias=None, opts=None, from_parent=None):
"""
Compute the default columns for selecting every field in the base
model. Will sometimes be called to pull in related models (e.g. via
select_related), in which case "opts" and "start_alias" will be given
to provide a starting point for the traversal.
        Return a list of column expressions (Col instances), one for each
        concrete field that should be loaded, suitable for inclusion in the
        SELECT clause.
"""
result = []
if opts is None:
if (opts := self.query.get_meta()) is None:
return result
only_load = self.deferred_to_columns()
start_alias = start_alias or self.query.get_initial_alias()
# The 'seen_models' is used to optimize checking the needed parent
# alias for a given field. This also includes None -> start_alias to
# be used by local fields.
seen_models = {None: start_alias}
for field in opts.concrete_fields:
model = field.model._meta.concrete_model
# A proxy model will have a different model and concrete_model. We
# will assign None if the field belongs to this model.
if model == opts.model:
model = None
if (
from_parent
and model is not None
and issubclass(
from_parent._meta.concrete_model, model._meta.concrete_model
)
):
# Avoid loading data for already loaded parents.
# We end up here in the case select_related() resolution
# proceeds from parent model to child model. In that case the
# parent model data is already present in the SELECT clause,
# and we want to avoid reloading the same data again.
continue
if field.model in only_load and field.attname not in only_load[field.model]:
continue
alias = self.query.join_parent_model(opts, model, start_alias, seen_models)
column = field.get_col(alias)
result.append(column)
return result
def get_distinct(self):
"""
Return a quoted list of fields to use in DISTINCT ON part of the query.
This method can alter the tables in the query, and thus it must be
called before get_from_clause().
"""
result = []
params = []
opts = self.query.get_meta()
for name in self.query.distinct_fields:
parts = name.split(LOOKUP_SEP)
_, targets, alias, joins, path, _, transform_function = self._setup_joins(
parts, opts, None
)
targets, alias, _ = self.query.trim_joins(targets, joins, path)
for target in targets:
if name in self.query.annotation_select:
result.append(self.connection.ops.quote_name(name))
else:
r, p = self.compile(transform_function(target, alias))
result.append(r)
params.append(p)
return result, params
def find_ordering_name(
self, name, opts, alias=None, default_order="ASC", already_seen=None
):
"""
Return the table alias (the name might be ambiguous, the alias will
not be) and column name for ordering by the given 'name' parameter.
The 'name' is of the form 'field1__field2__...__fieldN'.
"""
name, order = get_order_dir(name, default_order)
descending = order == "DESC"
pieces = name.split(LOOKUP_SEP)
(
field,
targets,
alias,
joins,
path,
opts,
transform_function,
) = self._setup_joins(pieces, opts, alias)
# If we get to this point and the field is a relation to another model,
# append the default ordering for that model unless it is the pk
# shortcut or the attribute name of the field that is specified.
if (
field.is_relation
and opts.ordering
and getattr(field, "attname", None) != pieces[-1]
and name != "pk"
):
# Firstly, avoid infinite loops.
already_seen = already_seen or set()
join_tuple = tuple(
getattr(self.query.alias_map[j], "join_cols", None) for j in joins
)
if join_tuple in already_seen:
raise FieldError("Infinite loop caused by ordering.")
already_seen.add(join_tuple)
results = []
for item in opts.ordering:
if hasattr(item, "resolve_expression") and not isinstance(
item, OrderBy
):
item = item.desc() if descending else item.asc()
if isinstance(item, OrderBy):
results.append((item, False))
continue
results.extend(
self.find_ordering_name(item, opts, alias, order, already_seen)
)
return results
targets, alias, _ = self.query.trim_joins(targets, joins, path)
return [
(OrderBy(transform_function(t, alias), descending=descending), False)
for t in targets
]
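    # Hedged sketch of the relation branch above (model names hypothetical):
    # ordering by "author" when Author's Meta.ordering is ["name"] recurses
    # into find_ordering_name("name", ...) against the joined alias, so the
    # final ORDER BY follows the related model's default ordering instead of
    # the raw foreign key column. The already_seen join tuples guard against
    # mutually-ordered models recursing forever.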
def _setup_joins(self, pieces, opts, alias):
"""
Helper method for get_order_by() and get_distinct().
        get_order_by() and get_distinct() must produce the same target
        columns for the same input, as their prefixes must match. Executing
        SQL where this is not true is an error.
"""
alias = alias or self.query.get_initial_alias()
field, targets, opts, joins, path, transform_function = self.query.setup_joins(
pieces, opts, alias
)
alias = joins[-1]
return field, targets, alias, joins, path, opts, transform_function
def get_from_clause(self):
"""
        Return a list of strings that are joined together to go after the
        "FROM" part of the query, as well as a list of any extra parameters
        that need to be included. Subclasses can override this to create a
        from-clause via a "select".
This should only be called after any SQL construction methods that
might change the tables that are needed. This means the select columns,
ordering, and distinct must be done first.
"""
result = []
params = []
for alias in tuple(self.query.alias_map):
if not self.query.alias_refcount[alias]:
continue
try:
from_clause = self.query.alias_map[alias]
except KeyError:
                # Extra tables can end up in self.query.extra_tables, but not
                # in the alias_map if they aren't in a join. That's OK. We
                # skip them.
continue
clause_sql, clause_params = self.compile(from_clause)
result.append(clause_sql)
params.extend(clause_params)
for t in self.query.extra_tables:
alias, _ = self.query.table_alias(t)
# Only add the alias if it's not already present (the table_alias()
# call increments the refcount, so an alias refcount of one means
# this is the only reference).
if (
alias not in self.query.alias_map
or self.query.alias_refcount[alias] == 1
):
result.append(", %s" % self.quote_name_unless_alias(alias))
return result, params
def get_related_selections(
self,
select,
opts=None,
root_alias=None,
cur_depth=1,
requested=None,
restricted=None,
):
"""
Fill in the information needed for a select_related query. The current
depth is measured as the number of connections away from the root model
(for example, cur_depth=1 means we are looking at models with direct
connections to the root model).
"""
def _get_field_choices():
direct_choices = (f.name for f in opts.fields if f.is_relation)
reverse_choices = (
f.field.related_query_name()
for f in opts.related_objects
if f.field.unique
)
return chain(
direct_choices, reverse_choices, self.query._filtered_relations
)
related_klass_infos = []
if not restricted and cur_depth > self.query.max_depth:
# We've recursed far enough; bail out.
return related_klass_infos
if not opts:
opts = self.query.get_meta()
root_alias = self.query.get_initial_alias()
only_load = self.deferred_to_columns()
# Setup for the case when only particular related fields should be
# included in the related selection.
fields_found = set()
if requested is None:
restricted = isinstance(self.query.select_related, dict)
if restricted:
requested = self.query.select_related
def get_related_klass_infos(klass_info, related_klass_infos):
klass_info["related_klass_infos"] = related_klass_infos
for f in opts.fields:
field_model = f.model._meta.concrete_model
fields_found.add(f.name)
if restricted:
next = requested.get(f.name, {})
if not f.is_relation:
# If a non-related field is used like a relation,
# or if a single non-relational field is given.
if next or f.name in requested:
raise FieldError(
"Non-relational field given in select_related: '%s'. "
"Choices are: %s"
% (
f.name,
", ".join(_get_field_choices()) or "(none)",
)
)
else:
next = False
if not select_related_descend(
f, restricted, requested, only_load.get(field_model)
):
continue
klass_info = {
"model": f.remote_field.model,
"field": f,
"reverse": False,
"local_setter": f.set_cached_value,
"remote_setter": f.remote_field.set_cached_value
if f.unique
else lambda x, y: None,
"from_parent": False,
}
related_klass_infos.append(klass_info)
select_fields = []
_, _, _, joins, _, _ = self.query.setup_joins([f.name], opts, root_alias)
alias = joins[-1]
columns = self.get_default_columns(
start_alias=alias, opts=f.remote_field.model._meta
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info["select_fields"] = select_fields
next_klass_infos = self.get_related_selections(
select,
f.remote_field.model._meta,
alias,
cur_depth + 1,
next,
restricted,
)
get_related_klass_infos(klass_info, next_klass_infos)
if restricted:
related_fields = [
(o.field, o.related_model)
for o in opts.related_objects
if o.field.unique and not o.many_to_many
]
for f, model in related_fields:
if not select_related_descend(
f, restricted, requested, only_load.get(model), reverse=True
):
continue
related_field_name = f.related_query_name()
fields_found.add(related_field_name)
join_info = self.query.setup_joins(
[related_field_name], opts, root_alias
)
alias = join_info.joins[-1]
from_parent = issubclass(model, opts.model) and model is not opts.model
klass_info = {
"model": model,
"field": f,
"reverse": True,
"local_setter": f.remote_field.set_cached_value,
"remote_setter": f.set_cached_value,
"from_parent": from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
start_alias=alias, opts=model._meta, from_parent=opts.model
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info["select_fields"] = select_fields
next = requested.get(f.related_query_name(), {})
next_klass_infos = self.get_related_selections(
select, model._meta, alias, cur_depth + 1, next, restricted
)
get_related_klass_infos(klass_info, next_klass_infos)
def local_setter(obj, from_obj):
# Set a reverse fk object when relation is non-empty.
if from_obj:
f.remote_field.set_cached_value(from_obj, obj)
def remote_setter(name, obj, from_obj):
setattr(from_obj, name, obj)
for name in list(requested):
# Filtered relations work only on the topmost level.
if cur_depth > 1:
break
if name in self.query._filtered_relations:
fields_found.add(name)
f, _, join_opts, joins, _, _ = self.query.setup_joins(
[name], opts, root_alias
)
model = join_opts.model
alias = joins[-1]
from_parent = (
issubclass(model, opts.model) and model is not opts.model
)
klass_info = {
"model": model,
"field": f,
"reverse": True,
"local_setter": local_setter,
"remote_setter": partial(remote_setter, name),
"from_parent": from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
start_alias=alias,
opts=model._meta,
from_parent=opts.model,
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info["select_fields"] = select_fields
next_requested = requested.get(name, {})
next_klass_infos = self.get_related_selections(
select,
opts=model._meta,
root_alias=alias,
cur_depth=cur_depth + 1,
requested=next_requested,
restricted=restricted,
)
get_related_klass_infos(klass_info, next_klass_infos)
fields_not_found = set(requested).difference(fields_found)
if fields_not_found:
invalid_fields = ("'%s'" % s for s in fields_not_found)
raise FieldError(
"Invalid field name(s) given in select_related: %s. "
"Choices are: %s"
% (
", ".join(invalid_fields),
", ".join(_get_field_choices()) or "(none)",
)
)
return related_klass_infos
def get_select_for_update_of_arguments(self):
"""
Return a quoted list of arguments for the SELECT FOR UPDATE OF part of
the query.
"""
def _get_parent_klass_info(klass_info):
concrete_model = klass_info["model"]._meta.concrete_model
for parent_model, parent_link in concrete_model._meta.parents.items():
parent_list = parent_model._meta.get_parent_list()
yield {
"model": parent_model,
"field": parent_link,
"reverse": False,
"select_fields": [
select_index
for select_index in klass_info["select_fields"]
# Selected columns from a model or its parents.
if (
self.select[select_index][0].target.model == parent_model
or self.select[select_index][0].target.model in parent_list
)
],
}
def _get_first_selected_col_from_model(klass_info):
"""
Find the first selected column from a model. If it doesn't exist,
            don't lock the model.
select_fields is filled recursively, so it also contains fields
from the parent models.
"""
concrete_model = klass_info["model"]._meta.concrete_model
for select_index in klass_info["select_fields"]:
if self.select[select_index][0].target.model == concrete_model:
return self.select[select_index][0]
def _get_field_choices():
"""Yield all allowed field paths in breadth-first search order."""
queue = collections.deque([(None, self.klass_info)])
while queue:
parent_path, klass_info = queue.popleft()
if parent_path is None:
path = []
yield "self"
else:
field = klass_info["field"]
if klass_info["reverse"]:
field = field.remote_field
path = parent_path + [field.name]
yield LOOKUP_SEP.join(path)
queue.extend(
(path, klass_info)
for klass_info in _get_parent_klass_info(klass_info)
)
queue.extend(
(path, klass_info)
for klass_info in klass_info.get("related_klass_infos", [])
)
if not self.klass_info:
return []
result = []
invalid_names = []
for name in self.query.select_for_update_of:
klass_info = self.klass_info
if name == "self":
col = _get_first_selected_col_from_model(klass_info)
else:
for part in name.split(LOOKUP_SEP):
klass_infos = (
*klass_info.get("related_klass_infos", []),
*_get_parent_klass_info(klass_info),
)
for related_klass_info in klass_infos:
field = related_klass_info["field"]
if related_klass_info["reverse"]:
field = field.remote_field
if field.name == part:
klass_info = related_klass_info
break
else:
klass_info = None
break
if klass_info is None:
invalid_names.append(name)
continue
col = _get_first_selected_col_from_model(klass_info)
if col is not None:
if self.connection.features.select_for_update_of_column:
result.append(self.compile(col)[0])
else:
result.append(self.quote_name_unless_alias(col.alias))
if invalid_names:
raise FieldError(
"Invalid field name(s) given in select_for_update(of=(...)): %s. "
"Only relational fields followed in the query are allowed. "
"Choices are: %s."
% (
", ".join(invalid_names),
", ".join(_get_field_choices()),
)
)
return result
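    # A hedged usage sketch (model names hypothetical): for
    #
    #     Book.objects.select_related("author").select_for_update(of=("self", "author"))
    #
    # "self" resolves to the root model's first selected column and "author"
    # to its related klass_info; each resolved entry is rendered as a column
    # reference or a table alias depending on
    # connection.features.select_for_update_of_column.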
def deferred_to_columns(self):
"""
Convert the self.deferred_loading data structure to mapping of table
names to sets of column names which are to be loaded. Return the
dictionary.
"""
columns = {}
self.query.deferred_to_data(columns)
return columns
def get_converters(self, expressions):
converters = {}
for i, expression in enumerate(expressions):
if expression:
backend_converters = self.connection.ops.get_db_converters(expression)
field_converters = expression.get_db_converters(self.connection)
if backend_converters or field_converters:
converters[i] = (backend_converters + field_converters, expression)
return converters
def apply_converters(self, rows, converters):
connection = self.connection
converters = list(converters.items())
for row in map(list, rows):
for pos, (convs, expression) in converters:
value = row[pos]
for converter in convs:
value = converter(value, expression, connection)
row[pos] = value
yield row
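    # Hedged sketch of the converter protocol used above (the callable name
    # is hypothetical; the signature comes from the loop in
    # apply_converters()):
    #
    #     def convert_booleanfield_value(value, expression, connection):
    #         return bool(value) if value in (0, 1) else value
    #
    # get_converters() gathers such callables per select index, and
    # apply_converters() threads each row value through them in order.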
def results_iter(
self,
results=None,
tuple_expected=False,
chunked_fetch=False,
chunk_size=GET_ITERATOR_CHUNK_SIZE,
):
"""Return an iterator over the results from executing this query."""
if results is None:
results = self.execute_sql(
MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size
)
fields = [s[0] for s in self.select[0 : self.col_count]]
converters = self.get_converters(fields)
rows = chain.from_iterable(results)
if converters:
rows = self.apply_converters(rows, converters)
if tuple_expected:
rows = map(tuple, rows)
return rows
def has_results(self):
"""
Backends (e.g. NoSQL) can override this in order to use optimized
versions of "query has any results."
"""
return bool(self.execute_sql(SINGLE))
def execute_sql(
self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
):
"""
Run the query against the database and return the result(s). The
return value is a single data item if result_type is SINGLE, or an
iterator over the results if the result_type is MULTI.
result_type is either MULTI (use fetchmany() to retrieve all rows),
SINGLE (only retrieve a single row), or None. In this last case, the
cursor is returned if any query is executed, since it's used by
        subclasses such as InsertQuery. It's possible, however, that no query
is needed, as the filters describe an empty set. In that case, None is
returned, to avoid any unnecessary database interaction.
"""
result_type = result_type or NO_RESULTS
try:
sql, params = self.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return iter([])
else:
return
if chunked_fetch:
cursor = self.connection.chunked_cursor()
else:
cursor = self.connection.cursor()
try:
cursor.execute(sql, params)
except Exception:
# Might fail for server-side cursors (e.g. connection closed)
cursor.close()
raise
if result_type == CURSOR:
# Give the caller the cursor to process and close.
return cursor
if result_type == SINGLE:
try:
val = cursor.fetchone()
if val:
return val[0 : self.col_count]
return val
finally:
# done with the cursor
cursor.close()
if result_type == NO_RESULTS:
cursor.close()
return
result = cursor_iter(
cursor,
self.connection.features.empty_fetchmany_value,
self.col_count if self.has_extra_select else None,
chunk_size,
)
if not chunked_fetch or not self.connection.features.can_use_chunked_reads:
# If we are using non-chunked reads, we return the same data
# structure as normally, but ensure it is all read into memory
# before going any further. Use chunked_fetch if requested,
# unless the database doesn't support it.
return list(result)
return result
def as_subquery_condition(self, alias, columns, compiler):
qn = compiler.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
for index, select_col in enumerate(self.query.select):
lhs_sql, lhs_params = self.compile(select_col)
rhs = "%s.%s" % (qn(alias), qn2(columns[index]))
self.query.where.add(RawSQL("%s = %s" % (lhs_sql, rhs), lhs_params), "AND")
sql, params = self.as_sql()
return "EXISTS (%s)" % sql, params
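    # Sketch of the resulting condition (aliases hypothetical): each selected
    # column is equated with the outer query's column before compiling, e.g.
    #
    #     EXISTS (SELECT ... WHERE "inner"."author_id" = "outer"."id")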
def explain_query(self):
result = list(self.execute_sql())
        # Some backends return 1-item tuples with strings, and others return
        # tuples with integers and strings. Flatten them out into strings.
output_formatter = (
json.dumps if self.query.explain_info.format == "json" else str
)
for row in result[0]:
if not isinstance(row, str):
yield " ".join(output_formatter(c) for c in row)
else:
yield row
class SQLInsertCompiler(SQLCompiler):
returning_fields = None
returning_params = tuple()
def field_as_sql(self, field, val):
"""
Take a field and a value intended to be saved on that field, and
return placeholder SQL and accompanying params. Check for raw values,
expressions, and fields with get_placeholder() defined in that order.
When field is None, consider the value raw and use it as the
placeholder, with no corresponding parameters returned.
"""
if field is None:
# A field value of None means the value is raw.
sql, params = val, []
elif hasattr(val, "as_sql"):
# This is an expression, let's compile it.
sql, params = self.compile(val)
elif hasattr(field, "get_placeholder"):
# Some fields (e.g. geo fields) need special munging before
# they can be inserted.
sql, params = field.get_placeholder(val, self, self.connection), [val]
else:
# Return the common case for the placeholder
sql, params = "%s", [val]
# The following hook is only used by Oracle Spatial, which sometimes
# needs to yield 'NULL' and [] as its placeholder and params instead
# of '%s' and [None]. The 'NULL' placeholder is produced earlier by
# OracleOperations.get_geom_placeholder(). The following line removes
# the corresponding None parameter. See ticket #10888.
params = self.connection.ops.modify_insert_params(sql, params)
return sql, params
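    # Hedged examples of the branches above (values hypothetical):
    #     field is None (raw value):   ("NOW()", [])
    #     val has as_sql():            the compiled expression SQL and params
    #     field has get_placeholder(): backend-specific SQL wrapping, [val]
    #     common case:                 ("%s", [val])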
def prepare_value(self, field, value):
"""
Prepare a value to be used in a query by resolving it if it is an
expression and otherwise calling the field's get_db_prep_save().
"""
if hasattr(value, "resolve_expression"):
value = value.resolve_expression(
self.query, allow_joins=False, for_save=True
)
# Don't allow values containing Col expressions. They refer to
# existing columns on a row, but in the case of insert the row
# doesn't exist yet.
if value.contains_column_references:
raise ValueError(
'Failed to insert expression "%s" on %s. F() expressions '
"can only be used to update, not to insert." % (value, field)
)
if value.contains_aggregate:
raise FieldError(
"Aggregate functions are not allowed in this query "
"(%s=%r)." % (field.name, value)
)
if value.contains_over_clause:
raise FieldError(
"Window expressions are not allowed in this query (%s=%r)."
% (field.name, value)
)
else:
value = field.get_db_prep_save(value, connection=self.connection)
return value
def pre_save_val(self, field, obj):
"""
Get the given field's value off the given obj. pre_save() is used for
things like auto_now on DateTimeField. Skip it if this is a raw query.
"""
if self.query.raw:
return getattr(obj, field.attname)
return field.pre_save(obj, add=True)
def assemble_as_sql(self, fields, value_rows):
"""
Take a sequence of N fields and a sequence of M rows of values, and
generate placeholder SQL and parameters for each field and value.
Return a pair containing:
* a sequence of M rows of N SQL placeholder strings, and
* a sequence of M rows of corresponding parameter values.
Each placeholder string may contain any number of '%s' interpolation
strings, and each parameter row will contain exactly as many params
as the total number of '%s's in the corresponding placeholder row.
"""
if not value_rows:
return [], []
# list of (sql, [params]) tuples for each object to be saved
# Shape: [n_objs][n_fields][2]
rows_of_fields_as_sql = (
(self.field_as_sql(field, v) for field, v in zip(fields, row))
for row in value_rows
)
# tuple like ([sqls], [[params]s]) for each object to be saved
# Shape: [n_objs][2][n_fields]
sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql)
# Extract separate lists for placeholders and params.
# Each of these has shape [n_objs][n_fields]
placeholder_rows, param_rows = zip(*sql_and_param_pair_rows)
# Params for each field are still lists, and need to be flattened.
param_rows = [[p for ps in row for p in ps] for row in param_rows]
return placeholder_rows, param_rows
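    # A worked sketch of the transposition above, assuming two objects and
    # two plain fields (so every placeholder is "%s"):
    #
    #     rows_of_fields_as_sql -> [[("%s", [1]), ("%s", ["a"])],
    #                               [("%s", [2]), ("%s", ["b"])]]
    #     zip(*row) per object  -> (("%s", "%s"), ([1], ["a"])), ...
    #     placeholder_rows      -> (("%s", "%s"), ("%s", "%s"))
    #     param_rows, flattened -> [[1, "a"], [2, "b"]]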
def as_sql(self):
# We don't need quote_name_unless_alias() here, since these are all
# going to be column names (so we can avoid the extra overhead).
qn = self.connection.ops.quote_name
opts = self.query.get_meta()
insert_statement = self.connection.ops.insert_statement(
on_conflict=self.query.on_conflict,
)
result = ["%s %s" % (insert_statement, qn(opts.db_table))]
fields = self.query.fields or [opts.pk]
result.append("(%s)" % ", ".join(qn(f.column) for f in fields))
if self.query.fields:
value_rows = [
[
self.prepare_value(field, self.pre_save_val(field, obj))
for field in fields
]
for obj in self.query.objs
]
else:
# An empty object.
value_rows = [
[self.connection.ops.pk_default_value()] for _ in self.query.objs
]
fields = [None]
# Currently the backends just accept values when generating bulk
# queries and generate their own placeholders. Doing that isn't
# necessary and it should be possible to use placeholders and
# expressions in bulk inserts too.
can_bulk = (
not self.returning_fields and self.connection.features.has_bulk_insert
)
placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)
on_conflict_suffix_sql = self.connection.ops.on_conflict_suffix_sql(
fields,
self.query.on_conflict,
self.query.update_fields,
self.query.unique_fields,
)
if (
self.returning_fields
and self.connection.features.can_return_columns_from_insert
):
if self.connection.features.can_return_rows_from_bulk_insert:
result.append(
self.connection.ops.bulk_insert_sql(fields, placeholder_rows)
)
params = param_rows
else:
result.append("VALUES (%s)" % ", ".join(placeholder_rows[0]))
params = [param_rows[0]]
if on_conflict_suffix_sql:
result.append(on_conflict_suffix_sql)
# Skip empty r_sql to allow subclasses to customize behavior for
# 3rd party backends. Refs #19096.
r_sql, self.returning_params = self.connection.ops.return_insert_columns(
self.returning_fields
)
if r_sql:
result.append(r_sql)
params += [self.returning_params]
return [(" ".join(result), tuple(chain.from_iterable(params)))]
if can_bulk:
result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
if on_conflict_suffix_sql:
result.append(on_conflict_suffix_sql)
return [(" ".join(result), tuple(p for ps in param_rows for p in ps))]
else:
if on_conflict_suffix_sql:
result.append(on_conflict_suffix_sql)
return [
(" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
for p, vals in zip(placeholder_rows, param_rows)
]
def execute_sql(self, returning_fields=None):
assert not (
returning_fields
and len(self.query.objs) != 1
and not self.connection.features.can_return_rows_from_bulk_insert
)
opts = self.query.get_meta()
self.returning_fields = returning_fields
with self.connection.cursor() as cursor:
for sql, params in self.as_sql():
cursor.execute(sql, params)
if not self.returning_fields:
return []
if (
self.connection.features.can_return_rows_from_bulk_insert
and len(self.query.objs) > 1
):
rows = self.connection.ops.fetch_returned_insert_rows(cursor)
elif self.connection.features.can_return_columns_from_insert:
assert len(self.query.objs) == 1
rows = [
self.connection.ops.fetch_returned_insert_columns(
cursor,
self.returning_params,
)
]
else:
rows = [
(
self.connection.ops.last_insert_id(
cursor,
opts.db_table,
opts.pk.column,
),
)
]
cols = [field.get_col(opts.db_table) for field in self.returning_fields]
converters = self.get_converters(cols)
if converters:
rows = list(self.apply_converters(rows, converters))
return rows
class SQLDeleteCompiler(SQLCompiler):
@cached_property
def single_alias(self):
# Ensure base table is in aliases.
self.query.get_initial_alias()
return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1
@classmethod
def _expr_refs_base_model(cls, expr, base_model):
if isinstance(expr, Query):
return expr.model == base_model
if not hasattr(expr, "get_source_expressions"):
return False
return any(
cls._expr_refs_base_model(source_expr, base_model)
for source_expr in expr.get_source_expressions()
)
@cached_property
def contains_self_reference_subquery(self):
return any(
self._expr_refs_base_model(expr, self.query.model)
for expr in chain(
self.query.annotations.values(), self.query.where.children
)
)
def _as_sql(self, query):
result = ["DELETE FROM %s" % self.quote_name_unless_alias(query.base_table)]
where, params = self.compile(query.where)
if where:
result.append("WHERE %s" % where)
return " ".join(result), tuple(params)
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
if self.single_alias and not self.contains_self_reference_subquery:
return self._as_sql(self.query)
innerq = self.query.clone()
innerq.__class__ = Query
innerq.clear_select_clause()
pk = self.query.model._meta.pk
innerq.select = [pk.get_col(self.query.get_initial_alias())]
outerq = Query(self.query.model)
if not self.connection.features.update_can_self_select:
# Force the materialization of the inner query to allow reference
# to the target table on MySQL.
sql, params = innerq.get_compiler(connection=self.connection).as_sql()
innerq = RawSQL("SELECT * FROM (%s) subquery" % sql, params)
outerq.add_filter("pk__in", innerq)
return self._as_sql(outerq)
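    # When the fast path doesn't apply, the emitted SQL takes the form (names
    # hypothetical):
    #
    #     DELETE FROM "app_book" WHERE "app_book"."id" IN (SELECT ...)
    #
    # with the inner SELECT pre-materialized as RawSQL on backends where
    # update_can_self_select is False (e.g. MySQL).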
class SQLUpdateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
self.pre_sql_setup()
if not self.query.values:
return "", ()
qn = self.quote_name_unless_alias
values, update_params = [], []
for field, model, val in self.query.values:
if hasattr(val, "resolve_expression"):
val = val.resolve_expression(
self.query, allow_joins=False, for_save=True
)
if val.contains_aggregate:
raise FieldError(
"Aggregate functions are not allowed in this query "
"(%s=%r)." % (field.name, val)
)
if val.contains_over_clause:
raise FieldError(
"Window expressions are not allowed in this query "
"(%s=%r)." % (field.name, val)
)
elif hasattr(val, "prepare_database_save"):
if field.remote_field:
val = field.get_db_prep_save(
val.prepare_database_save(field),
connection=self.connection,
)
else:
raise TypeError(
"Tried to update field %s with a model instance, %r. "
"Use a value compatible with %s."
% (field, val, field.__class__.__name__)
)
else:
val = field.get_db_prep_save(val, connection=self.connection)
# Getting the placeholder for the field.
if hasattr(field, "get_placeholder"):
placeholder = field.get_placeholder(val, self, self.connection)
else:
placeholder = "%s"
name = field.column
if hasattr(val, "as_sql"):
sql, params = self.compile(val)
values.append("%s = %s" % (qn(name), placeholder % sql))
update_params.extend(params)
elif val is not None:
values.append("%s = %s" % (qn(name), placeholder))
update_params.append(val)
else:
values.append("%s = NULL" % qn(name))
table = self.query.base_table
result = [
"UPDATE %s SET" % qn(table),
", ".join(values),
]
where, params = self.compile(self.query.where)
if where:
result.append("WHERE %s" % where)
return " ".join(result), tuple(update_params + params)
def execute_sql(self, result_type):
"""
Execute the specified update. Return the number of rows affected by
the primary update query. The "primary update query" is the first
non-empty query that is executed. Row counts for any subsequent,
related queries are not available.
"""
cursor = super().execute_sql(result_type)
try:
rows = cursor.rowcount if cursor else 0
is_empty = cursor is None
finally:
if cursor:
cursor.close()
for query in self.query.get_related_updates():
aux_rows = query.get_compiler(self.using).execute_sql(result_type)
if is_empty and aux_rows:
rows = aux_rows
is_empty = False
return rows
def pre_sql_setup(self):
"""
If the update depends on results from other tables, munge the "where"
conditions to match the format required for (portable) SQL updates.
If multiple updates are required, pull out the id values to update at
this point so that they don't change as a result of the progressive
updates.
"""
refcounts_before = self.query.alias_refcount.copy()
# Ensure base table is in the query
self.query.get_initial_alias()
count = self.query.count_active_tables()
if not self.query.related_updates and count == 1:
return
query = self.query.chain(klass=Query)
query.select_related = False
query.clear_ordering(force=True)
query.extra = {}
query.select = []
meta = query.get_meta()
fields = [meta.pk.name]
related_ids_index = []
for related in self.query.related_updates:
if all(
path.join_field.primary_key for path in meta.get_path_to_parent(related)
):
# If a primary key chain exists to the targeted related update,
# then the meta.pk value can be used for it.
related_ids_index.append((related, 0))
else:
# This branch will only be reached when updating a field of an
# ancestor that is not part of the primary key chain of a MTI
# tree.
related_ids_index.append((related, len(fields)))
fields.append(related._meta.pk.name)
query.add_fields(fields)
super().pre_sql_setup()
must_pre_select = (
count > 1 and not self.connection.features.update_can_self_select
)
# Now we adjust the current query: reset the where clause and get rid
# of all the tables we don't need (since they're in the sub-select).
self.query.clear_where()
if self.query.related_updates or must_pre_select:
# Either we're using the idents in multiple update queries (so
# don't want them to change), or the db backend doesn't support
# selecting from the updating table (e.g. MySQL).
idents = []
related_ids = collections.defaultdict(list)
for rows in query.get_compiler(self.using).execute_sql(MULTI):
idents.extend(r[0] for r in rows)
for parent, index in related_ids_index:
related_ids[parent].extend(r[index] for r in rows)
self.query.add_filter("pk__in", idents)
self.query.related_ids = related_ids
else:
# The fast path. Filters and updates in one query.
self.query.add_filter("pk__in", query)
self.query.reset_refcounts(refcounts_before)
class SQLAggregateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
sql, params = [], []
for annotation in self.query.annotation_select.values():
ann_sql, ann_params = self.compile(annotation)
ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params)
sql.append(ann_sql)
params.extend(ann_params)
self.col_count = len(self.query.annotation_select)
sql = ", ".join(sql)
params = tuple(params)
inner_query_sql, inner_query_params = self.query.inner_query.get_compiler(
self.using,
elide_empty=self.elide_empty,
).as_sql(with_col_aliases=True)
sql = "SELECT %s FROM (%s) subquery" % (sql, inner_query_sql)
params = params + inner_query_params
return sql, params
def cursor_iter(cursor, sentinel, col_count, itersize):
"""
Yield blocks of rows from a cursor and ensure the cursor is closed when
done.
"""
try:
for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
yield rows if col_count is None else [r[:col_count] for r in rows]
finally:
cursor.close()
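# A minimal usage sketch, mirroring the call in SQLCompiler.execute_sql():
#
#     for rows in cursor_iter(
#         cursor, connection.features.empty_fetchmany_value, None, 100
#     ):
#         for row in rows:
#             ...
#
# col_count=None yields rows unchanged; an integer trims each row to that
# many columns.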
"""
Query subclasses which provide extra functionality beyond simple data retrieval.
"""
from django.core.exceptions import FieldError
from django.db.models.sql.constants import CURSOR, GET_ITERATOR_CHUNK_SIZE, NO_RESULTS
from django.db.models.sql.query import Query
__all__ = ["DeleteQuery", "UpdateQuery", "InsertQuery", "AggregateQuery"]
class DeleteQuery(Query):
"""A DELETE SQL query."""
compiler = "SQLDeleteCompiler"
def do_query(self, table, where, using):
self.alias_map = {table: self.alias_map[table]}
self.where = where
cursor = self.get_compiler(using).execute_sql(CURSOR)
if cursor:
with cursor:
return cursor.rowcount
return 0
def delete_batch(self, pk_list, using):
"""
Set up and execute delete queries for all the objects in pk_list.
More than one physical query may be executed if there are a
lot of values in pk_list.
"""
# number of objects deleted
num_deleted = 0
field = self.get_meta().pk
for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
self.clear_where()
self.add_filter(
f"{field.attname}__in",
pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE],
)
num_deleted += self.do_query(
self.get_meta().db_table, self.where, using=using
)
return num_deleted
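    # Sketch: with a chunk size of 100, a pk_list of 250 ids issues three
    # DELETE statements filtered on pk__in with 100, 100, and 50 values, and
    # returns the summed row counts.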
class UpdateQuery(Query):
"""An UPDATE SQL query."""
compiler = "SQLUpdateCompiler"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._setup_query()
def _setup_query(self):
"""
Run on initialization and at the end of chaining. Any attributes that
would normally be set in __init__() should go here instead.
"""
self.values = []
self.related_ids = None
self.related_updates = {}
def clone(self):
obj = super().clone()
obj.related_updates = self.related_updates.copy()
return obj
def update_batch(self, pk_list, values, using):
self.add_update_values(values)
for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
self.clear_where()
self.add_filter(
"pk__in", pk_list[offset : offset + GET_ITERATOR_CHUNK_SIZE]
)
self.get_compiler(using).execute_sql(NO_RESULTS)
def add_update_values(self, values):
"""
Convert a dictionary of field name to value mappings into an update
query. This is the entry point for the public update() method on
querysets.
"""
values_seq = []
for name, val in values.items():
field = self.get_meta().get_field(name)
direct = (
not (field.auto_created and not field.concrete) or not field.concrete
)
model = field.model._meta.concrete_model
if not direct or (field.is_relation and field.many_to_many):
raise FieldError(
"Cannot update model field %r (only non-relations and "
"foreign keys permitted)." % field
)
if model is not self.get_meta().concrete_model:
self.add_related_update(model, field, val)
continue
values_seq.append((field, model, val))
return self.add_update_fields(values_seq)
def add_update_fields(self, values_seq):
"""
Append a sequence of (field, model, value) triples to the internal list
that will be used to generate the UPDATE query. Might be more usefully
called add_update_targets() to hint at the extra information here.
"""
for field, model, val in values_seq:
if hasattr(val, "resolve_expression"):
# Resolve expressions here so that annotations are no longer needed
val = val.resolve_expression(self, allow_joins=False, for_save=True)
self.values.append((field, model, val))
def add_related_update(self, model, field, value):
"""
Add (name, value) to an update query for an ancestor model.
        Updates are coalesced so that only one update query per ancestor is run.
"""
self.related_updates.setdefault(model, []).append((field, None, value))
def get_related_updates(self):
"""
Return a list of query objects: one for each update required to an
ancestor model. Each query will have the same filtering conditions as
the current query but will only update a single table.
"""
if not self.related_updates:
return []
result = []
for model, values in self.related_updates.items():
query = UpdateQuery(model)
query.values = values
if self.related_ids is not None:
query.add_filter("pk__in", self.related_ids[model])
result.append(query)
return result
class InsertQuery(Query):
compiler = "SQLInsertCompiler"
def __init__(
self, *args, on_conflict=None, update_fields=None, unique_fields=None, **kwargs
):
super().__init__(*args, **kwargs)
self.fields = []
self.objs = []
self.on_conflict = on_conflict
self.update_fields = update_fields or []
self.unique_fields = unique_fields or []
def insert_values(self, fields, objs, raw=False):
self.fields = fields
self.objs = objs
self.raw = raw
class AggregateQuery(Query):
"""
    Take another query as a parameter to the FROM clause and select only the
    aggregate annotations from it.
"""
compiler = "SQLAggregateCompiler"
def __init__(self, model, inner_query):
self.inner_query = inner_query
super().__init__(model)
from django.db import ProgrammingError
from django.utils.functional import cached_property
class BaseDatabaseFeatures:
# An optional tuple indicating the minimum supported database version.
minimum_database_version = None
gis_enabled = False
# Oracle can't group by LOB (large object) data types.
allows_group_by_lob = True
allows_group_by_pk = False
allows_group_by_selected_pks = False
empty_fetchmany_value = []
update_can_self_select = True
# Does the backend distinguish between '' and None?
interprets_empty_strings_as_nulls = False
# Does the backend allow inserting duplicate NULL rows in a nullable
# unique field? All core backends implement this correctly, but other
# databases such as SQL Server do not.
supports_nullable_unique_constraints = True
# Does the backend allow inserting duplicate rows when a unique_together
# constraint exists and some fields are nullable but not all of them?
supports_partially_nullable_unique_constraints = True
# Does the backend support initially deferrable unique constraints?
supports_deferrable_unique_constraints = False
can_use_chunked_reads = True
can_return_columns_from_insert = False
can_return_rows_from_bulk_insert = False
has_bulk_insert = True
uses_savepoints = True
can_release_savepoints = False
# If True, don't use integer foreign keys referring to, e.g., positive
# integer primary keys.
related_fields_match_type = False
allow_sliced_subqueries_with_in = True
has_select_for_update = False
has_select_for_update_nowait = False
has_select_for_update_skip_locked = False
has_select_for_update_of = False
has_select_for_no_key_update = False
# Does the database's SELECT FOR UPDATE OF syntax require a column rather
# than a table?
select_for_update_of_column = False
# Does the default test database allow multiple connections?
# Usually an indication that the test database is in-memory
test_db_allows_multiple_connections = True
# Can an object be saved without an explicit primary key?
supports_unspecified_pk = False
# Can a fixture contain forward references? i.e., are
# FK constraints checked at the end of transaction, or
# at the end of each save operation?
supports_forward_references = True
# Does the backend truncate names properly when they are too long?
truncates_names = False
# Is there a REAL datatype in addition to floats/doubles?
has_real_datatype = False
supports_subqueries_in_group_by = True
# Does the backend ignore unnecessary ORDER BY clauses in subqueries?
ignores_unnecessary_order_by_in_subqueries = True
# Is there a true datatype for uuid?
has_native_uuid_field = False
# Is there a true datatype for timedeltas?
has_native_duration_field = False
    # Does the database driver support same-type temporal data subtraction,
    # returning the type used to store a duration field?
supports_temporal_subtraction = False
# Does the __regex lookup support backreferencing and grouping?
supports_regex_backreferencing = True
# Can date/datetime lookups be performed using a string?
supports_date_lookup_using_string = True
# Can datetimes with timezones be used?
supports_timezones = True
# Does the database have a copy of the zoneinfo database?
has_zoneinfo_database = True
# When performing a GROUP BY, is an ORDER BY NULL required
# to remove any ordering?
requires_explicit_null_ordering_when_grouping = False
# Does the backend order NULL values as largest or smallest?
nulls_order_largest = False
# Does the backend support NULLS FIRST and NULLS LAST in ORDER BY?
supports_order_by_nulls_modifier = True
    # Does the backend order NULLS FIRST by default?
order_by_nulls_first = False
# The database's limit on the number of query parameters.
max_query_params = None
# Can an object have an autoincrement primary key of 0?
allows_auto_pk_0 = True
    # Do we need to NULL a ForeignKey out, or can the constraint check be
    # deferred?
can_defer_constraint_checks = False
# Does the backend support tablespaces? Default to False because it isn't
# in the SQL standard.
supports_tablespaces = False
# Does the backend reset sequences between tests?
supports_sequence_reset = True
# Can the backend introspect the default value of a column?
can_introspect_default = True
    # Confirm support for introspected foreign keys.
    # Every database can do this reliably, except MySQL,
    # which can't do it for MyISAM tables.
can_introspect_foreign_keys = True
# Map fields which some backends may not be able to differentiate to the
# field it's introspected as.
introspected_field_types = {
"AutoField": "AutoField",
"BigAutoField": "BigAutoField",
"BigIntegerField": "BigIntegerField",
"BinaryField": "BinaryField",
"BooleanField": "BooleanField",
"CharField": "CharField",
"DurationField": "DurationField",
"GenericIPAddressField": "GenericIPAddressField",
"IntegerField": "IntegerField",
"PositiveBigIntegerField": "PositiveBigIntegerField",
"PositiveIntegerField": "PositiveIntegerField",
"PositiveSmallIntegerField": "PositiveSmallIntegerField",
"SmallAutoField": "SmallAutoField",
"SmallIntegerField": "SmallIntegerField",
"TimeField": "TimeField",
}
# Can the backend introspect the column order (ASC/DESC) for indexes?
supports_index_column_ordering = True
# Does the backend support introspection of materialized views?
can_introspect_materialized_views = False
# Support for the DISTINCT ON clause
can_distinct_on_fields = False
# Does the backend prevent running SQL queries in broken transactions?
atomic_transactions = True
# Can we roll back DDL in a transaction?
can_rollback_ddl = False
# Does it support operations requiring references rename in a transaction?
supports_atomic_references_rename = True
# Can we issue more than one ALTER COLUMN clause in an ALTER TABLE?
supports_combined_alters = False
# Does it support foreign keys?
supports_foreign_keys = True
# Can it create foreign key constraints inline when adding columns?
can_create_inline_fk = True
# Does it automatically index foreign keys?
indexes_foreign_keys = True
# Does it support CHECK constraints?
supports_column_check_constraints = True
supports_table_check_constraints = True
# Does the backend support introspection of CHECK constraints?
can_introspect_check_constraints = True
# Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value})
# parameter passing? Note this can be provided by the backend even if not
    # supported by the Python driver.
supports_paramstyle_pyformat = True
# Does the backend require literal defaults, rather than parameterized ones?
requires_literal_defaults = False
# Does the backend require a connection reset after each material schema change?
connection_persists_old_columns = False
# What kind of error does the backend throw when accessing closed cursor?
closed_cursor_error_class = ProgrammingError
# Does 'a' LIKE 'A' match?
has_case_insensitive_like = False
# Suffix for backends that don't support "SELECT xxx;" queries.
bare_select_suffix = ""
    # Is NULL implied on columns without needing to be explicitly specified?
implied_column_null = False
# Does the backend support "select for update" queries with limit (and offset)?
supports_select_for_update_with_limit = True
# Does the backend ignore null expressions in GREATEST and LEAST queries unless
# every expression is null?
greatest_least_ignores_nulls = False
# Can the backend clone databases for parallel test execution?
# Defaults to False to allow third-party backends to opt-in.
can_clone_databases = False
# Does the backend consider table names with different casing to
# be equal?
ignores_table_name_case = False
# Place FOR UPDATE right after FROM clause. Used on MSSQL.
for_update_after_from = False
# Combinatorial flags
supports_select_union = True
supports_select_intersection = True
supports_select_difference = True
supports_slicing_ordering_in_compound = False
supports_parentheses_in_compound = True
# Does the database support SQL 2003 FILTER (WHERE ...) in aggregate
# expressions?
supports_aggregate_filter_clause = False
# Does the backend support indexing a TextField?
supports_index_on_text_field = True
# Does the backend support window expressions (expression OVER (...))?
supports_over_clause = False
supports_frame_range_fixed_distance = False
only_supports_unbounded_with_preceding_and_following = False
# Does the backend support CAST with precision?
supports_cast_with_precision = True
    # How many fractional-second digits does the database return when casting
    # a value to a type with time?
time_cast_precision = 6
# SQL to create a procedure for use by the Django test suite. The
# functionality of the procedure isn't important.
create_test_procedure_without_params_sql = None
create_test_procedure_with_int_param_sql = None
# Does the backend support keyword parameters for cursor.callproc()?
supports_callproc_kwargs = False
# What formats does the backend EXPLAIN syntax support?
supported_explain_formats = set()
# Does the backend support the default parameter in lead() and lag()?
supports_default_in_lead_lag = True
# Does the backend support ignoring constraint or uniqueness errors during
# INSERT?
supports_ignore_conflicts = True
# Does the backend support updating rows on constraint or uniqueness errors
# during INSERT?
supports_update_conflicts = False
supports_update_conflicts_with_target = False
# Does this backend require casting the results of CASE expressions used
# in UPDATE statements to ensure the expression has the correct type?
requires_casted_case_in_updates = False
# Does the backend support partial indexes (CREATE INDEX ... WHERE ...)?
supports_partial_indexes = True
supports_functions_in_partial_indexes = True
# Does the backend support covering indexes (CREATE INDEX ... INCLUDE ...)?
supports_covering_indexes = False
# Does the backend support indexes on expressions?
supports_expression_indexes = True
# Does the backend treat COLLATE as an indexed expression?
collate_as_index_expression = False
# Does the database allow more than one constraint or index on the same
# field(s)?
allows_multiple_constraints_on_same_fields = True
# Does the backend support boolean expressions in SELECT and GROUP BY
# clauses?
supports_boolean_expr_in_select_clause = True
# Does the backend support JSONField?
supports_json_field = True
# Can the backend introspect a JSONField?
can_introspect_json_field = True
# Does the backend support primitives in JSONField?
supports_primitives_in_json_field = True
# Is there a true datatype for JSON?
has_native_json_field = False
# Does the backend use PostgreSQL-style JSON operators like '->'?
has_json_operators = False
# Does the backend support __contains and __contained_by lookups for
# a JSONField?
supports_json_field_contains = True
# Does value__d__contains={'f': 'g'} (without a list around the dict) match
# {'d': [{'f': 'g'}]}?
json_key_contains_list_matching_requires_list = False
# Does the backend support JSONObject() database function?
has_json_object_function = True
# Does the backend support column collations?
supports_collation_on_charfield = True
supports_collation_on_textfield = True
# Does the backend support non-deterministic collations?
supports_non_deterministic_collations = True
# Does the backend support the logical XOR operator?
supports_logical_xor = False
# Collation names for use by the Django test suite.
test_collations = {
"ci": None, # Case-insensitive.
"cs": None, # Case-sensitive.
"non_default": None, # Non-default.
"swedish_ci": None, # Swedish case-insensitive.
}
# SQL template override for tests.aggregation.tests.NowUTC
test_now_utc_template = None
# A set of dotted paths to tests in Django's test suite that are expected
# to fail on this database.
django_test_expected_failures = set()
# A map of reasons to sets of dotted paths to tests in Django's test suite
# that should be skipped for this database.
django_test_skips = {}
def __init__(self, connection):
self.connection = connection
@cached_property
def supports_explaining_query_execution(self):
"""Does this backend support explaining query execution?"""
return self.connection.ops.explain_prefix is not None
@cached_property
def supports_transactions(self):
"""Confirm support for transactions."""
with self.connection.cursor() as cursor:
cursor.execute("CREATE TABLE ROLLBACK_TEST (X INT)")
self.connection.set_autocommit(False)
cursor.execute("INSERT INTO ROLLBACK_TEST (X) VALUES (8)")
self.connection.rollback()
self.connection.set_autocommit(True)
cursor.execute("SELECT COUNT(X) FROM ROLLBACK_TEST")
(count,) = cursor.fetchone()
cursor.execute("DROP TABLE ROLLBACK_TEST")
return count == 0
def allows_group_by_selected_pks_on_model(self, model):
if not self.allows_group_by_selected_pks:
return False
return model._meta.managed
import operator
from django.db import InterfaceError
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
minimum_database_version = (10,)
allows_group_by_selected_pks = True
can_return_columns_from_insert = True
can_return_rows_from_bulk_insert = True
has_real_datatype = True
has_native_uuid_field = True
has_native_duration_field = True
has_native_json_field = True
can_defer_constraint_checks = True
has_select_for_update = True
has_select_for_update_nowait = True
has_select_for_update_of = True
has_select_for_update_skip_locked = True
has_select_for_no_key_update = True
can_release_savepoints = True
supports_tablespaces = True
supports_transactions = True
can_introspect_materialized_views = True
can_distinct_on_fields = True
can_rollback_ddl = True
supports_combined_alters = True
nulls_order_largest = True
closed_cursor_error_class = InterfaceError
greatest_least_ignores_nulls = True
can_clone_databases = True
supports_temporal_subtraction = True
supports_slicing_ordering_in_compound = True
create_test_procedure_without_params_sql = """
CREATE FUNCTION test_procedure () RETURNS void AS $$
DECLARE
V_I INTEGER;
BEGIN
V_I := 1;
END;
$$ LANGUAGE plpgsql;"""
create_test_procedure_with_int_param_sql = """
CREATE FUNCTION test_procedure (P_I INTEGER) RETURNS void AS $$
DECLARE
V_I INTEGER;
BEGIN
V_I := P_I;
END;
$$ LANGUAGE plpgsql;"""
requires_casted_case_in_updates = True
supports_over_clause = True
only_supports_unbounded_with_preceding_and_following = True
supports_aggregate_filter_clause = True
supported_explain_formats = {"JSON", "TEXT", "XML", "YAML"}
supports_deferrable_unique_constraints = True
has_json_operators = True
json_key_contains_list_matching_requires_list = True
supports_update_conflicts = True
supports_update_conflicts_with_target = True
test_collations = {
"non_default": "sv-x-icu",
"swedish_ci": "sv-x-icu",
}
test_now_utc_template = "STATEMENT_TIMESTAMP() AT TIME ZONE 'UTC'"
django_test_skips = {
"opclasses are PostgreSQL only.": {
"indexes.tests.SchemaIndexesNotPostgreSQLTests."
"test_create_index_ignores_opclasses",
},
}
@cached_property
def introspected_field_types(self):
return {
**super().introspected_field_types,
"PositiveBigIntegerField": "BigIntegerField",
"PositiveIntegerField": "IntegerField",
"PositiveSmallIntegerField": "SmallIntegerField",
}
@cached_property
def is_postgresql_11(self):
return self.connection.pg_version >= 110000
@cached_property
def is_postgresql_12(self):
return self.connection.pg_version >= 120000
@cached_property
def is_postgresql_13(self):
return self.connection.pg_version >= 130000
@cached_property
def is_postgresql_14(self):
return self.connection.pg_version >= 140000
has_bit_xor = property(operator.attrgetter("is_postgresql_14"))
has_websearch_to_tsquery = property(operator.attrgetter("is_postgresql_11"))
supports_covering_indexes = property(operator.attrgetter("is_postgresql_11"))
supports_covering_gist_indexes = property(operator.attrgetter("is_postgresql_12"))
supports_covering_spgist_indexes = property(operator.attrgetter("is_postgresql_14"))
supports_non_deterministic_collations = property(
operator.attrgetter("is_postgresql_12")
)
from psycopg2.extras import Inet
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.backends.utils import split_tzname_delta
from django.db.models.constants import OnConflict
class DatabaseOperations(BaseDatabaseOperations):
cast_char_field_without_max_length = "varchar"
explain_prefix = "EXPLAIN"
explain_options = frozenset(
[
"ANALYZE",
"BUFFERS",
"COSTS",
"SETTINGS",
"SUMMARY",
"TIMING",
"VERBOSE",
"WAL",
]
)
cast_data_types = {
"AutoField": "integer",
"BigAutoField": "bigint",
"SmallAutoField": "smallint",
}
def unification_cast_sql(self, output_field):
internal_type = output_field.get_internal_type()
if internal_type in (
"GenericIPAddressField",
"IPAddressField",
"TimeField",
"UUIDField",
):
# PostgreSQL will resolve a union as type 'text' if input types are
# 'unknown'.
# https://www.postgresql.org/docs/current/typeconv-union-case.html
# These fields cannot be implicitly cast back in the default
# PostgreSQL configuration so we need to explicitly cast them.
# We must also remove components of the type within brackets:
# varchar(255) -> varchar.
return (
"CAST(%%s AS %s)" % output_field.db_type(self.connection).split("(")[0]
)
return "%s"
def date_extract_sql(self, lookup_type, field_name):
# https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
if lookup_type == "week_day":
# For consistency across backends, we return Sunday=1, Saturday=7.
return "EXTRACT('dow' FROM %s) + 1" % field_name
elif lookup_type == "iso_week_day":
return "EXTRACT('isodow' FROM %s)" % field_name
elif lookup_type == "iso_year":
return "EXTRACT('isoyear' FROM %s)" % field_name
else:
return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name)
def date_trunc_sql(self, lookup_type, field_name, tzname=None):
field_name = self._convert_field_to_tz(field_name, tzname)
# https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
def _prepare_tzname_delta(self, tzname):
tzname, sign, offset = split_tzname_delta(tzname)
if offset:
sign = "-" if sign == "+" else "+"
return f"{tzname}{sign}{offset}"
return tzname
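    # Hedged example (assuming split_tzname_delta() recognizes the offset):
    # a hypothetical "UTC+05:00" becomes "UTC-05:00", because AT TIME ZONE
    # interprets POSIX-style offsets with the opposite sign from ISO 8601.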
def _convert_field_to_tz(self, field_name, tzname):
if tzname and settings.USE_TZ:
field_name = "%s AT TIME ZONE '%s'" % (
field_name,
self._prepare_tzname_delta(tzname),
)
return field_name
def datetime_cast_date_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return "(%s)::date" % field_name
def datetime_cast_time_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return "(%s)::time" % field_name
def datetime_extract_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
if lookup_type == "second":
# Truncate fractional seconds.
return f"EXTRACT('second' FROM DATE_TRUNC('second', {field_name}))"
return self.date_extract_sql(lookup_type, field_name)
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
# https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
def time_extract_sql(self, lookup_type, field_name):
if lookup_type == "second":
# Truncate fractional seconds.
return f"EXTRACT('second' FROM DATE_TRUNC('second', {field_name}))"
return self.date_extract_sql(lookup_type, field_name)
def time_trunc_sql(self, lookup_type, field_name, tzname=None):
field_name = self._convert_field_to_tz(field_name, tzname)
return "DATE_TRUNC('%s', %s)::time" % (lookup_type, field_name)
def deferrable_sql(self):
return " DEFERRABLE INITIALLY DEFERRED"
def fetch_returned_insert_rows(self, cursor):
"""
Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table, return the list of returned rows.
"""
return cursor.fetchall()
def lookup_cast(self, lookup_type, internal_type=None):
lookup = "%s"
# Cast text lookups to text to allow things like filter(x__contains=4)
if lookup_type in (
"iexact",
"contains",
"icontains",
"startswith",
"istartswith",
"endswith",
"iendswith",
"regex",
"iregex",
):
if internal_type in ("IPAddressField", "GenericIPAddressField"):
lookup = "HOST(%s)"
elif internal_type in ("CICharField", "CIEmailField", "CITextField"):
lookup = "%s::citext"
else:
lookup = "%s::text"
# Use UPPER(x) for case-insensitive lookups; it's faster.
if lookup_type in ("iexact", "icontains", "istartswith", "iendswith"):
lookup = "UPPER(%s)" % lookup
return lookup
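    # Illustrative outputs (%s is later interpolated with the column SQL):
    #     lookup_cast("contains")                        -> "%s::text"
    #     lookup_cast("iexact")                          -> "UPPER(%s::text)"
    #     lookup_cast("iexact", "GenericIPAddressField") -> "UPPER(HOST(%s))"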
def no_limit_value(self):
return None
def prepare_sql_script(self, sql):
return [sql]
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def set_time_zone_sql(self):
return "SET TIME ZONE %s"
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
if not tables:
return []
# Perform a single SQL 'TRUNCATE x, y, z...;' statement. It allows us
# to truncate tables referenced by a foreign key in any other table.
sql_parts = [
style.SQL_KEYWORD("TRUNCATE"),
", ".join(style.SQL_FIELD(self.quote_name(table)) for table in tables),
]
if reset_sequences:
sql_parts.append(style.SQL_KEYWORD("RESTART IDENTITY"))
if allow_cascade:
sql_parts.append(style.SQL_KEYWORD("CASCADE"))
return ["%s;" % " ".join(sql_parts)]
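    # Example output (table names hypothetical):
    #     ['TRUNCATE "app_author", "app_book" RESTART IDENTITY CASCADE;']
    # with RESTART IDENTITY and CASCADE present only when the corresponding
    # flags are set.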
def sequence_reset_by_name_sql(self, style, sequences):
# 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
# to reset sequence indices
sql = []
for sequence_info in sequences:
table_name = sequence_info["table"]
# 'id' will be the case if it's an m2m using an autogenerated
# intermediate table (see BaseDatabaseIntrospection.sequence_list).
column_name = sequence_info["column"] or "id"
sql.append(
"%s setval(pg_get_serial_sequence('%s','%s'), 1, false);"
% (
style.SQL_KEYWORD("SELECT"),
style.SQL_TABLE(self.quote_name(table_name)),
style.SQL_FIELD(column_name),
)
)
return sql
def tablespace_sql(self, tablespace, inline=False):
if inline:
return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace)
else:
return "TABLESPACE %s" % self.quote_name(tablespace)
def sequence_reset_sql(self, style, model_list):
from django.db import models
output = []
qn = self.quote_name
for model in model_list:
# Use `coalesce` to set the sequence for each model to the max pk
# value if there are records, or 1 if there are none. Set the
# `is_called` property (the third argument to `setval`) to true if
# there are records (as the max pk value is already in use),
# otherwise set it to false. Use pg_get_serial_sequence to get the
# underlying sequence name from the table name and column name.
for f in model._meta.local_fields:
if isinstance(f, models.AutoField):
output.append(
"%s setval(pg_get_serial_sequence('%s','%s'), "
"coalesce(max(%s), 1), max(%s) %s null) %s %s;"
% (
style.SQL_KEYWORD("SELECT"),
style.SQL_TABLE(qn(model._meta.db_table)),
style.SQL_FIELD(f.column),
style.SQL_FIELD(qn(f.column)),
style.SQL_FIELD(qn(f.column)),
style.SQL_KEYWORD("IS NOT"),
style.SQL_KEYWORD("FROM"),
style.SQL_TABLE(qn(model._meta.db_table)),
)
)
# Only one AutoField is allowed per model, so don't bother
# continuing.
break
return output
def prep_for_iexact_query(self, x):
return x
def max_name_length(self):
"""
Return the maximum length of an identifier.
The maximum length of an identifier is 63 by default, but can be
changed by recompiling PostgreSQL after editing the NAMEDATALEN
macro in src/include/pg_config_manual.h.
This implementation returns 63, but can be overridden by a custom
database backend that inherits most of its behavior from this one.
"""
return 63
def distinct_sql(self, fields, params):
if fields:
params = [param for param_list in params for param in param_list]
return (["DISTINCT ON (%s)" % ", ".join(fields)], params)
else:
return ["DISTINCT"], []
def last_executed_query(self, cursor, sql, params):
# https://www.psycopg.org/docs/cursor.html#cursor.query
# The query attribute is a Psycopg extension to the DB API 2.0.
if cursor.query is not None:
return cursor.query.decode()
return None
def return_insert_columns(self, fields):
if not fields:
return "", ()
columns = [
"%s.%s"
% (
self.quote_name(field.model._meta.db_table),
self.quote_name(field.column),
)
for field in fields
]
return "RETURNING %s" % ", ".join(columns), ()
def bulk_insert_sql(self, fields, placeholder_rows):
placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
return "VALUES " + values_sql
def adapt_datefield_value(self, value):
return value
def adapt_datetimefield_value(self, value):
return value
def adapt_timefield_value(self, value):
return value
def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
return value
def adapt_ipaddressfield_value(self, value):
if value:
return Inet(value)
return None
def subtract_temporals(self, internal_type, lhs, rhs):
if internal_type == "DateField":
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
params = (*lhs_params, *rhs_params)
return "(interval '1 day' * (%s - %s))" % (lhs_sql, rhs_sql), params
return super().subtract_temporals(internal_type, lhs, rhs)
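# Sketch of the SQL emitted for DateField subtraction (column names
# hypothetical): subtracting "end_date" from "start_date" renders as
#
#   (interval '1 day' * ("start_date" - "end_date"))
#
# because PostgreSQL date subtraction yields an integer number of days,
# and multiplying by interval '1 day' converts that into the interval
# value a DurationField expects.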
def explain_query_prefix(self, format=None, **options):
extra = {}
# Normalize options.
if options:
options = {
name.upper(): "true" if value else "false"
for name, value in options.items()
}
for valid_option in self.explain_options:
value = options.pop(valid_option, None)
if value is not None:
extra[valid_option.upper()] = value
prefix = super().explain_query_prefix(format, **options)
if format:
extra["FORMAT"] = format
if extra:
prefix += " (%s)" % ", ".join("%s %s" % i for i in extra.items())
return prefix
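# Sketch of the resulting prefix (options hypothetical):
#
# >>> ops.explain_query_prefix(format="JSON", verbose=True)
# 'EXPLAIN (VERBOSE true, FORMAT JSON)'
#
# Options not in explain_options are passed through to the base class,
# which rejects anything it doesn't recognize.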
def on_conflict_suffix_sql(self, fields, on_conflict, update_fields, unique_fields):
if on_conflict == OnConflict.IGNORE:
return "ON CONFLICT DO NOTHING"
if on_conflict == OnConflict.UPDATE:
return "ON CONFLICT(%s) DO UPDATE SET %s" % (
", ".join(map(self.quote_name, unique_fields)),
", ".join(
[
f"{field} = EXCLUDED.{field}"
for field in map(self.quote_name, update_fields)
]
),
)
return super().on_conflict_suffix_sql(
fields,
on_conflict,
update_fields,
unique_fields,
)
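# Sketch of the clause generated above (field names hypothetical):
#
#   ON CONFLICT("slug") DO UPDATE SET "title" = EXCLUDED."title"
#
# as produced for bulk_create(..., update_conflicts=True,
# unique_fields=["slug"], update_fields=["title"]).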
|
a531b364ccf434ed813fba9da5d035a0d1191719c6e66f2938bd877265ac46f4 | import os
import pathlib
from datetime import datetime, timezone
from urllib.parse import urljoin
from django.conf import settings
from django.core.exceptions import SuspiciousFileOperation
from django.core.files import File, locks
from django.core.files.move import file_move_safe
from django.core.files.utils import validate_file_name
from django.core.signals import setting_changed
from django.utils._os import safe_join
from django.utils.crypto import get_random_string
from django.utils.deconstruct import deconstructible
from django.utils.encoding import filepath_to_uri
from django.utils.functional import LazyObject, cached_property
from django.utils.module_loading import import_string
from django.utils.text import get_valid_filename
__all__ = (
"Storage",
"FileSystemStorage",
"DefaultStorage",
"default_storage",
"get_storage_class",
)
class Storage:
"""
A base storage class, providing some default behaviors that all other
storage systems can inherit or override, as necessary.
"""
# The following methods represent a public interface to private methods.
# These shouldn't be overridden by subclasses unless absolutely necessary.
def open(self, name, mode="rb"):
"""Retrieve the specified file from storage."""
return self._open(name, mode)
def save(self, name, content, max_length=None):
"""
Save new content to the file specified by name. The content should be
a proper File object or any Python file-like object, ready to be read
from the beginning.
"""
# Get the proper name for the file, as it will actually be saved.
if name is None:
name = content.name
if not hasattr(content, "chunks"):
content = File(content, name)
name = self.get_available_name(name, max_length=max_length)
name = self._save(name, content)
# Ensure that the name returned from the storage system is still valid.
validate_file_name(name, allow_relative_path=True)
return name
# These methods are part of the public API, with default implementations.
def get_valid_name(self, name):
"""
Return a filename, based on the provided filename, that's suitable for
use in the target storage system.
"""
return get_valid_filename(name)
def get_alternative_name(self, file_root, file_ext):
"""
Return an alternative filename, by adding an underscore and a random 7
character alphanumeric string (before the file extension, if one
exists) to the filename.
"""
return "%s_%s%s" % (file_root, get_random_string(7), file_ext)
def get_available_name(self, name, max_length=None):
"""
Return a filename that's free on the target storage system and
available for new content to be written to.
"""
name = str(name).replace("\\", "/")
dir_name, file_name = os.path.split(name)
if ".." in pathlib.PurePath(dir_name).parts:
raise SuspiciousFileOperation(
"Detected path traversal attempt in '%s'" % dir_name
)
validate_file_name(file_name)
file_root, file_ext = os.path.splitext(file_name)
# If the filename already exists, generate an alternative filename
# until it doesn't exist.
# Truncate original name if required, so the new filename does not
# exceed the max_length.
while self.exists(name) or (max_length and len(name) > max_length):
# file_ext includes the dot.
name = os.path.join(
dir_name, self.get_alternative_name(file_root, file_ext)
)
if max_length is None:
continue
# Truncate file_root if max_length exceeded.
truncation = len(name) - max_length
if truncation > 0:
file_root = file_root[:-truncation]
# Entire file_root was truncated in an attempt to find an
# available filename.
if not file_root:
raise SuspiciousFileOperation(
'Storage can not find an available filename for "%s". '
"Please make sure that the corresponding file field "
'allows sufficient "max_length".' % name
)
name = os.path.join(
dir_name, self.get_alternative_name(file_root, file_ext)
)
return name
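# Sketch of the renaming loop above (filenames hypothetical): if
# "photos/cat.png" already exists, get_alternative_name() yields
# something like "photos/cat_wlxidvz.png"; with a max_length set,
# file_root is truncated until the candidate fits, and
# SuspiciousFileOperation is raised if the root is consumed entirely.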
def generate_filename(self, filename):
"""
Validate the filename by calling get_valid_name() and return a filename
to be passed to the save() method.
"""
filename = str(filename).replace("\\", "/")
# `filename` may include a path as returned by FileField.upload_to.
dirname, filename = os.path.split(filename)
if ".." in pathlib.PurePath(dirname).parts:
raise SuspiciousFileOperation(
"Detected path traversal attempt in '%s'" % dirname
)
return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename)))
def path(self, name):
"""
Return a local filesystem path where the file can be retrieved using
Python's built-in open() function. Storage systems that can't be
accessed using open() should *not* implement this method.
"""
raise NotImplementedError("This backend doesn't support absolute paths.")
# The following methods form the public API for storage systems, but with
# no default implementations. Subclasses must implement *all* of these.
def delete(self, name):
"""
Delete the specified file from the storage system.
"""
raise NotImplementedError(
"subclasses of Storage must provide a delete() method"
)
def exists(self, name):
"""
Return True if a file referenced by the given name already exists in the
storage system, or False if the name is available for a new file.
"""
raise NotImplementedError(
"subclasses of Storage must provide an exists() method"
)
def listdir(self, path):
"""
List the contents of the specified path. Return a 2-tuple of lists:
the first item being directories, the second item being files.
"""
raise NotImplementedError(
"subclasses of Storage must provide a listdir() method"
)
def size(self, name):
"""
Return the total size, in bytes, of the file specified by name.
"""
raise NotImplementedError("subclasses of Storage must provide a size() method")
def url(self, name):
"""
Return an absolute URL where the file's contents can be accessed
directly by a web browser.
"""
raise NotImplementedError("subclasses of Storage must provide a url() method")
def get_accessed_time(self, name):
"""
Return the last accessed time (as a datetime) of the file specified by
name. The datetime will be timezone-aware if USE_TZ=True.
"""
raise NotImplementedError(
"subclasses of Storage must provide a get_accessed_time() method"
)
def get_created_time(self, name):
"""
Return the creation time (as a datetime) of the file specified by name.
The datetime will be timezone-aware if USE_TZ=True.
"""
raise NotImplementedError(
"subclasses of Storage must provide a get_created_time() method"
)
def get_modified_time(self, name):
"""
Return the last modified time (as a datetime) of the file specified by
name. The datetime will be timezone-aware if USE_TZ=True.
"""
raise NotImplementedError(
"subclasses of Storage must provide a get_modified_time() method"
)
@deconstructible
class FileSystemStorage(Storage):
"""
Standard filesystem storage
"""
# The combination of O_CREAT and O_EXCL makes os.open() raise OSError if
# the file already exists before it's opened.
OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0)
def __init__(
self,
location=None,
base_url=None,
file_permissions_mode=None,
directory_permissions_mode=None,
):
self._location = location
self._base_url = base_url
self._file_permissions_mode = file_permissions_mode
self._directory_permissions_mode = directory_permissions_mode
setting_changed.connect(self._clear_cached_properties)
def _clear_cached_properties(self, setting, **kwargs):
"""Reset setting based property values."""
if setting == "MEDIA_ROOT":
self.__dict__.pop("base_location", None)
self.__dict__.pop("location", None)
elif setting == "MEDIA_URL":
self.__dict__.pop("base_url", None)
elif setting == "FILE_UPLOAD_PERMISSIONS":
self.__dict__.pop("file_permissions_mode", None)
elif setting == "FILE_UPLOAD_DIRECTORY_PERMISSIONS":
self.__dict__.pop("directory_permissions_mode", None)
def _value_or_setting(self, value, setting):
return setting if value is None else value
@cached_property
def base_location(self):
return self._value_or_setting(self._location, settings.MEDIA_ROOT)
@cached_property
def location(self):
return os.path.abspath(self.base_location)
@cached_property
def base_url(self):
if self._base_url is not None and not self._base_url.endswith("/"):
self._base_url += "/"
return self._value_or_setting(self._base_url, settings.MEDIA_URL)
@cached_property
def file_permissions_mode(self):
return self._value_or_setting(
self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS
)
@cached_property
def directory_permissions_mode(self):
return self._value_or_setting(
self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS
)
def _open(self, name, mode="rb"):
return File(open(self.path(name), mode))
def _save(self, name, content):
full_path = self.path(name)
# Create any intermediate directories that do not exist.
directory = os.path.dirname(full_path)
try:
if self.directory_permissions_mode is not None:
# Set the umask because os.makedirs() doesn't apply the "mode"
# argument to intermediate-level directories.
old_umask = os.umask(0o777 & ~self.directory_permissions_mode)
try:
os.makedirs(
directory, self.directory_permissions_mode, exist_ok=True
)
finally:
os.umask(old_umask)
else:
os.makedirs(directory, exist_ok=True)
except FileExistsError:
raise FileExistsError("%s exists and is not a directory." % directory)
# There's a potential race condition between get_available_name and
# saving the file; it's possible that two threads might return the
# same name, at which point all sorts of fun happens. So we need to
# try to create the file, but if it already exists we have to go back
# to get_available_name() and try again.
while True:
try:
# This file has a file path that we can move.
if hasattr(content, "temporary_file_path"):
file_move_safe(content.temporary_file_path(), full_path)
# This is a normal uploaded file that we can stream.
else:
# The current umask value is masked out by os.open!
fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666)
_file = None
try:
locks.lock(fd, locks.LOCK_EX)
for chunk in content.chunks():
if _file is None:
mode = "wb" if isinstance(chunk, bytes) else "wt"
_file = os.fdopen(fd, mode)
_file.write(chunk)
finally:
locks.unlock(fd)
if _file is not None:
_file.close()
else:
os.close(fd)
except FileExistsError:
# A new name is needed if the file exists.
name = self.get_available_name(name)
full_path = self.path(name)
else:
# OK, the file save worked. Break out of the loop.
break
if self.file_permissions_mode is not None:
os.chmod(full_path, self.file_permissions_mode)
# Ensure the saved path is always relative to the storage root.
name = os.path.relpath(full_path, self.location)
# Ensure the moved file has the same gid as the storage root.
self._ensure_location_group_id(full_path)
# Store filenames with forward slashes, even on Windows.
return str(name).replace("\\", "/")
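# Sketch of the race handled above (paths hypothetical): two threads
# both get "docs/report.txt" from get_available_name(); the loser of the
# os.open(..., O_CREAT | O_EXCL) race receives FileExistsError, loops,
# and saves under a fresh name such as "docs/report_ab12cd3.txt".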
def _ensure_location_group_id(self, full_path):
if os.name == "posix":
file_gid = os.stat(full_path).st_gid
location_gid = os.stat(self.location).st_gid
if file_gid != location_gid:
try:
os.chown(full_path, uid=-1, gid=location_gid)
except PermissionError:
pass
def delete(self, name):
if not name:
raise ValueError("The name must be given to delete().")
name = self.path(name)
# If the file or directory exists, delete it from the filesystem.
try:
if os.path.isdir(name):
os.rmdir(name)
else:
os.remove(name)
except FileNotFoundError:
# FileNotFoundError is raised if the file or directory was removed
# concurrently.
pass
def exists(self, name):
return os.path.lexists(self.path(name))
def listdir(self, path):
path = self.path(path)
directories, files = [], []
with os.scandir(path) as entries:
for entry in entries:
if entry.is_dir():
directories.append(entry.name)
else:
files.append(entry.name)
return directories, files
def path(self, name):
return safe_join(self.location, name)
def size(self, name):
return os.path.getsize(self.path(name))
def url(self, name):
if self.base_url is None:
raise ValueError("This file is not accessible via a URL.")
url = filepath_to_uri(name)
if url is not None:
url = url.lstrip("/")
return urljoin(self.base_url, url)
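# Sketch of URL construction (values hypothetical, assuming
# base_url == "/media/"):
#
# >>> storage.url("photos/cat café.png")
# '/media/photos/cat%20caf%C3%A9.png'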
def _datetime_from_timestamp(self, ts):
"""
If timezone support is enabled, make an aware datetime object in UTC;
otherwise make a naive one in the local timezone.
"""
tz = timezone.utc if settings.USE_TZ else None
return datetime.fromtimestamp(ts, tz=tz)
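# Sketch (timestamp hypothetical): with USE_TZ=True,
# _datetime_from_timestamp(0) is datetime(1970, 1, 1, tzinfo=timezone.utc);
# with USE_TZ=False the same timestamp becomes a naive datetime in the
# local timezone.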
def get_accessed_time(self, name):
return self._datetime_from_timestamp(os.path.getatime(self.path(name)))
def get_created_time(self, name):
return self._datetime_from_timestamp(os.path.getctime(self.path(name)))
def get_modified_time(self, name):
return self._datetime_from_timestamp(os.path.getmtime(self.path(name)))
def get_storage_class(import_path=None):
return import_string(import_path or settings.DEFAULT_FILE_STORAGE)
class DefaultStorage(LazyObject):
def _setup(self):
self._wrapped = get_storage_class()()
default_storage = DefaultStorage()
|
17a9f4aa1d3308d73dd87e160c59272265ebd3c88785aee86999e6c9351569d7 | import datetime
import math
import re
from decimal import Decimal
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
Avg,
Case,
Count,
DateField,
DateTimeField,
DecimalField,
DurationField,
Exists,
F,
FloatField,
IntegerField,
Max,
Min,
OuterRef,
Q,
StdDev,
Subquery,
Sum,
TimeField,
Value,
Variance,
When,
)
from django.db.models.expressions import Func, RawSQL
from django.db.models.functions import (
Cast,
Coalesce,
Greatest,
Now,
Pi,
TruncDate,
TruncHour,
)
from django.test import TestCase
from django.test.testcases import skipUnlessDBFeature
from django.test.utils import Approximate, CaptureQueriesContext
from django.utils import timezone
from .models import Author, Book, Publisher, Store
class NowUTC(Now):
template = "CURRENT_TIMESTAMP"
output_field = DateTimeField()
def as_sql(self, compiler, connection, **extra_context):
if connection.features.test_now_utc_template:
extra_context["template"] = connection.features.test_now_utc_template
return super().as_sql(compiler, connection, **extra_context)
class AggregateTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="Brad Dayley", age=45)
cls.a4 = Author.objects.create(name="James Bennett", age=29)
cls.a5 = Author.objects.create(name="Jeffrey Forcier", age=37)
cls.a6 = Author.objects.create(name="Paul Bissex", age=29)
cls.a7 = Author.objects.create(name="Wesley J. Chun", age=25)
cls.a8 = Author.objects.create(name="Peter Norvig", age=57)
cls.a9 = Author.objects.create(name="Stuart Russell", age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(
name="Apress", num_awards=3, duration=datetime.timedelta(days=1)
)
cls.p2 = Publisher.objects.create(
name="Sams", num_awards=1, duration=datetime.timedelta(days=2)
)
cls.p3 = Publisher.objects.create(name="Prentice Hall", num_awards=7)
cls.p4 = Publisher.objects.create(name="Morgan Kaufmann", num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn="159059725",
name="The Definitive Guide to Django: Web Development Done Right",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn="067232959",
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
rating=3.0,
price=Decimal("23.09"),
contact=cls.a3,
publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn="159059996",
name="Practical Django Projects",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a4,
publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.b4 = Book.objects.create(
isbn="013235613",
name="Python Web Development with Django",
pages=350,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a5,
publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3),
)
cls.b5 = Book.objects.create(
isbn="013790395",
name="Artificial Intelligence: A Modern Approach",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a8,
publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15),
)
cls.b6 = Book.objects.create(
isbn="155860191",
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a8,
publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
s2 = Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30),
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def test_empty_aggregate(self):
self.assertEqual(Author.objects.aggregate(), {})
def test_aggregate_in_order_by(self):
msg = (
"Using an aggregate in order_by() without also including it in "
"annotate() is not allowed: Avg(F(book__rating)"
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.values("age").order_by(Avg("book__rating"))
def test_single_aggregate(self):
vals = Author.objects.aggregate(Avg("age"))
self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
def test_multiple_aggregates(self):
vals = Author.objects.aggregate(Sum("age"), Avg("age"))
self.assertEqual(
vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)}
)
def test_filter_aggregate(self):
vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
self.assertEqual(vals, {"age__sum": 254})
def test_related_aggregate(self):
vals = Author.objects.aggregate(Avg("friends__age"))
self.assertEqual(vals, {"friends__age__avg": Approximate(34.07, places=2)})
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
self.assertEqual(vals, {"authors__age__avg": Approximate(38.2857, places=2)})
vals = Author.objects.filter(name__contains="a").aggregate(Avg("book__rating"))
self.assertEqual(vals, {"book__rating__avg": 4.0})
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
self.assertEqual(vals, {"publisher__num_awards__sum": 30})
vals = Publisher.objects.aggregate(Sum("book__price"))
self.assertEqual(vals, {"book__price__sum": Decimal("270.27")})
def test_aggregate_multi_join(self):
vals = Store.objects.aggregate(Max("books__authors__age"))
self.assertEqual(vals, {"books__authors__age__max": 57})
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
self.assertEqual(vals, {"book__publisher__num_awards__min": 1})
def test_aggregate_alias(self):
vals = Store.objects.filter(name="Amazon.com").aggregate(
amazon_mean=Avg("books__rating")
)
self.assertEqual(vals, {"amazon_mean": Approximate(4.08, places=2)})
def test_aggregate_transform(self):
vals = Store.objects.aggregate(min_month=Min("original_opening__month"))
self.assertEqual(vals, {"min_month": 3})
def test_aggregate_join_transform(self):
vals = Publisher.objects.aggregate(min_year=Min("book__pubdate__year"))
self.assertEqual(vals, {"min_year": 1991})
def test_annotate_basic(self):
self.assertQuerysetEqual(
Book.objects.annotate().order_by("pk"),
[
"The Definitive Guide to Django: Web Development Done Right",
"Sams Teach Yourself Django in 24 Hours",
"Practical Django Projects",
"Python Web Development with Django",
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
],
lambda b: b.name,
)
books = Book.objects.annotate(mean_age=Avg("authors__age"))
b = books.get(pk=self.b1.pk)
self.assertEqual(
b.name, "The Definitive Guide to Django: Web Development Done Right"
)
self.assertEqual(b.mean_age, 34.5)
def test_annotate_defer(self):
qs = (
Book.objects.annotate(page_sum=Sum("pages"))
.defer("name")
.filter(pk=self.b1.pk)
)
rows = [
(
self.b1.id,
"159059725",
447,
"The Definitive Guide to Django: Web Development Done Right",
)
]
self.assertQuerysetEqual(
qs.order_by("pk"), rows, lambda r: (r.id, r.isbn, r.page_sum, r.name)
)
def test_annotate_defer_select_related(self):
qs = (
Book.objects.select_related("contact")
.annotate(page_sum=Sum("pages"))
.defer("name")
.filter(pk=self.b1.pk)
)
rows = [
(
self.b1.id,
"159059725",
447,
"Adrian Holovaty",
"The Definitive Guide to Django: Web Development Done Right",
)
]
self.assertQuerysetEqual(
qs.order_by("pk"),
rows,
lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name),
)
def test_annotate_m2m(self):
books = (
Book.objects.filter(rating__lt=4.5)
.annotate(Avg("authors__age"))
.order_by("name")
)
self.assertQuerysetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 51.5),
("Practical Django Projects", 29.0),
("Python Web Development with Django", Approximate(30.3, places=1)),
("Sams Teach Yourself Django in 24 Hours", 45.0),
],
lambda b: (b.name, b.authors__age__avg),
)
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
self.assertQuerysetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 2),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
1,
),
("Practical Django Projects", 1),
("Python Web Development with Django", 3),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 2),
],
lambda b: (b.name, b.num_authors),
)
def test_backwards_m2m_annotate(self):
authors = (
Author.objects.filter(name__contains="a")
.annotate(Avg("book__rating"))
.order_by("name")
)
self.assertQuerysetEqual(
authors,
[
("Adrian Holovaty", 4.5),
("Brad Dayley", 3.0),
("Jacob Kaplan-Moss", 4.5),
("James Bennett", 4.0),
("Paul Bissex", 4.0),
("Stuart Russell", 4.0),
],
lambda a: (a.name, a.book__rating__avg),
)
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
self.assertQuerysetEqual(
authors,
[
("Adrian Holovaty", 1),
("Brad Dayley", 1),
("Jacob Kaplan-Moss", 1),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 1),
("Peter Norvig", 2),
("Stuart Russell", 1),
("Wesley J. Chun", 1),
],
lambda a: (a.name, a.num_books),
)
def test_reverse_fkey_annotate(self):
books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
self.assertQuerysetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 7),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
9,
),
("Practical Django Projects", 3),
("Python Web Development with Django", 7),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 3),
],
lambda b: (b.name, b.publisher__num_awards__sum),
)
publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
self.assertQuerysetEqual(
publishers,
[
("Apress", Decimal("59.69")),
("Jonno's House of Books", None),
("Morgan Kaufmann", Decimal("75.00")),
("Prentice Hall", Decimal("112.49")),
("Sams", Decimal("23.09")),
],
lambda p: (p.name, p.book__price__sum),
)
def test_annotate_values(self):
books = list(
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values()
)
self.assertEqual(
books,
[
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values("pk", "isbn", "mean_age")
)
self.assertEqual(
list(books),
[
{
"pk": self.b1.pk,
"isbn": "159059725",
"mean_age": 34.5,
}
],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values("name")
)
self.assertEqual(
list(books),
[{"name": "The Definitive Guide to Django: Web Development Done Right"}],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.values()
.annotate(mean_age=Avg("authors__age"))
)
self.assertEqual(
list(books),
[
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
],
)
books = (
Book.objects.values("rating")
.annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age"))
.order_by("rating")
)
self.assertEqual(
list(books),
[
{
"rating": 3.0,
"n_authors": 1,
"mean_age": 45.0,
},
{
"rating": 4.0,
"n_authors": 6,
"mean_age": Approximate(37.16, places=1),
},
{
"rating": 4.5,
"n_authors": 2,
"mean_age": 34.5,
},
{
"rating": 5.0,
"n_authors": 1,
"mean_age": 57.0,
},
],
)
authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
self.assertQuerysetEqual(
authors,
[
("Adrian Holovaty", 32.0),
("Brad Dayley", None),
("Jacob Kaplan-Moss", 29.5),
("James Bennett", 34.0),
("Jeffrey Forcier", 27.0),
("Paul Bissex", 31.0),
("Peter Norvig", 46.0),
("Stuart Russell", 57.0),
("Wesley J. Chun", Approximate(33.66, places=1)),
],
lambda a: (a.name, a.friends__age__avg),
)
def test_count(self):
vals = Book.objects.aggregate(Count("rating"))
self.assertEqual(vals, {"rating__count": 6})
def test_count_star(self):
with self.assertNumQueries(1) as ctx:
Book.objects.aggregate(n=Count("*"))
sql = ctx.captured_queries[0]["sql"]
self.assertIn("SELECT COUNT(*) ", sql)
def test_count_distinct_expression(self):
aggs = Book.objects.aggregate(
distinct_ratings=Count(
Case(When(pages__gt=300, then="rating")), distinct=True
),
)
self.assertEqual(aggs["distinct_ratings"], 4)
def test_distinct_on_aggregate(self):
for aggregate, expected_result in (
(Avg, 4.125),
(Count, 4),
(Sum, 16.5),
):
with self.subTest(aggregate=aggregate.__name__):
books = Book.objects.aggregate(
ratings=aggregate("rating", distinct=True)
)
self.assertEqual(books["ratings"], expected_result)
def test_non_grouped_annotation_not_in_group_by(self):
"""
An annotation not included in values() before an aggregate should be
excluded from the group by clause.
"""
qs = (
Book.objects.annotate(xprice=F("price"))
.filter(rating=4.0)
.values("rating")
.annotate(count=Count("publisher_id", distinct=True))
.values("count", "rating")
.order_by("count")
)
self.assertEqual(list(qs), [{"rating": 4.0, "count": 2}])
def test_grouped_annotation_in_group_by(self):
"""
An annotation included in values() before an aggregate should be
included in the group by clause.
"""
qs = (
Book.objects.annotate(xprice=F("price"))
.filter(rating=4.0)
.values("rating", "xprice")
.annotate(count=Count("publisher_id", distinct=True))
.values("count", "rating")
.order_by("count")
)
self.assertEqual(
list(qs),
[
{"rating": 4.0, "count": 1},
{"rating": 4.0, "count": 2},
],
)
def test_fkey_aggregate(self):
explicit = list(Author.objects.annotate(Count("book__id")))
implicit = list(Author.objects.annotate(Count("book")))
self.assertCountEqual(explicit, implicit)
def test_annotate_ordering(self):
books = (
Book.objects.values("rating")
.annotate(oldest=Max("authors__age"))
.order_by("oldest", "rating")
)
self.assertEqual(
list(books),
[
{"rating": 4.5, "oldest": 35},
{"rating": 3.0, "oldest": 45},
{"rating": 4.0, "oldest": 57},
{"rating": 5.0, "oldest": 57},
],
)
books = (
Book.objects.values("rating")
.annotate(oldest=Max("authors__age"))
.order_by("-oldest", "-rating")
)
self.assertEqual(
list(books),
[
{"rating": 5.0, "oldest": 57},
{"rating": 4.0, "oldest": 57},
{"rating": 3.0, "oldest": 45},
{"rating": 4.5, "oldest": 35},
],
)
def test_aggregate_annotation(self):
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(
Avg("num_authors")
)
self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_avg_duration_field(self):
# Explicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg("duration", output_field=DurationField())),
{"duration__avg": datetime.timedelta(days=1, hours=12)},
)
# Implicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg("duration")),
{"duration__avg": datetime.timedelta(days=1, hours=12)},
)
def test_sum_duration_field(self):
self.assertEqual(
Publisher.objects.aggregate(Sum("duration", output_field=DurationField())),
{"duration__sum": datetime.timedelta(days=3)},
)
def test_sum_distinct_aggregate(self):
"""
Sum on a distinct() QuerySet should aggregate only the distinct items.
"""
authors = Author.objects.filter(book__in=[self.b5, self.b6])
self.assertEqual(authors.count(), 3)
distinct_authors = authors.distinct()
self.assertEqual(distinct_authors.count(), 2)
# Selected author ages are 57 and 46
age_sum = distinct_authors.aggregate(Sum("age"))
self.assertEqual(age_sum["age__sum"], 103)
def test_filtering(self):
p = Publisher.objects.create(name="Expensive Publisher", num_awards=0)
Book.objects.create(
name="ExpensiveBook1",
pages=1,
isbn="111",
rating=3.5,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 1),
)
Book.objects.create(
name="ExpensiveBook2",
pages=1,
isbn="222",
rating=4.0,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 2),
)
Book.objects.create(
name="ExpensiveBook3",
pages=1,
isbn="333",
rating=4.5,
price=Decimal("35"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 3),
)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
["Apress", "Prentice Hall", "Expensive Publisher"],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by(
"pk"
)
self.assertQuerysetEqual(
publishers,
[
"Apress",
"Apress",
"Sams",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1, book__price__lt=Decimal("40.0"))
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
["Apress", "Prentice Hall", "Expensive Publisher"],
lambda p: p.name,
)
publishers = (
Publisher.objects.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerysetEqual(publishers, ["Apress"], lambda p: p.name)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__range=[1, 3])
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
[
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__range=[1, 2])
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
["Apress", "Sams", "Prentice Hall", "Morgan Kaufmann"],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__in=[1, 3])
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
["Sams", "Morgan Kaufmann", "Expensive Publisher"],
lambda p: p.name,
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(
num_books__isnull=True
)
self.assertEqual(len(publishers), 0)
def test_annotation(self):
vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id"))
self.assertEqual(vals, {"friends__id__count": 2})
books = (
Book.objects.annotate(num_authors=Count("authors__name"))
.filter(num_authors__exact=2)
.order_by("pk")
)
self.assertQuerysetEqual(
books,
[
"The Definitive Guide to Django: Web Development Done Right",
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name,
)
authors = (
Author.objects.annotate(num_friends=Count("friends__id", distinct=True))
.filter(num_friends=0)
.order_by("pk")
)
self.assertQuerysetEqual(authors, ["Brad Dayley"], lambda a: a.name)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerysetEqual(
publishers, ["Apress", "Prentice Hall"], lambda p: p.name
)
publishers = (
Publisher.objects.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
)
self.assertQuerysetEqual(publishers, ["Apress"], lambda p: p.name)
books = Book.objects.annotate(num_authors=Count("authors__id")).filter(
authors__name__contains="Norvig", num_authors__gt=1
)
self.assertQuerysetEqual(
books, ["Artificial Intelligence: A Modern Approach"], lambda b: b.name
)
def test_more_aggregation(self):
a = Author.objects.get(name__contains="Norvig")
b = Book.objects.get(name__contains="Done Right")
b.authors.add(a)
b.save()
vals = (
Book.objects.annotate(num_authors=Count("authors__id"))
.filter(authors__name__contains="Norvig", num_authors__gt=1)
.aggregate(Avg("rating"))
)
self.assertEqual(vals, {"rating__avg": 4.25})
def test_even_more_aggregate(self):
publishers = (
Publisher.objects.annotate(
earliest_book=Min("book__pubdate"),
)
.exclude(earliest_book=None)
.order_by("earliest_book")
.values(
"earliest_book",
"num_awards",
"id",
"name",
)
)
self.assertEqual(
list(publishers),
[
{
"earliest_book": datetime.date(1991, 10, 15),
"num_awards": 9,
"id": self.p4.id,
"name": "Morgan Kaufmann",
},
{
"earliest_book": datetime.date(1995, 1, 15),
"num_awards": 7,
"id": self.p3.id,
"name": "Prentice Hall",
},
{
"earliest_book": datetime.date(2007, 12, 6),
"num_awards": 3,
"id": self.p1.id,
"name": "Apress",
},
{
"earliest_book": datetime.date(2008, 3, 3),
"num_awards": 1,
"id": self.p2.id,
"name": "Sams",
},
],
)
vals = Store.objects.aggregate(
Max("friday_night_closing"), Min("original_opening")
)
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
)
def test_annotate_values_list(self):
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("pk", "isbn", "mean_age")
)
self.assertEqual(list(books), [(self.b1.id, "159059725", 34.5)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("isbn")
)
self.assertEqual(list(books), [("159059725",)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("mean_age")
)
self.assertEqual(list(books), [(34.5,)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("mean_age", flat=True)
)
self.assertEqual(list(books), [34.5])
books = (
Book.objects.values_list("price")
.annotate(count=Count("price"))
.order_by("-count", "price")
)
self.assertEqual(
list(books),
[
(Decimal("29.69"), 2),
(Decimal("23.09"), 1),
(Decimal("30"), 1),
(Decimal("75"), 1),
(Decimal("82.8"), 1),
],
)
def test_dates_with_aggregation(self):
"""
.dates() returns a distinct set of dates when applied to a
QuerySet with aggregation.
Refs #18056. Previously, .dates() would return distinct (date_kind,
aggregation) sets, in this case (year, num_authors), so 2008 would be
returned twice because there are books from 2008 with a different
number of authors.
"""
dates = Book.objects.annotate(num_authors=Count("authors")).dates(
"pubdate", "year"
)
self.assertSequenceEqual(
dates,
[
datetime.date(1991, 1, 1),
datetime.date(1995, 1, 1),
datetime.date(2007, 1, 1),
datetime.date(2008, 1, 1),
],
)
def test_values_aggregation(self):
# Refs #20782
max_rating = Book.objects.values("rating").aggregate(max_rating=Max("rating"))
self.assertEqual(max_rating["max_rating"], 5)
max_books_per_rating = (
Book.objects.values("rating")
.annotate(books_per_rating=Count("id"))
.aggregate(Max("books_per_rating"))
)
self.assertEqual(max_books_per_rating, {"books_per_rating__max": 3})
def test_ticket17424(self):
"""
Doing exclude() on a foreign model after annotate() doesn't crash.
"""
all_books = list(Book.objects.values_list("pk", flat=True).order_by("pk"))
annotated_books = Book.objects.order_by("pk").annotate(one=Count("id"))
# The value doesn't matter; we just need any negative
# constraint on a related model that's a no-op.
excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__")
# Try to generate query tree
str(excluded_books.query)
self.assertQuerysetEqual(excluded_books, all_books, lambda x: x.pk)
# Check internal state
self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type)
self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type)
def test_ticket12886(self):
"""
Aggregation over sliced queryset works correctly.
"""
qs = Book.objects.order_by("-rating")[0:3]
vals = qs.aggregate(average_top3_rating=Avg("rating"))["average_top3_rating"]
self.assertAlmostEqual(vals, 4.5, places=2)
def test_ticket11881(self):
"""
Subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE or
select_related() stuff.
"""
qs = (
Book.objects.select_for_update()
.order_by("pk")
.select_related("publisher")
.annotate(max_pk=Max("pk"))
)
with CaptureQueriesContext(connection) as captured_queries:
qs.aggregate(avg_pk=Avg("max_pk"))
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]["sql"].lower()
self.assertNotIn("for update", qstr)
forced_ordering = connection.ops.force_no_ordering()
if forced_ordering:
# If the backend needs to force an ordering we make sure it's
# the only "ORDER BY" clause present in the query.
self.assertEqual(
re.findall(r"order by (\w+)", qstr),
[", ".join(f[1][0] for f in forced_ordering).lower()],
)
else:
self.assertNotIn("order by", qstr)
self.assertEqual(qstr.count(" join "), 0)
def test_decimal_max_digits_has_no_effect(self):
Book.objects.all().delete()
a1 = Author.objects.first()
p1 = Publisher.objects.first()
thedate = timezone.now()
for i in range(10):
Book.objects.create(
isbn="abcde{}".format(i),
name="none",
pages=10,
rating=4.0,
price=9999.98,
contact=a1,
publisher=p1,
pubdate=thedate,
)
book = Book.objects.aggregate(price_sum=Sum("price"))
self.assertEqual(book["price_sum"], Decimal("99999.80"))
def test_nonaggregate_aggregation_throws(self):
with self.assertRaisesMessage(TypeError, "fail is not an aggregate expression"):
Book.objects.aggregate(fail=F("price"))
def test_nonfield_annotation(self):
book = Book.objects.annotate(val=Max(Value(2))).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(
val=Max(Value(2), output_field=IntegerField())
).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
def test_annotation_expressions(self):
authors = Author.objects.annotate(
combined_ages=Sum(F("age") + F("friends__age"))
).order_by("name")
authors2 = Author.objects.annotate(
combined_ages=Sum("age") + Sum("friends__age")
).order_by("name")
for qs in (authors, authors2):
self.assertQuerysetEqual(
qs,
[
("Adrian Holovaty", 132),
("Brad Dayley", None),
("Jacob Kaplan-Moss", 129),
("James Bennett", 63),
("Jeffrey Forcier", 128),
("Paul Bissex", 120),
("Peter Norvig", 103),
("Stuart Russell", 103),
("Wesley J. Chun", 176),
],
lambda a: (a.name, a.combined_ages),
)
def test_aggregation_expressions(self):
a1 = Author.objects.aggregate(av_age=Sum("age") / Count("*"))
a2 = Author.objects.aggregate(av_age=Sum("age") / Count("age"))
a3 = Author.objects.aggregate(av_age=Avg("age"))
self.assertEqual(a1, {"av_age": 37})
self.assertEqual(a2, {"av_age": 37})
self.assertEqual(a3, {"av_age": Approximate(37.4, places=1)})
def test_avg_decimal_field(self):
v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg("price")))[
"avg_price"
]
self.assertIsInstance(v, Decimal)
self.assertEqual(v, Approximate(Decimal("47.39"), places=2))
def test_order_of_precedence(self):
p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg("price") + 2) * 3)
self.assertEqual(p1, {"avg_price": Approximate(Decimal("148.18"), places=2)})
p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg("price") + 2 * 3)
self.assertEqual(p2, {"avg_price": Approximate(Decimal("53.39"), places=2)})
def test_combine_different_types(self):
msg = (
"Cannot infer type of '+' expression involving these types: FloatField, "
"DecimalField. You must set output_field."
)
qs = Book.objects.annotate(sums=Sum("rating") + Sum("pages") + Sum("price"))
with self.assertRaisesMessage(FieldError, msg):
qs.first()
with self.assertRaisesMessage(FieldError, msg):
qs.first()
b1 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=IntegerField())
).get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
b2 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=FloatField())
).get(pk=self.b4.pk)
self.assertEqual(b2.sums, 383.69)
b3 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=DecimalField())
).get(pk=self.b4.pk)
self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2))
def test_complex_aggregations_require_kwarg(self):
with self.assertRaisesMessage(
TypeError, "Complex annotations require an alias"
):
Author.objects.annotate(Sum(F("age") + F("friends__age")))
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Author.objects.aggregate(Sum("age") / Count("age"))
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Author.objects.aggregate(Sum(1))
def test_aggregate_over_complex_annotation(self):
qs = Author.objects.annotate(combined_ages=Sum(F("age") + F("friends__age")))
age = qs.aggregate(max_combined_age=Max("combined_ages"))
self.assertEqual(age["max_combined_age"], 176)
age = qs.aggregate(max_combined_age_doubled=Max("combined_ages") * 2)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages")
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages"),
sum_combined_age=Sum("combined_ages"),
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
self.assertEqual(age["sum_combined_age"], 954)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages"),
sum_combined_age_doubled=Sum("combined_ages") + Sum("combined_ages"),
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
self.assertEqual(age["sum_combined_age_doubled"], 954 * 2)
def test_values_annotation_with_expression(self):
# Ensure the F() is promoted to the GROUP BY clause.
qs = Author.objects.values("name").annotate(another_age=Sum("age") + F("age"))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a["another_age"], 68)
qs = qs.annotate(friend_count=Count("friends"))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a["friend_count"], 2)
qs = (
qs.annotate(combined_age=Sum("age") + F("friends__age"))
.filter(name="Adrian Holovaty")
.order_by("-combined_age")
)
self.assertEqual(
list(qs),
[
{
"name": "Adrian Holovaty",
"another_age": 68,
"friend_count": 1,
"combined_age": 69,
},
{
"name": "Adrian Holovaty",
"another_age": 68,
"friend_count": 1,
"combined_age": 63,
},
],
)
vals = qs.values("name", "combined_age")
self.assertEqual(
list(vals),
[
{"name": "Adrian Holovaty", "combined_age": 69},
{"name": "Adrian Holovaty", "combined_age": 63},
],
)
def test_annotate_values_aggregate(self):
alias_age = (
Author.objects.annotate(age_alias=F("age"))
.values(
"age_alias",
)
.aggregate(sum_age=Sum("age_alias"))
)
age = Author.objects.values("age").aggregate(sum_age=Sum("age"))
self.assertEqual(alias_age["sum_age"], age["sum_age"])
def test_annotate_over_annotate(self):
author = (
Author.objects.annotate(age_alias=F("age"))
.annotate(sum_age=Sum("age_alias"))
.get(name="Adrian Holovaty")
)
other_author = Author.objects.annotate(sum_age=Sum("age")).get(
name="Adrian Holovaty"
)
self.assertEqual(author.sum_age, other_author.sum_age)
def test_aggregate_over_aggregate(self):
msg = "Cannot compute Avg('age'): 'age' is an aggregate"
with self.assertRaisesMessage(FieldError, msg):
Author.objects.annotate(age_alias=F("age"),).aggregate(
age=Sum(F("age")),
avg_age=Avg(F("age")),
)
def test_annotated_aggregate_over_annotated_aggregate(self):
with self.assertRaisesMessage(
FieldError, "Cannot compute Sum('id__max'): 'id__max' is an aggregate"
):
Book.objects.annotate(Max("id")).annotate(Sum("id__max"))
class MyMax(Max):
def as_sql(self, compiler, connection):
self.set_source_expressions(self.get_source_expressions()[0:1])
return super().as_sql(compiler, connection)
with self.assertRaisesMessage(
FieldError, "Cannot compute Max('id__max'): 'id__max' is an aggregate"
):
Book.objects.annotate(Max("id")).annotate(my_max=MyMax("id__max", "price"))
def test_multi_arg_aggregate(self):
class MyMax(Max):
output_field = DecimalField()
def as_sql(self, compiler, connection):
copy = self.copy()
copy.set_source_expressions(copy.get_source_expressions()[0:1])
return super(MyMax, copy).as_sql(compiler, connection)
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Book.objects.aggregate(MyMax("pages", "price"))
with self.assertRaisesMessage(
TypeError, "Complex annotations require an alias"
):
Book.objects.annotate(MyMax("pages", "price"))
Book.objects.aggregate(max_field=MyMax("pages", "price"))
def test_add_implementation(self):
class MySum(Sum):
pass
# test completely changing how the output is rendered
def lower_case_function_override(self, compiler, connection):
sql, params = compiler.compile(self.source_expressions[0])
substitutions = {
"function": self.function.lower(),
"expressions": sql,
"distinct": "",
}
substitutions.update(self.extra)
return self.template % substitutions, params
setattr(MySum, "as_" + connection.vendor, lower_case_function_override)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("sum("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test changing the dict and delegating
def lower_case_function_super(self, compiler, connection):
self.extra["function"] = self.function.lower()
return super(MySum, self).as_sql(compiler, connection)
setattr(MySum, "as_" + connection.vendor, lower_case_function_super)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("sum("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test overriding all parts of the template
def be_evil(self, compiler, connection):
substitutions = {"function": "MAX", "expressions": "2", "distinct": ""}
substitutions.update(self.extra)
return self.template % substitutions, ()
setattr(MySum, "as_" + connection.vendor, be_evil)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("MAX("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 2)
def test_complex_values_aggregation(self):
max_rating = Book.objects.values("rating").aggregate(
double_max_rating=Max("rating") + Max("rating")
)
self.assertEqual(max_rating["double_max_rating"], 5 * 2)
max_books_per_rating = (
Book.objects.values("rating")
.annotate(books_per_rating=Count("id") + 5)
.aggregate(Max("books_per_rating"))
)
self.assertEqual(max_books_per_rating, {"books_per_rating__max": 3 + 5})
def test_expression_on_aggregation(self):
qs = (
Publisher.objects.annotate(
price_or_median=Greatest(
Avg("book__rating", output_field=DecimalField()), Avg("book__price")
)
)
.filter(price_or_median__gte=F("num_awards"))
.order_by("num_awards")
)
self.assertQuerysetEqual(qs, [1, 3, 7, 9], lambda v: v.num_awards)
qs2 = (
Publisher.objects.annotate(
rating_or_num_awards=Greatest(
Avg("book__rating"), F("num_awards"), output_field=FloatField()
)
)
.filter(rating_or_num_awards__gt=F("num_awards"))
.order_by("num_awards")
)
self.assertQuerysetEqual(qs2, [1, 3], lambda v: v.num_awards)
def test_arguments_must_be_expressions(self):
msg = "QuerySet.aggregate() received non-expression(s): %s."
with self.assertRaisesMessage(TypeError, msg % FloatField()):
Book.objects.aggregate(FloatField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.aggregate(is_book=True)
with self.assertRaisesMessage(
TypeError, msg % ", ".join([str(FloatField()), "True"])
):
Book.objects.aggregate(FloatField(), Avg("price"), is_book=True)
def test_aggregation_subquery_annotation(self):
"""Subquery annotations are excluded from the GROUP BY if they are
not explicitly grouped against."""
latest_book_pubdate_qs = (
Book.objects.filter(publisher=OuterRef("pk"))
.order_by("-pubdate")
.values("pubdate")[:1]
)
publisher_qs = Publisher.objects.annotate(
latest_book_pubdate=Subquery(latest_book_pubdate_qs),
).annotate(count=Count("book"))
with self.assertNumQueries(1) as ctx:
list(publisher_qs)
self.assertEqual(ctx[0]["sql"].count("SELECT"), 2)
# The GROUP BY should not be by alias either.
self.assertEqual(ctx[0]["sql"].lower().count("latest_book_pubdate"), 1)
def test_aggregation_subquery_annotation_exists(self):
latest_book_pubdate_qs = (
Book.objects.filter(publisher=OuterRef("pk"))
.order_by("-pubdate")
.values("pubdate")[:1]
)
publisher_qs = Publisher.objects.annotate(
latest_book_pubdate=Subquery(latest_book_pubdate_qs),
count=Count("book"),
)
self.assertTrue(publisher_qs.exists())
def test_aggregation_exists_annotation(self):
published_books = Book.objects.filter(publisher=OuterRef("pk"))
publisher_qs = Publisher.objects.annotate(
published_book=Exists(published_books),
count=Count("book"),
).values_list("name", flat=True)
self.assertCountEqual(
list(publisher_qs),
[
"Apress",
"Morgan Kaufmann",
"Jonno's House of Books",
"Prentice Hall",
"Sams",
],
)
def test_aggregation_subquery_annotation_values(self):
"""
Subquery annotations and external aliases are excluded from the GROUP
BY if they are not selected.
"""
books_qs = (
Book.objects.annotate(
first_author_the_same_age=Subquery(
Author.objects.filter(
age=OuterRef("contact__friends__age"),
)
.order_by("age")
.values("id")[:1],
)
)
.filter(
publisher=self.p1,
first_author_the_same_age__isnull=False,
)
.annotate(
min_age=Min("contact__friends__age"),
)
.values("name", "min_age")
.order_by("name")
)
self.assertEqual(
list(books_qs),
[
{"name": "Practical Django Projects", "min_age": 34},
{
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"min_age": 29,
},
],
)
def test_aggregation_subquery_annotation_values_collision(self):
books_rating_qs = Book.objects.filter(
publisher=OuterRef("pk"),
price=Decimal("29.69"),
).values("rating")
publisher_qs = (
Publisher.objects.filter(
book__contact__age__gt=20,
name=self.p1.name,
)
.annotate(
rating=Subquery(books_rating_qs),
contacts_count=Count("book__contact"),
)
.values("rating")
.annotate(total_count=Count("rating"))
)
self.assertEqual(
list(publisher_qs),
[
{"rating": 4.0, "total_count": 2},
],
)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_subquery_annotation_multivalued(self):
"""
Subquery annotations must be included in the GROUP BY if they use
potentially multivalued relations (contain the LOOKUP_SEP).
"""
subquery_qs = Author.objects.filter(
pk=OuterRef("pk"),
book__name=OuterRef("book__name"),
).values("pk")
author_qs = Author.objects.annotate(
subquery_id=Subquery(subquery_qs),
).annotate(count=Count("book"))
self.assertEqual(author_qs.count(), Author.objects.count())
def test_aggregation_order_by_not_selected_annotation_values(self):
result_asc = [
self.b4.pk,
self.b3.pk,
self.b1.pk,
self.b2.pk,
self.b5.pk,
self.b6.pk,
]
result_desc = result_asc[::-1]
tests = [
("min_related_age", result_asc),
("-min_related_age", result_desc),
(F("min_related_age"), result_asc),
(F("min_related_age").asc(), result_asc),
(F("min_related_age").desc(), result_desc),
]
for ordering, expected_result in tests:
with self.subTest(ordering=ordering):
books_qs = (
Book.objects.annotate(
min_age=Min("authors__age"),
)
.annotate(
min_related_age=Coalesce("min_age", "contact__age"),
)
.order_by(ordering)
.values_list("pk", flat=True)
)
self.assertEqual(list(books_qs), expected_result)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_group_by_subquery_annotation(self):
"""
Subquery annotations are included in the GROUP BY if they are
grouped against.
"""
long_books_count_qs = (
Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=400,
)
.values("publisher")
.annotate(count=Count("pk"))
.values("count")
)
groups = [
Subquery(long_books_count_qs),
long_books_count_qs,
long_books_count_qs.query,
]
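        # A Subquery expression, a queryset, and a raw Query object should all
        # be accepted as grouping expressions and yield the same breakdown.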
for group in groups:
with self.subTest(group=group.__class__.__name__):
long_books_count_breakdown = Publisher.objects.values_list(
group,
).annotate(total=Count("*"))
self.assertEqual(dict(long_books_count_breakdown), {None: 1, 1: 4})
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_group_by_exists_annotation(self):
"""
Exists annotations are included in the GROUP BY if they are
grouped against.
"""
long_books_qs = Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=800,
)
has_long_books_breakdown = Publisher.objects.values_list(
Exists(long_books_qs),
).annotate(total=Count("*"))
self.assertEqual(dict(has_long_books_breakdown), {True: 2, False: 3})
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_subquery_annotation_related_field(self):
publisher = Publisher.objects.create(name=self.a9.name, num_awards=2)
book = Book.objects.create(
isbn="159059999",
name="Test book.",
pages=819,
rating=2.5,
price=Decimal("14.44"),
contact=self.a9,
publisher=publisher,
pubdate=datetime.date(2019, 12, 6),
)
book.authors.add(self.a5, self.a6, self.a7)
books_qs = (
Book.objects.annotate(
contact_publisher=Subquery(
Publisher.objects.filter(
pk=OuterRef("publisher"),
name=OuterRef("contact__name"),
).values("name")[:1],
)
)
.filter(
contact_publisher__isnull=False,
)
.annotate(count=Count("authors"))
)
self.assertSequenceEqual(books_qs, [book])
# FIXME: GROUP BY doesn't need to include a subquery with
# non-multivalued JOINs, see Col.possibly_multivalued (refs #31150):
# with self.assertNumQueries(1) as ctx:
# self.assertSequenceEqual(books_qs, [book])
# self.assertEqual(ctx[0]['sql'].count('SELECT'), 2)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_nested_subquery_outerref(self):
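        # OuterRef(OuterRef(...)) resolves two query levels up, against the
        # outermost Book queryset.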
publisher_with_same_name = Publisher.objects.filter(
id__in=Subquery(
Publisher.objects.filter(
name=OuterRef(OuterRef("publisher__name")),
).values("id"),
),
).values(publisher_count=Count("id"))[:1]
books_breakdown = Book.objects.annotate(
publisher_count=Subquery(publisher_with_same_name),
authors_count=Count("authors"),
).values_list("publisher_count", flat=True)
self.assertSequenceEqual(books_breakdown, [1] * 6)
def test_filter_in_subquery_or_aggregation(self):
"""
Filtering against an aggregate requires the usage of the HAVING clause.
        If such a filter is OR'ed with a non-aggregate one, the latter must
        also be moved to the HAVING clause and have its grouping columns
        added to the GROUP BY.
When this is done with a subquery the specialized logic in charge of
using outer reference columns to group should be used instead of the
subquery itself as the latter might return multiple rows.
"""
authors = Author.objects.annotate(
Count("book"),
).filter(Q(book__count__gt=0) | Q(pk__in=Book.objects.values("authors")))
self.assertQuerysetEqual(authors, Author.objects.all(), ordered=False)
def test_aggregation_random_ordering(self):
"""Random() is not included in the GROUP BY when used for ordering."""
authors = Author.objects.annotate(contact_count=Count("book")).order_by("?")
self.assertQuerysetEqual(
authors,
[
("Adrian Holovaty", 1),
("Jacob Kaplan-Moss", 1),
("Brad Dayley", 1),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 1),
("Wesley J. Chun", 1),
("Stuart Russell", 1),
("Peter Norvig", 2),
],
lambda a: (a.name, a.contact_count),
ordered=False,
)
def test_empty_result_optimization(self):
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Sum("num_awards"),
books_count=Count("book"),
),
{
"sum_awards": None,
"books_count": 0,
},
)
# Expression without empty_result_set_value forces queries to be
# executed even if they would return an empty result set.
raw_books_count = Func("book", function="COUNT")
raw_books_count.contains_aggregate = True
with self.assertNumQueries(1):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Sum("num_awards"),
books_count=raw_books_count,
),
{
"sum_awards": None,
"books_count": 0,
},
)
def test_coalesced_empty_result_set(self):
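        # Coalesce() with a constant fallback can be resolved without hitting
        # the database when aggregating over an empty queryset.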
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Sum("num_awards"), 0),
)["sum_awards"],
0,
)
# Multiple expressions.
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Sum("num_awards"), None, 0),
)["sum_awards"],
0,
)
# Nested coalesce.
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Coalesce(Sum("num_awards"), None), 0),
)["sum_awards"],
0,
)
# Expression coalesce.
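        # A database-side fallback can't be resolved in Python, so a query is
        # still executed.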
with self.assertNumQueries(1):
self.assertIsInstance(
Store.objects.none().aggregate(
latest_opening=Coalesce(
Max("original_opening"),
RawSQL("CURRENT_TIMESTAMP", []),
),
)["latest_opening"],
datetime.datetime,
)
def test_aggregation_default_unsupported_by_count(self):
msg = "Count does not allow default."
with self.assertRaisesMessage(TypeError, msg):
Count("age", default=0)
def test_aggregation_default_unset(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age"),
)
self.assertIsNone(result["value"])
def test_aggregation_default_zero(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=0),
)
self.assertEqual(result["value"], 0)
def test_aggregation_default_integer(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=21),
)
self.assertEqual(result["value"], 21)
def test_aggregation_default_expression(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=Value(5) * Value(7)),
)
self.assertEqual(result["value"], 35)
def test_aggregation_default_group_by(self):
qs = (
Publisher.objects.values("name")
.annotate(
books=Count("book"),
pages=Sum("book__pages", default=0),
)
.filter(books=0)
)
self.assertSequenceEqual(
qs,
[{"name": "Jonno's House of Books", "books": 0, "pages": 0}],
)
def test_aggregation_default_compound_expression(self):
# Scale rating to a percentage; default to 50% if no books published.
formula = Avg("book__rating", default=2.5) * 20.0
queryset = Publisher.objects.annotate(rating=formula).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "rating"),
[
{"name": "Apress", "rating": 85.0},
{"name": "Jonno's House of Books", "rating": 50.0},
{"name": "Morgan Kaufmann", "rating": 100.0},
{"name": "Prentice Hall", "rating": 80.0},
{"name": "Sams", "rating": 60.0},
],
)
def test_aggregation_default_using_time_from_python(self):
expr = Min(
"store__friday_night_closing",
filter=~Q(store__name="Amazon.com"),
default=datetime.time(17),
)
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL 8.0+ & MariaDB.
expr.default = Cast(expr.default, TimeField())
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{"isbn": "013235613", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "013790395",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "067232959", "oldest_store_opening": datetime.time(17)},
{"isbn": "155860191", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "159059725",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "159059996", "oldest_store_opening": datetime.time(21, 30)},
],
)
def test_aggregation_default_using_time_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min(
"store__friday_night_closing",
filter=~Q(store__name="Amazon.com"),
default=TruncHour(NowUTC(), output_field=TimeField()),
)
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{"isbn": "013235613", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "013790395",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "067232959", "oldest_store_opening": datetime.time(now.hour)},
{"isbn": "155860191", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "159059725",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "159059996", "oldest_store_opening": datetime.time(21, 30)},
],
)
def test_aggregation_default_using_date_from_python(self):
expr = Min("book__pubdate", default=datetime.date(1970, 1, 1))
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL 5.7+ & MariaDB.
expr.default = Cast(expr.default, DateField())
queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "earliest_pubdate"),
[
{"name": "Apress", "earliest_pubdate": datetime.date(2007, 12, 6)},
{
"name": "Jonno's House of Books",
"earliest_pubdate": datetime.date(1970, 1, 1),
},
{
"name": "Morgan Kaufmann",
"earliest_pubdate": datetime.date(1991, 10, 15),
},
{
"name": "Prentice Hall",
"earliest_pubdate": datetime.date(1995, 1, 15),
},
{"name": "Sams", "earliest_pubdate": datetime.date(2008, 3, 3)},
],
)
def test_aggregation_default_using_date_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min("book__pubdate", default=TruncDate(NowUTC()))
queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "earliest_pubdate"),
[
{"name": "Apress", "earliest_pubdate": datetime.date(2007, 12, 6)},
{"name": "Jonno's House of Books", "earliest_pubdate": now.date()},
{
"name": "Morgan Kaufmann",
"earliest_pubdate": datetime.date(1991, 10, 15),
},
{
"name": "Prentice Hall",
"earliest_pubdate": datetime.date(1995, 1, 15),
},
{"name": "Sams", "earliest_pubdate": datetime.date(2008, 3, 3)},
],
)
def test_aggregation_default_using_datetime_from_python(self):
expr = Min(
"store__original_opening",
filter=~Q(store__name="Amazon.com"),
default=datetime.datetime(1970, 1, 1),
)
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL 8.0+ & MariaDB.
expr.default = Cast(expr.default, DateTimeField())
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{
"isbn": "013235613",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "013790395",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "067232959",
"oldest_store_opening": datetime.datetime(1970, 1, 1),
},
{
"isbn": "155860191",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "159059725",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "159059996",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
],
)
def test_aggregation_default_using_datetime_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min(
"store__original_opening",
filter=~Q(store__name="Amazon.com"),
default=TruncHour(NowUTC(), output_field=DateTimeField()),
)
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{
"isbn": "013235613",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "013790395",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "067232959",
"oldest_store_opening": now.replace(
minute=0, second=0, microsecond=0, tzinfo=None
),
},
{
"isbn": "155860191",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "159059725",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "159059996",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
],
)
def test_aggregation_default_using_duration_from_python(self):
result = Publisher.objects.filter(num_awards__gt=3).aggregate(
value=Sum("duration", default=datetime.timedelta(0)),
)
self.assertEqual(result["value"], datetime.timedelta(0))
def test_aggregation_default_using_duration_from_database(self):
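        # Now() - Now() is evaluated by the database and yields a zero
        # duration.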
result = Publisher.objects.filter(num_awards__gt=3).aggregate(
value=Sum("duration", default=Now() - Now()),
)
self.assertEqual(result["value"], datetime.timedelta(0))
def test_aggregation_default_using_decimal_from_python(self):
result = Book.objects.filter(rating__lt=3.0).aggregate(
value=Sum("price", default=Decimal("0.00")),
)
self.assertEqual(result["value"], Decimal("0.00"))
def test_aggregation_default_using_decimal_from_database(self):
result = Book.objects.filter(rating__lt=3.0).aggregate(
value=Sum("price", default=Pi()),
)
self.assertAlmostEqual(result["value"], Decimal.from_float(math.pi), places=6)
def test_aggregation_default_passed_another_aggregate(self):
result = Book.objects.aggregate(
value=Sum("price", filter=Q(rating__lt=3.0), default=Avg("pages") / 10.0),
)
self.assertAlmostEqual(result["value"], Decimal("61.72"), places=2)
def test_aggregation_default_after_annotation(self):
result = Publisher.objects.annotate(
double_num_awards=F("num_awards") * 2,
).aggregate(value=Sum("double_num_awards", default=0))
self.assertEqual(result["value"], 40)
def test_aggregation_default_not_in_aggregate(self):
result = Publisher.objects.annotate(
avg_rating=Avg("book__rating", default=2.5),
).aggregate(Sum("num_awards"))
self.assertEqual(result["num_awards__sum"], 20)
def test_exists_none_with_aggregate(self):
qs = Book.objects.annotate(
count=Count("id"),
exists=Exists(Author.objects.none()),
)
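        # Exists() over an empty queryset should still compile alongside an
        # aggregate annotation instead of raising EmptyResultSet.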
self.assertEqual(len(qs), 6)
def test_alias_sql_injection(self):
crafted_alias = """injected_name" from "aggregation_author"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Author.objects.aggregate(**{crafted_alias: Avg("age")})
def test_exists_extra_where_with_aggregate(self):
qs = Book.objects.annotate(
count=Count("id"),
exists=Exists(Author.objects.extra(where=["1=0"])),
)
self.assertEqual(len(qs), 6)
import datetime
from decimal import Decimal
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db.models import (
BooleanField,
Case,
CharField,
Count,
DateTimeField,
DecimalField,
Exists,
ExpressionWrapper,
F,
FloatField,
Func,
IntegerField,
Max,
OuterRef,
Q,
Subquery,
Sum,
Value,
When,
)
from django.db.models.expressions import RawSQL
from django.db.models.functions import (
    Coalesce,
    ExtractYear,
    Floor,
    Length,
    Lower,
    Trim,
)
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import register_lookup
from .models import (
Author,
Book,
Company,
DepartmentStore,
Employee,
Publisher,
Store,
Ticket,
)
class NonAggregateAnnotationTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="Brad Dayley", age=45)
cls.a4 = Author.objects.create(name="James Bennett", age=29)
cls.a5 = Author.objects.create(name="Jeffrey Forcier", age=37)
cls.a6 = Author.objects.create(name="Paul Bissex", age=29)
cls.a7 = Author.objects.create(name="Wesley J. Chun", age=25)
cls.a8 = Author.objects.create(name="Peter Norvig", age=57)
cls.a9 = Author.objects.create(name="Stuart Russell", age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name="Apress", num_awards=3)
cls.p2 = Publisher.objects.create(name="Sams", num_awards=1)
cls.p3 = Publisher.objects.create(name="Prentice Hall", num_awards=7)
cls.p4 = Publisher.objects.create(name="Morgan Kaufmann", num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn="159059725",
name="The Definitive Guide to Django: Web Development Done Right",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn="067232959",
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
rating=3.0,
price=Decimal("23.09"),
contact=cls.a3,
publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn="159059996",
name="Practical Django Projects",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a4,
publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.b4 = Book.objects.create(
isbn="013235613",
name="Python Web Development with Django",
pages=350,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a5,
publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3),
)
cls.b5 = Book.objects.create(
isbn="013790395",
name="Artificial Intelligence: A Modern Approach",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a8,
publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15),
)
cls.b6 = Book.objects.create(
isbn="155860191",
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a8,
publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
cls.s1 = Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
cls.s2 = Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
cls.s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30),
)
cls.s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
cls.s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
cls.s3.books.add(cls.b3, cls.b4, cls.b6)
def test_basic_annotation(self):
books = Book.objects.annotate(is_book=Value(1))
for book in books:
self.assertEqual(book.is_book, 1)
def test_basic_f_annotation(self):
books = Book.objects.annotate(another_rating=F("rating"))
for book in books:
self.assertEqual(book.another_rating, book.rating)
def test_joined_annotation(self):
books = Book.objects.select_related("publisher").annotate(
num_awards=F("publisher__num_awards")
)
for book in books:
self.assertEqual(book.num_awards, book.publisher.num_awards)
def test_joined_transformed_annotation(self):
Employee.objects.bulk_create(
[
Employee(
first_name="John",
last_name="Doe",
age=18,
store=self.s1,
salary=15000,
),
Employee(
first_name="Jane",
last_name="Jones",
age=30,
store=self.s2,
salary=30000,
),
Employee(
first_name="Jo",
last_name="Smith",
age=55,
store=self.s3,
salary=50000,
),
]
)
employees = Employee.objects.annotate(
store_opened_year=F("store__original_opening__year"),
)
for employee in employees:
self.assertEqual(
employee.store_opened_year,
employee.store.original_opening.year,
)
def test_custom_transform_annotation(self):
with register_lookup(DecimalField, Floor):
books = Book.objects.annotate(floor_price=F("price__floor"))
self.assertSequenceEqual(
books.values_list("pk", "floor_price"),
[
(self.b1.pk, 30),
(self.b2.pk, 23),
(self.b3.pk, 29),
(self.b4.pk, 29),
(self.b5.pk, 82),
(self.b6.pk, 75),
],
)
def test_chaining_transforms(self):
Company.objects.create(name=" Django Software Foundation ")
Company.objects.create(name="Yahoo")
with register_lookup(CharField, Trim), register_lookup(CharField, Length):
for expr in [Length("name__trim"), F("name__trim__length")]:
with self.subTest(expr=expr):
self.assertCountEqual(
Company.objects.annotate(length=expr).values("name", "length"),
[
{"name": " Django Software Foundation ", "length": 26},
{"name": "Yahoo", "length": 5},
],
)
def test_mixed_type_annotation_date_interval(self):
active = datetime.datetime(2015, 3, 20, 14, 0, 0)
duration = datetime.timedelta(hours=1)
expires = datetime.datetime(2015, 3, 20, 14, 0, 0) + duration
Ticket.objects.create(active_at=active, duration=duration)
t = Ticket.objects.annotate(
expires=ExpressionWrapper(
F("active_at") + F("duration"), output_field=DateTimeField()
)
).first()
self.assertEqual(t.expires, expires)
def test_mixed_type_annotation_numbers(self):
test = self.b1
b = Book.objects.annotate(
combined=ExpressionWrapper(
F("pages") + F("rating"), output_field=IntegerField()
)
).get(isbn=test.isbn)
combined = int(test.pages + test.rating)
self.assertEqual(b.combined, combined)
def test_empty_expression_annotation(self):
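        # Q(pk__in=[]) can never match, so the wrapped expression resolves to
        # False for every row.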
books = Book.objects.annotate(
selected=ExpressionWrapper(Q(pk__in=[]), output_field=BooleanField())
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
books = Book.objects.annotate(
selected=ExpressionWrapper(
Q(pk__in=Book.objects.none()), output_field=BooleanField()
)
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
def test_full_expression_annotation(self):
books = Book.objects.annotate(
selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()),
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(book.selected for book in books))
def test_full_expression_annotation_with_aggregation(self):
qs = Book.objects.filter(isbn="159059725").annotate(
selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()),
rating_count=Count("rating"),
)
self.assertEqual([book.rating_count for book in qs], [1])
def test_aggregate_over_full_expression_annotation(self):
qs = Book.objects.annotate(
selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()),
).aggregate(Sum("selected"))
self.assertEqual(qs["selected__sum"], Book.objects.count())
def test_empty_queryset_annotation(self):
        qs = Author.objects.annotate(
            empty=Subquery(Author.objects.values("id").none())
        )
self.assertIsNone(qs.first().empty)
def test_annotate_with_aggregation(self):
books = Book.objects.annotate(is_book=Value(1), rating_count=Count("rating"))
for book in books:
self.assertEqual(book.is_book, 1)
self.assertEqual(book.rating_count, 1)
def test_combined_expression_annotation_with_aggregation(self):
book = Book.objects.annotate(
combined=ExpressionWrapper(
Value(3) * Value(4), output_field=IntegerField()
),
rating_count=Count("rating"),
).first()
self.assertEqual(book.combined, 12)
self.assertEqual(book.rating_count, 1)
def test_combined_f_expression_annotation_with_aggregation(self):
book = (
Book.objects.filter(isbn="159059725")
.annotate(
combined=ExpressionWrapper(
F("price") * F("pages"), output_field=FloatField()
),
rating_count=Count("rating"),
)
.first()
)
self.assertEqual(book.combined, 13410.0)
self.assertEqual(book.rating_count, 1)
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_q_expression_annotation_with_aggregation(self):
book = (
Book.objects.filter(isbn="159059725")
.annotate(
isnull_pubdate=ExpressionWrapper(
Q(pubdate__isnull=True),
output_field=BooleanField(),
),
rating_count=Count("rating"),
)
.first()
)
self.assertIs(book.isnull_pubdate, False)
self.assertEqual(book.rating_count, 1)
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_grouping_by_q_expression_annotation(self):
authors = (
Author.objects.annotate(
under_40=ExpressionWrapper(Q(age__lt=40), output_field=BooleanField()),
)
.values("under_40")
.annotate(
count_id=Count("id"),
)
.values("under_40", "count_id")
)
self.assertCountEqual(
authors,
[
{"under_40": False, "count_id": 3},
{"under_40": True, "count_id": 6},
],
)
def test_aggregate_over_annotation(self):
agg = Author.objects.annotate(other_age=F("age")).aggregate(
otherage_sum=Sum("other_age")
)
other_agg = Author.objects.aggregate(age_sum=Sum("age"))
self.assertEqual(agg["otherage_sum"], other_agg["age_sum"])
@skipUnlessDBFeature("can_distinct_on_fields")
def test_distinct_on_with_annotation(self):
store = Store.objects.create(
name="test store",
original_opening=datetime.datetime.now(),
friday_night_closing=datetime.time(21, 00, 00),
)
names = [
"Theodore Roosevelt",
"Eleanor Roosevelt",
"Franklin Roosevelt",
"Ned Stark",
"Catelyn Stark",
]
for name in names:
Employee.objects.create(
store=store,
first_name=name.split()[0],
last_name=name.split()[1],
age=30,
salary=2000,
)
people = Employee.objects.annotate(
name_lower=Lower("last_name"),
).distinct("name_lower")
self.assertEqual({p.last_name for p in people}, {"Stark", "Roosevelt"})
self.assertEqual(len(people), 2)
people2 = Employee.objects.annotate(
test_alias=F("store__name"),
).distinct("test_alias")
self.assertEqual(len(people2), 1)
lengths = (
Employee.objects.annotate(
name_len=Length("first_name"),
)
.distinct("name_len")
.values_list("name_len", flat=True)
)
self.assertCountEqual(lengths, [3, 7, 8])
def test_filter_annotation(self):
books = Book.objects.annotate(is_book=Value(1)).filter(is_book=1)
for book in books:
self.assertEqual(book.is_book, 1)
def test_filter_annotation_with_f(self):
books = Book.objects.annotate(other_rating=F("rating")).filter(other_rating=3.5)
for book in books:
self.assertEqual(book.other_rating, 3.5)
def test_filter_annotation_with_double_f(self):
books = Book.objects.annotate(other_rating=F("rating")).filter(
other_rating=F("rating")
)
for book in books:
self.assertEqual(book.other_rating, book.rating)
def test_filter_agg_with_double_f(self):
books = Book.objects.annotate(sum_rating=Sum("rating")).filter(
sum_rating=F("sum_rating")
)
for book in books:
self.assertEqual(book.sum_rating, book.rating)
def test_filter_wrong_annotation(self):
with self.assertRaisesMessage(
FieldError, "Cannot resolve keyword 'nope' into field."
):
list(
Book.objects.annotate(sum_rating=Sum("rating")).filter(
sum_rating=F("nope")
)
)
def test_decimal_annotation(self):
salary = Decimal(10) ** -Employee._meta.get_field("salary").decimal_places
Employee.objects.create(
first_name="Max",
last_name="Paine",
store=Store.objects.first(),
age=23,
salary=salary,
)
self.assertEqual(
Employee.objects.annotate(new_salary=F("salary") / 10).get().new_salary,
salary / 10,
)
def test_filter_decimal_annotation(self):
qs = (
Book.objects.annotate(new_price=F("price") + 1)
.filter(new_price=Decimal(31))
.values_list("new_price")
)
self.assertEqual(qs.get(), (Decimal(31),))
def test_combined_annotation_commutative(self):
book1 = Book.objects.annotate(adjusted_rating=F("rating") + 2).get(
pk=self.b1.pk
)
book2 = Book.objects.annotate(adjusted_rating=2 + F("rating")).get(
pk=self.b1.pk
)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
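        # Combining with None yields NULL regardless of the operand order.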
book1 = Book.objects.annotate(adjusted_rating=F("rating") + None).get(
pk=self.b1.pk
)
book2 = Book.objects.annotate(adjusted_rating=None + F("rating")).get(
pk=self.b1.pk
)
self.assertIs(book1.adjusted_rating, None)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
def test_update_with_annotation(self):
book_preupdate = Book.objects.get(pk=self.b2.pk)
Book.objects.annotate(other_rating=F("rating") - 1).update(
rating=F("other_rating")
)
book_postupdate = Book.objects.get(pk=self.b2.pk)
self.assertEqual(book_preupdate.rating - 1, book_postupdate.rating)
def test_annotation_with_m2m(self):
books = (
Book.objects.annotate(author_age=F("authors__age"))
.filter(pk=self.b1.pk)
.order_by("author_age")
)
self.assertEqual(books[0].author_age, 34)
self.assertEqual(books[1].author_age, 35)
def test_annotation_reverse_m2m(self):
books = (
Book.objects.annotate(
store_name=F("store__name"),
)
.filter(
name="Practical Django Projects",
)
.order_by("store_name")
)
self.assertQuerysetEqual(
books,
["Amazon.com", "Books.com", "Mamma and Pappa's Books"],
lambda b: b.store_name,
)
def test_values_annotation(self):
"""
Annotations can reference fields in a values clause,
and contribute to an existing values clause.
"""
# annotate references a field in values()
qs = Book.objects.values("rating").annotate(other_rating=F("rating") - 1)
book = qs.get(pk=self.b1.pk)
self.assertEqual(book["rating"] - 1, book["other_rating"])
# filter refs the annotated value
book = qs.get(other_rating=4)
self.assertEqual(book["other_rating"], 4)
# can annotate an existing values with a new field
book = qs.annotate(other_isbn=F("isbn")).get(other_rating=4)
self.assertEqual(book["other_rating"], 4)
self.assertEqual(book["other_isbn"], "155860191")
def test_values_with_pk_annotation(self):
# annotate references a field in values() with pk
publishers = Publisher.objects.values("id", "book__rating").annotate(
total=Sum("book__rating")
)
for publisher in publishers.filter(pk=self.p1.pk):
self.assertEqual(publisher["book__rating"], publisher["total"])
@skipUnlessDBFeature("allows_group_by_pk")
def test_rawsql_group_by_collapse(self):
raw = RawSQL("SELECT MIN(id) FROM annotations_book", [])
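        # On backends that can group by the primary key alone, the GROUP BY
        # collapses to it and must not include the RawSQL annotation.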
qs = (
Author.objects.values("id")
.annotate(
min_book_id=raw,
count_friends=Count("friends"),
)
.order_by()
)
_, _, group_by = qs.query.get_compiler(using="default").pre_sql_setup()
self.assertEqual(len(group_by), 1)
self.assertNotEqual(raw, group_by[0])
def test_defer_annotation(self):
"""
Deferred attributes can be referenced by an annotation,
but they are not themselves deferred, and cannot be deferred.
"""
qs = Book.objects.defer("rating").annotate(other_rating=F("rating") - 1)
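        # Two queries: one for get() and one to load the deferred 'rating'.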
with self.assertNumQueries(2):
book = qs.get(other_rating=4)
self.assertEqual(book.rating, 5)
self.assertEqual(book.other_rating, 4)
with self.assertRaisesMessage(
FieldDoesNotExist, "Book has no field named 'other_rating'"
):
book = qs.defer("other_rating").get(other_rating=4)
def test_mti_annotations(self):
"""
Fields on an inherited model can be referenced by an
annotated field.
"""
d = DepartmentStore.objects.create(
name="Angus & Robinson",
original_opening=datetime.date(2014, 3, 8),
friday_night_closing=datetime.time(21, 00, 00),
chain="Westfield",
)
books = Book.objects.filter(rating__gt=4)
for b in books:
d.books.add(b)
qs = (
DepartmentStore.objects.annotate(
other_name=F("name"),
other_chain=F("chain"),
is_open=Value(True, BooleanField()),
book_isbn=F("books__isbn"),
)
.order_by("book_isbn")
.filter(chain="Westfield")
)
self.assertQuerysetEqual(
qs,
[
("Angus & Robinson", "Westfield", True, "155860191"),
("Angus & Robinson", "Westfield", True, "159059725"),
],
lambda d: (d.other_name, d.other_chain, d.is_open, d.book_isbn),
)
def test_null_annotation(self):
"""
        Annotating None onto a model round-trips.
"""
book = Book.objects.annotate(
no_value=Value(None, output_field=IntegerField())
).first()
self.assertIsNone(book.no_value)
def test_order_by_annotation(self):
authors = Author.objects.annotate(other_age=F("age")).order_by("other_age")
self.assertQuerysetEqual(
authors,
[
25,
29,
29,
34,
35,
37,
45,
46,
57,
],
lambda a: a.other_age,
)
def test_order_by_aggregate(self):
authors = (
Author.objects.values("age")
.annotate(age_count=Count("age"))
.order_by("age_count", "age")
)
self.assertQuerysetEqual(
authors,
[
(25, 1),
(34, 1),
(35, 1),
(37, 1),
(45, 1),
(46, 1),
(57, 1),
(29, 2),
],
lambda a: (a["age"], a["age_count"]),
)
def test_raw_sql_with_inherited_field(self):
DepartmentStore.objects.create(
name="Angus & Robinson",
original_opening=datetime.date(2014, 3, 8),
friday_night_closing=datetime.time(21),
chain="Westfield",
area=123,
)
tests = (
("name", "Angus & Robinson"),
("surface", 123),
("case when name='Angus & Robinson' then chain else name end", "Westfield"),
)
for sql, expected_result in tests:
with self.subTest(sql=sql):
self.assertSequenceEqual(
DepartmentStore.objects.annotate(
annotation=RawSQL(sql, ()),
).values_list("annotation", flat=True),
[expected_result],
)
def test_annotate_exists(self):
authors = Author.objects.annotate(c=Count("id")).filter(c__gt=1)
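        # Grouped per author, Count("id") is always 1, so the filter can't
        # match anything.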
self.assertFalse(authors.exists())
def test_column_field_ordering(self):
"""
        Columns are aligned in the correct order for resolve_columns. This
        test will fail on MySQL if the column ordering is wrong. Column fields
        should be aligned as:
1. extra_select
2. model_fields
3. annotation_fields
4. model_related_fields
"""
store = Store.objects.first()
Employee.objects.create(
id=1,
first_name="Max",
manager=True,
last_name="Paine",
store=store,
age=23,
salary=Decimal(50000.00),
)
Employee.objects.create(
id=2,
first_name="Buffy",
manager=False,
last_name="Summers",
store=store,
age=18,
salary=Decimal(40000.00),
)
qs = (
Employee.objects.extra(select={"random_value": "42"})
.select_related("store")
.annotate(
annotated_value=Value(17),
)
)
rows = [
(1, "Max", True, 42, "Paine", 23, Decimal(50000.00), store.name, 17),
(2, "Buffy", False, 42, "Summers", 18, Decimal(40000.00), store.name, 17),
]
self.assertQuerysetEqual(
qs.order_by("id"),
rows,
lambda e: (
e.id,
e.first_name,
e.manager,
e.random_value,
e.last_name,
e.age,
e.salary,
e.store.name,
e.annotated_value,
),
)
def test_column_field_ordering_with_deferred(self):
store = Store.objects.first()
Employee.objects.create(
id=1,
first_name="Max",
manager=True,
last_name="Paine",
store=store,
age=23,
salary=Decimal(50000.00),
)
Employee.objects.create(
id=2,
first_name="Buffy",
manager=False,
last_name="Summers",
store=store,
age=18,
salary=Decimal(40000.00),
)
qs = (
Employee.objects.extra(select={"random_value": "42"})
.select_related("store")
.annotate(
annotated_value=Value(17),
)
)
rows = [
(1, "Max", True, 42, "Paine", 23, Decimal(50000.00), store.name, 17),
(2, "Buffy", False, 42, "Summers", 18, Decimal(40000.00), store.name, 17),
]
# and we respect deferred columns!
self.assertQuerysetEqual(
qs.defer("age").order_by("id"),
rows,
lambda e: (
e.id,
e.first_name,
e.manager,
e.random_value,
e.last_name,
e.age,
e.salary,
e.store.name,
e.annotated_value,
),
)
def test_custom_functions(self):
Company(
name="Apple",
motto=None,
ticker_name="APPL",
description="Beautiful Devices",
).save()
Company(
name="Django Software Foundation",
motto=None,
ticker_name=None,
description=None,
).save()
Company(
name="Google",
motto="Do No Evil",
ticker_name="GOOG",
description="Internet Company",
).save()
Company(
name="Yahoo", motto=None, ticker_name=None, description="Internet Company"
).save()
qs = Company.objects.annotate(
tagline=Func(
F("motto"),
F("ticker_name"),
F("description"),
Value("No Tag"),
function="COALESCE",
)
).order_by("name")
self.assertQuerysetEqual(
qs,
[
("Apple", "APPL"),
("Django Software Foundation", "No Tag"),
("Google", "Do No Evil"),
("Yahoo", "Internet Company"),
],
lambda c: (c.name, c.tagline),
)
def test_custom_functions_can_ref_other_functions(self):
Company(
name="Apple",
motto=None,
ticker_name="APPL",
description="Beautiful Devices",
).save()
Company(
name="Django Software Foundation",
motto=None,
ticker_name=None,
description=None,
).save()
Company(
name="Google",
motto="Do No Evil",
ticker_name="GOOG",
description="Internet Company",
).save()
Company(
name="Yahoo", motto=None, ticker_name=None, description="Internet Company"
).save()
class Lower(Func):
function = "LOWER"
qs = (
Company.objects.annotate(
tagline=Func(
F("motto"),
F("ticker_name"),
F("description"),
Value("No Tag"),
function="COALESCE",
)
)
.annotate(
tagline_lower=Lower(F("tagline")),
)
.order_by("name")
)
# LOWER function supported by:
# oracle, postgres, mysql, sqlite, sqlserver
self.assertQuerysetEqual(
qs,
[
("Apple", "APPL".lower()),
("Django Software Foundation", "No Tag".lower()),
("Google", "Do No Evil".lower()),
("Yahoo", "Internet Company".lower()),
],
lambda c: (c.name, c.tagline_lower),
)
def test_boolean_value_annotation(self):
books = Book.objects.annotate(
is_book=Value(True, output_field=BooleanField()),
is_pony=Value(False, output_field=BooleanField()),
is_none=Value(None, output_field=BooleanField(null=True)),
)
self.assertGreater(len(books), 0)
for book in books:
self.assertIs(book.is_book, True)
self.assertIs(book.is_pony, False)
self.assertIsNone(book.is_none)
def test_annotation_in_f_grouped_by_annotation(self):
qs = (
Publisher.objects.annotate(multiplier=Value(3))
# group by option => sum of value * multiplier
.values("name")
.annotate(multiplied_value_sum=Sum(F("multiplier") * F("num_awards")))
.order_by()
)
self.assertCountEqual(
qs,
[
{"multiplied_value_sum": 9, "name": "Apress"},
{"multiplied_value_sum": 0, "name": "Jonno's House of Books"},
{"multiplied_value_sum": 27, "name": "Morgan Kaufmann"},
{"multiplied_value_sum": 21, "name": "Prentice Hall"},
{"multiplied_value_sum": 3, "name": "Sams"},
],
)
def test_arguments_must_be_expressions(self):
msg = "QuerySet.annotate() received non-expression(s): %s."
with self.assertRaisesMessage(TypeError, msg % BooleanField()):
Book.objects.annotate(BooleanField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.annotate(is_book=True)
with self.assertRaisesMessage(
TypeError, msg % ", ".join([str(BooleanField()), "True"])
):
Book.objects.annotate(BooleanField(), Value(False), is_book=True)
def test_chaining_annotation_filter_with_m2m(self):
qs = (
Author.objects.filter(
name="Adrian Holovaty",
friends__age=35,
)
.annotate(
jacob_name=F("friends__name"),
)
.filter(
friends__age=29,
)
.annotate(
james_name=F("friends__name"),
)
.values("jacob_name", "james_name")
)
self.assertCountEqual(
qs,
[{"jacob_name": "Jacob Kaplan-Moss", "james_name": "James Bennett"}],
)
def test_annotation_filter_with_subquery(self):
long_books_qs = (
Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=400,
)
.values("publisher")
.annotate(count=Count("pk"))
.values("count")
)
publisher_books_qs = (
Publisher.objects.annotate(
total_books=Count("book"),
)
.filter(
total_books=Subquery(long_books_qs, output_field=IntegerField()),
)
.values("name")
)
self.assertCountEqual(
publisher_books_qs, [{"name": "Sams"}, {"name": "Morgan Kaufmann"}]
)
def test_annotation_exists_aggregate_values_chaining(self):
qs = (
Book.objects.values("publisher")
.annotate(
has_authors=Exists(
Book.authors.through.objects.filter(book=OuterRef("pk"))
),
max_pubdate=Max("pubdate"),
)
.values_list("max_pubdate", flat=True)
.order_by("max_pubdate")
)
self.assertCountEqual(
qs,
[
datetime.date(1991, 10, 15),
datetime.date(2008, 3, 3),
datetime.date(2008, 6, 23),
datetime.date(2008, 11, 3),
],
)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_annotation_subquery_and_aggregate_values_chaining(self):
qs = (
Book.objects.annotate(pub_year=ExtractYear("pubdate"))
.values("pub_year")
.annotate(
top_rating=Subquery(
Book.objects.filter(pubdate__year=OuterRef("pub_year"))
.order_by("-rating")
.values("rating")[:1]
),
total_pages=Sum("pages"),
)
.values("pub_year", "total_pages", "top_rating")
)
self.assertCountEqual(
qs,
[
{"pub_year": 1991, "top_rating": 5.0, "total_pages": 946},
{"pub_year": 1995, "top_rating": 4.0, "total_pages": 1132},
{"pub_year": 2007, "top_rating": 4.5, "total_pages": 447},
{"pub_year": 2008, "top_rating": 4.0, "total_pages": 1178},
],
)
def test_annotation_subquery_outerref_transform(self):
qs = Book.objects.annotate(
top_rating_year=Subquery(
Book.objects.filter(pubdate__year=OuterRef("pubdate__year"))
.order_by("-rating")
.values("rating")[:1]
),
).values("pubdate__year", "top_rating_year")
self.assertCountEqual(
qs,
[
{"pubdate__year": 1991, "top_rating_year": 5.0},
{"pubdate__year": 1995, "top_rating_year": 4.0},
{"pubdate__year": 2007, "top_rating_year": 4.5},
{"pubdate__year": 2008, "top_rating_year": 4.0},
{"pubdate__year": 2008, "top_rating_year": 4.0},
{"pubdate__year": 2008, "top_rating_year": 4.0},
],
)
def test_annotation_aggregate_with_m2o(self):
qs = (
Author.objects.filter(age__lt=30)
.annotate(
max_pages=Case(
When(book_contact_set__isnull=True, then=Value(0)),
default=Max(F("book__pages")),
),
)
.values("name", "max_pages")
)
self.assertCountEqual(
qs,
[
{"name": "James Bennett", "max_pages": 300},
{"name": "Paul Bissex", "max_pages": 0},
{"name": "Wesley J. Chun", "max_pages": 0},
],
)
def test_alias_sql_injection(self):
crafted_alias = """injected_name" from "annotations_book"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Book.objects.annotate(**{crafted_alias: Value(1)})
def test_alias_forbidden_chars(self):
tests = [
'al"ias',
"a'lias",
"ali`as",
"alia s",
"alias\t",
"ali\nas",
"alias--",
"ali/*as",
"alias*/",
"alias;",
# [] are used by MSSQL.
"alias[",
"alias]",
]
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
for crafted_alias in tests:
with self.subTest(crafted_alias):
with self.assertRaisesMessage(ValueError, msg):
Book.objects.annotate(**{crafted_alias: Value(1)})
class AliasTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="James Bennett", age=34)
cls.a4 = Author.objects.create(name="Peter Norvig", age=57)
cls.a5 = Author.objects.create(name="Stuart Russell", age=46)
p1 = Publisher.objects.create(name="Apress", num_awards=3)
cls.b1 = Book.objects.create(
isbn="159059725",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=p1,
pubdate=datetime.date(2007, 12, 6),
name="The Definitive Guide to Django: Web Development Done Right",
)
cls.b2 = Book.objects.create(
isbn="159059996",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a3,
publisher=p1,
pubdate=datetime.date(2008, 6, 23),
name="Practical Django Projects",
)
cls.b3 = Book.objects.create(
isbn="013790395",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a4,
publisher=p1,
pubdate=datetime.date(1995, 1, 15),
name="Artificial Intelligence: A Modern Approach",
)
cls.b4 = Book.objects.create(
isbn="155860191",
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a4,
publisher=p1,
pubdate=datetime.date(1991, 10, 15),
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4, cls.a5)
cls.b4.authors.add(cls.a4)
Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
def test_basic_alias(self):
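        # alias() makes the expression available to other clauses without
        # selecting it, so it never becomes an attribute on the instances.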
qs = Book.objects.alias(is_book=Value(1))
self.assertIs(hasattr(qs.first(), "is_book"), False)
def test_basic_alias_annotation(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
).annotate(is_book=F("is_book_alias"))
self.assertIs(hasattr(qs.first(), "is_book_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_basic_alias_f_annotation(self):
qs = Book.objects.alias(another_rating_alias=F("rating")).annotate(
another_rating=F("another_rating_alias")
)
self.assertIs(hasattr(qs.first(), "another_rating_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.another_rating, book.rating)
def test_basic_alias_f_transform_annotation(self):
qs = Book.objects.alias(
pubdate_alias=F("pubdate"),
).annotate(pubdate_year=F("pubdate_alias__year"))
self.assertIs(hasattr(qs.first(), "pubdate_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.pubdate_year, book.pubdate.year)
def test_alias_after_annotation(self):
qs = Book.objects.annotate(
is_book=Value(1),
).alias(is_book_alias=F("is_book"))
book = qs.first()
self.assertIs(hasattr(book, "is_book"), True)
self.assertIs(hasattr(book, "is_book_alias"), False)
def test_overwrite_annotation_with_alias(self):
qs = Book.objects.annotate(is_book=Value(1)).alias(is_book=F("is_book"))
self.assertIs(hasattr(qs.first(), "is_book"), False)
def test_overwrite_alias_with_annotation(self):
qs = Book.objects.alias(is_book=Value(1)).annotate(is_book=F("is_book"))
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_alias_annotation_expression(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
).annotate(is_book=Coalesce("is_book_alias", 0))
self.assertIs(hasattr(qs.first(), "is_book_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_alias_default_alias_expression(self):
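        # An alias created without an explicit name gets the default alias
        # ('book__pages__sum') and is usable in filters despite not being
        # selected.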
qs = Author.objects.alias(
Sum("book__pages"),
).filter(book__pages__sum__gt=2000)
self.assertIs(hasattr(qs.first(), "book__pages__sum"), False)
self.assertSequenceEqual(qs, [self.a4])
def test_joined_alias_annotation(self):
qs = (
Book.objects.select_related("publisher")
.alias(
num_awards_alias=F("publisher__num_awards"),
)
.annotate(num_awards=F("num_awards_alias"))
)
self.assertIs(hasattr(qs.first(), "num_awards_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.num_awards, book.publisher.num_awards)
def test_alias_annotate_with_aggregation(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
rating_count_alias=Count("rating"),
).annotate(
is_book=F("is_book_alias"),
rating_count=F("rating_count_alias"),
)
book = qs.first()
self.assertIs(hasattr(book, "is_book_alias"), False)
self.assertIs(hasattr(book, "rating_count_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
self.assertEqual(book.rating_count, 1)
def test_filter_alias_with_f(self):
qs = Book.objects.alias(
other_rating=F("rating"),
).filter(other_rating=4.5)
self.assertIs(hasattr(qs.first(), "other_rating"), False)
self.assertSequenceEqual(qs, [self.b1])
def test_filter_alias_with_double_f(self):
qs = Book.objects.alias(
other_rating=F("rating"),
).filter(other_rating=F("rating"))
self.assertIs(hasattr(qs.first(), "other_rating"), False)
self.assertEqual(qs.count(), Book.objects.count())
def test_filter_alias_agg_with_double_f(self):
qs = Book.objects.alias(
sum_rating=Sum("rating"),
).filter(sum_rating=F("sum_rating"))
self.assertIs(hasattr(qs.first(), "sum_rating"), False)
self.assertEqual(qs.count(), Book.objects.count())
def test_update_with_alias(self):
Book.objects.alias(
other_rating=F("rating") - 1,
).update(rating=F("other_rating"))
self.b1.refresh_from_db()
self.assertEqual(self.b1.rating, 3.5)
def test_order_by_alias(self):
qs = Author.objects.alias(other_age=F("age")).order_by("other_age")
self.assertIs(hasattr(qs.first(), "other_age"), False)
self.assertQuerysetEqual(qs, [34, 34, 35, 46, 57], lambda a: a.age)
def test_order_by_alias_aggregate(self):
qs = (
Author.objects.values("age")
.alias(age_count=Count("age"))
.order_by("age_count", "age")
)
self.assertIs(hasattr(qs.first(), "age_count"), False)
self.assertQuerysetEqual(qs, [35, 46, 57, 34], lambda a: a["age"])
def test_dates_alias(self):
qs = Book.objects.alias(
pubdate_alias=F("pubdate"),
).dates("pubdate_alias", "month")
self.assertCountEqual(
qs,
[
datetime.date(1991, 10, 1),
datetime.date(1995, 1, 1),
datetime.date(2007, 12, 1),
datetime.date(2008, 6, 1),
],
)
def test_datetimes_alias(self):
qs = Store.objects.alias(
original_opening_alias=F("original_opening"),
).datetimes("original_opening_alias", "year")
self.assertCountEqual(
qs,
[
datetime.datetime(1994, 1, 1),
datetime.datetime(2001, 1, 1),
],
)
def test_aggregate_alias(self):
msg = (
"Cannot aggregate over the 'other_age' alias. Use annotate() to promote it."
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.alias(
other_age=F("age"),
).aggregate(otherage_sum=Sum("other_age"))
def test_defer_only_alias(self):
qs = Book.objects.alias(rating_alias=F("rating") - 1)
msg = "Book has no field named 'rating_alias'"
for operation in ["defer", "only"]:
with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldDoesNotExist, msg):
getattr(qs, operation)("rating_alias").first()
@skipUnlessDBFeature("can_distinct_on_fields")
def test_distinct_on_alias(self):
qs = Book.objects.alias(rating_alias=F("rating") - 1)
msg = "Cannot resolve keyword 'rating_alias' into field."
with self.assertRaisesMessage(FieldError, msg):
qs.distinct("rating_alias").first()
def test_values_alias(self):
qs = Book.objects.alias(rating_alias=F("rating") - 1)
msg = "Cannot select the 'rating_alias' alias. Use annotate() to promote it."
for operation in ["values", "values_list"]:
with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldError, msg):
getattr(qs, operation)("rating_alias")
def test_alias_sql_injection(self):
crafted_alias = """injected_name" from "annotations_book"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Book.objects.alias(**{crafted_alias: Value(1)})
import datetime
import pickle
import sys
import unittest
from operator import attrgetter
from threading import Lock
from django.core.exceptions import EmptyResultSet, FieldError
from django.db import DEFAULT_DB_ALIAS, connection
from django.db.models import Count, Exists, F, Max, OuterRef, Q
from django.db.models.expressions import RawSQL
from django.db.models.sql.constants import LOUTER
from django.db.models.sql.where import NothingNode, WhereNode
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext, ignore_warnings
from django.utils.deprecation import RemovedInDjango50Warning
from .models import (
FK1,
Annotation,
Article,
Author,
BaseA,
BaseUser,
Book,
CategoryItem,
CategoryRelationship,
Celebrity,
Channel,
Chapter,
Child,
ChildObjectA,
Classroom,
CommonMixedCaseForeignKeys,
Company,
Cover,
CustomPk,
CustomPkTag,
DateTimePK,
Detail,
DumbCategory,
Eaten,
Employment,
ExtraInfo,
Fan,
Food,
Identifier,
Individual,
Item,
Job,
JobResponsibilities,
Join,
LeafA,
LeafB,
LoopX,
LoopZ,
ManagedModel,
Member,
MixedCaseDbColumnCategoryItem,
MixedCaseFieldCategoryItem,
ModelA,
ModelB,
ModelC,
ModelD,
MyObject,
NamedCategory,
Node,
Note,
NullableName,
Number,
ObjectA,
ObjectB,
ObjectC,
OneToOneCategory,
Order,
OrderItem,
Page,
Paragraph,
Person,
Plaything,
PointerA,
Program,
ProxyCategory,
ProxyObjectA,
ProxyObjectB,
Ranking,
Related,
RelatedIndividual,
RelatedObject,
Report,
ReportComment,
ReservedName,
Responsibility,
School,
SharedConnection,
SimpleCategory,
SingleObject,
SpecialCategory,
Staff,
StaffUser,
Student,
Tag,
Task,
Teacher,
Ticket21203Child,
Ticket21203Parent,
Ticket23605A,
Ticket23605B,
Ticket23605C,
TvChef,
Valid,
X,
)
class Queries1Tests(TestCase):
@classmethod
def setUpTestData(cls):
cls.nc1 = generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name="t1", category=generic)
cls.t2 = Tag.objects.create(name="t2", parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name="t3", parent=cls.t1)
cls.t4 = Tag.objects.create(name="t4", parent=cls.t3)
cls.t5 = Tag.objects.create(name="t5", parent=cls.t3)
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.n2 = Note.objects.create(note="n2", misc="bar", id=2)
cls.n3 = Note.objects.create(note="n3", misc="foo", id=3, negate=False)
cls.ann1 = Annotation.objects.create(name="a1", tag=cls.t1)
cls.ann1.notes.add(cls.n1)
ann2 = Annotation.objects.create(name="a2", tag=cls.t4)
ann2.notes.add(cls.n2, cls.n3)
        # Create these out of order so that sorting by 'id' will be different
        # from sorting by 'info'. Helps detect some problems later.
cls.e2 = ExtraInfo.objects.create(
info="e2", note=cls.n2, value=41, filterable=False
)
e1 = ExtraInfo.objects.create(info="e1", note=cls.n1, value=42)
cls.a1 = Author.objects.create(name="a1", num=1001, extra=e1)
cls.a2 = Author.objects.create(name="a2", num=2002, extra=e1)
cls.a3 = Author.objects.create(name="a3", num=3003, extra=cls.e2)
cls.a4 = Author.objects.create(name="a4", num=4004, extra=cls.e2)
cls.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0)
cls.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0)
time3 = datetime.datetime(2007, 12, 20, 22, 25, 0)
time4 = datetime.datetime(2007, 12, 20, 21, 0, 0)
cls.i1 = Item.objects.create(
name="one",
created=cls.time1,
modified=cls.time1,
creator=cls.a1,
note=cls.n3,
)
cls.i1.tags.set([cls.t1, cls.t2])
cls.i2 = Item.objects.create(
name="two", created=cls.time2, creator=cls.a2, note=cls.n2
)
cls.i2.tags.set([cls.t1, cls.t3])
cls.i3 = Item.objects.create(
name="three", created=time3, creator=cls.a2, note=cls.n3
)
cls.i4 = Item.objects.create(
name="four", created=time4, creator=cls.a4, note=cls.n3
)
cls.i4.tags.set([cls.t4])
cls.r1 = Report.objects.create(name="r1", creator=cls.a1)
cls.r2 = Report.objects.create(name="r2", creator=cls.a3)
cls.r3 = Report.objects.create(name="r3")
        # Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the
        # Meta.ordering will be rank3, rank2, rank1.
cls.rank1 = Ranking.objects.create(rank=2, author=cls.a2)
cls.c1 = Cover.objects.create(title="first", item=cls.i4)
cls.c2 = Cover.objects.create(title="second", item=cls.i2)
def test_subquery_condition(self):
qs1 = Tag.objects.filter(pk__lte=0)
qs2 = Tag.objects.filter(parent__in=qs1)
qs3 = Tag.objects.filter(parent__in=qs2)
self.assertEqual(qs3.query.subq_aliases, {"T", "U", "V"})
self.assertIn("v0", str(qs3.query).lower())
qs4 = qs3.filter(parent__in=qs1)
self.assertEqual(qs4.query.subq_aliases, {"T", "U", "V"})
# It is possible to reuse U for the second subquery, no need to use W.
self.assertNotIn("w0", str(qs4.query).lower())
        # 'U0."id"' is referenced in both the SELECT and the WHERE of each of
        # the two subqueries that reuse the alias, for four references total.
self.assertEqual(str(qs4.query).lower().count("u0."), 4)
def test_ticket1050(self):
self.assertSequenceEqual(
Item.objects.filter(tags__isnull=True),
[self.i3],
)
self.assertSequenceEqual(
Item.objects.filter(tags__id__isnull=True),
[self.i3],
)
def test_ticket1801(self):
self.assertSequenceEqual(
Author.objects.filter(item=self.i2),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(item=self.i3),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3),
[self.a2],
)
def test_ticket2306(self):
# Checking that no join types are "left outer" joins.
query = Item.objects.filter(tags=self.t2).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).order_by("name"),
[self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).filter(
Q(creator__name="fred") | Q(tags=self.t2)
),
[self.i1],
)
# Each filter call is processed "at once" against a single table, so this is
# different from the previous example as it tries to find tags that are two
# things at once (rather than two tags).
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)), []
)
self.assertSequenceEqual(
Item.objects.filter(
Q(tags=self.t1), Q(creator__name="fred") | Q(tags=self.t2)
),
[],
)
qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id)
self.assertSequenceEqual(list(qs), [self.a2])
        self.assertEqual(qs.query.count_active_tables(), 2)
qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id)
self.assertEqual(qs.query.count_active_tables(), 3)
def test_ticket4464(self):
self.assertSequenceEqual(
Item.objects.filter(tags=self.t1).filter(tags=self.t2),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2])
.distinct()
.order_by("name"),
[self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3),
[self.i2],
)
# Make sure .distinct() works with slicing (this was broken in Oracle).
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).order_by("name")[:3],
[self.i1, self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2])
.distinct()
.order_by("name")[:3],
[self.i1, self.i2],
)
def test_tickets_2080_3592(self):
self.assertSequenceEqual(
Author.objects.filter(item__name="one") | Author.objects.filter(name="a3"),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(item__name="one") | Q(name="a3")),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(name="a3") | Q(item__name="one")),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(item__name="three") | Q(report__name="r3")),
[self.a2],
)
def test_ticket6074(self):
# Merging two empty result sets shouldn't leave a queryset with no constraints
# (which would match everything).
self.assertSequenceEqual(Author.objects.filter(Q(id__in=[])), [])
self.assertSequenceEqual(Author.objects.filter(Q(id__in=[]) | Q(id__in=[])), [])
def test_tickets_1878_2939(self):
self.assertEqual(Item.objects.values("creator").distinct().count(), 3)
# Create something with a duplicate 'name' so that we can test multi-column
# cases (which require some tricky SQL transformations under the covers).
xx = Item(name="four", created=self.time1, creator=self.a2, note=self.n1)
xx.save()
self.assertEqual(
Item.objects.exclude(name="two")
.values("creator", "name")
.distinct()
.count(),
4,
)
self.assertEqual(
(
Item.objects.exclude(name="two")
.extra(select={"foo": "%s"}, select_params=(1,))
.values("creator", "name", "foo")
.distinct()
.count()
),
4,
)
self.assertEqual(
(
Item.objects.exclude(name="two")
.extra(select={"foo": "%s"}, select_params=(1,))
.values("creator", "name")
.distinct()
.count()
),
4,
)
xx.delete()
def test_ticket7323(self):
self.assertEqual(Item.objects.values("creator", "name").count(), 4)
def test_ticket2253(self):
q1 = Item.objects.order_by("name")
q2 = Item.objects.filter(id=self.i1.id)
self.assertSequenceEqual(q1, [self.i4, self.i1, self.i3, self.i2])
self.assertSequenceEqual(q2, [self.i1])
self.assertSequenceEqual(
(q1 | q2).order_by("name"),
[self.i4, self.i1, self.i3, self.i2],
)
self.assertSequenceEqual((q1 & q2).order_by("name"), [self.i1])
q1 = Item.objects.filter(tags=self.t1)
q2 = Item.objects.filter(note=self.n3, tags=self.t2)
q3 = Item.objects.filter(creator=self.a4)
self.assertSequenceEqual(
((q1 & q2) | q3).order_by("name"),
[self.i4, self.i1],
)
def test_order_by_tables(self):
q1 = Item.objects.order_by("name")
q2 = Item.objects.filter(id=self.i1.id)
list(q2)
combined_query = (q1 & q2).order_by("name").query
self.assertEqual(
len(
[
t
for t in combined_query.alias_map
if combined_query.alias_refcount[t]
]
),
1,
)
def test_order_by_join_unref(self):
"""
This test is related to the above one, testing that there aren't
old JOINs in the query.
"""
qs = Celebrity.objects.order_by("greatest_fan__fan_of")
self.assertIn("OUTER JOIN", str(qs.query))
qs = qs.order_by("id")
self.assertNotIn("OUTER JOIN", str(qs.query))
def test_get_clears_ordering(self):
"""
get() should clear ordering for optimization purposes.
"""
with CaptureQueriesContext(connection) as captured_queries:
Author.objects.order_by("name").get(pk=self.a1.pk)
self.assertNotIn("order by", captured_queries[0]["sql"].lower())
def test_tickets_4088_4306(self):
self.assertSequenceEqual(Report.objects.filter(creator=1001), [self.r1])
self.assertSequenceEqual(Report.objects.filter(creator__num=1001), [self.r1])
self.assertSequenceEqual(Report.objects.filter(creator__id=1001), [])
self.assertSequenceEqual(
Report.objects.filter(creator__id=self.a1.id), [self.r1]
)
self.assertSequenceEqual(Report.objects.filter(creator__name="a1"), [self.r1])
def test_ticket4510(self):
self.assertSequenceEqual(
Author.objects.filter(report__name="r1"),
[self.a1],
)
def test_ticket7378(self):
self.assertSequenceEqual(self.a1.report_set.all(), [self.r1])
def test_tickets_5324_6704(self):
self.assertSequenceEqual(
Item.objects.filter(tags__name="t4"),
[self.i4],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t4").order_by("name").distinct(),
[self.i1, self.i3, self.i2],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t4").order_by("name").distinct().reverse(),
[self.i2, self.i3, self.i1],
)
self.assertSequenceEqual(
Author.objects.exclude(item__name="one").distinct().order_by("name"),
[self.a2, self.a3, self.a4],
)
# Excluding across a m2m relation when there is more than one related
# object associated was problematic.
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t1").order_by("name"),
[self.i4, self.i3],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t1").exclude(tags__name="t4"),
[self.i3],
)
# Excluding from a relation that cannot be NULL should not use outer joins.
query = Item.objects.exclude(creator__in=[self.a1, self.a2]).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
# Similarly, when one of the joins cannot possibly, ever, involve NULL
# values (Author -> ExtraInfo, in the following), it should never be
# promoted to a left outer join. So the following query should only
# involve one "left outer" join (Author -> Item is 0-to-many).
qs = Author.objects.filter(id=self.a1.id).filter(
Q(extra__note=self.n1) | Q(item__note=self.n3)
)
self.assertEqual(
len(
[
x
for x in qs.query.alias_map.values()
if x.join_type == LOUTER and qs.query.alias_refcount[x.table_alias]
]
),
1,
)
# The previous changes shouldn't affect nullable foreign key joins.
self.assertSequenceEqual(
Tag.objects.filter(parent__isnull=True).order_by("name"), [self.t1]
)
self.assertSequenceEqual(
Tag.objects.exclude(parent__isnull=True).order_by("name"),
[self.t2, self.t3, self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__name="t1") | Q(parent__isnull=True)).order_by(
"name"
),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__isnull=True) | Q(parent__name="t1")).order_by(
"name"
),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__parent__isnull=True)).order_by("name"),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.filter(~Q(parent__parent__isnull=True)).order_by("name"),
[self.t4, self.t5],
)
def test_ticket2091(self):
t = Tag.objects.get(name="t4")
self.assertSequenceEqual(Item.objects.filter(tags__in=[t]), [self.i4])
def test_avoid_infinite_loop_on_too_many_subqueries(self):
x = Tag.objects.filter(pk=1)
local_recursion_limit = sys.getrecursionlimit() // 16
msg = "Maximum recursion depth exceeded: too many subqueries."
with self.assertRaisesMessage(RecursionError, msg):
for i in range(local_recursion_limit + 2):
x = Tag.objects.filter(pk__in=x)
def test_reasonable_number_of_subq_aliases(self):
x = Tag.objects.filter(pk=1)
for _ in range(20):
x = Tag.objects.filter(pk__in=x)
self.assertEqual(
x.query.subq_aliases,
{
"T",
"U",
"V",
"W",
"X",
"Y",
"Z",
"AA",
"AB",
"AC",
"AD",
"AE",
"AF",
"AG",
"AH",
"AI",
"AJ",
"AK",
"AL",
"AM",
"AN",
},
)
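
        # The letter sequence above can be reproduced with a small
        # stand-alone generator (an illustrative model of the naming scheme,
        # not the ORM's implementation; it only models the prefixes seen in
        # this test):
        #
        #   def alias_prefixes():
        #       yield from "TUVWXYZ"
        #       for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ":
        #           yield "A" + c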
def test_heterogeneous_qs_combination(self):
# Combining querysets built on different models should behave in a well-defined
# fashion. We raise an error.
msg = "Cannot combine queries on two different base models."
with self.assertRaisesMessage(TypeError, msg):
Author.objects.all() & Tag.objects.all()
with self.assertRaisesMessage(TypeError, msg):
Author.objects.all() | Tag.objects.all()
def test_ticket3141(self):
self.assertEqual(Author.objects.extra(select={"foo": "1"}).count(), 4)
self.assertEqual(
Author.objects.extra(select={"foo": "%s"}, select_params=(1,)).count(), 4
)
def test_ticket2400(self):
self.assertSequenceEqual(
Author.objects.filter(item__isnull=True),
[self.a3],
)
self.assertSequenceEqual(
Tag.objects.filter(item__isnull=True),
[self.t5],
)
def test_ticket2496(self):
self.assertSequenceEqual(
Item.objects.extra(tables=["queries_author"])
.select_related()
.order_by("name")[:1],
[self.i4],
)
def test_error_raised_on_filter_with_dictionary(self):
with self.assertRaisesMessage(FieldError, "Cannot parse keyword query as dict"):
Note.objects.filter({"note": "n1", "misc": "foo"})
def test_tickets_2076_7256(self):
# Ordering on related tables should be possible, even if the table is
# not otherwise involved.
self.assertSequenceEqual(
Item.objects.order_by("note__note", "name"),
[self.i2, self.i4, self.i1, self.i3],
)
# Ordering on a related field should use the remote model's default
# ordering as a final step.
self.assertSequenceEqual(
Author.objects.order_by("extra", "-name"),
[self.a2, self.a1, self.a4, self.a3],
)
# Using remote model default ordering can span multiple models (in this
# case, Cover is ordered by Item's default, which uses Note's default).
self.assertSequenceEqual(Cover.objects.all(), [self.c1, self.c2])
# If the remote model does not have a default ordering, we order by its 'id'
# field.
self.assertSequenceEqual(
Item.objects.order_by("creator", "name"),
[self.i1, self.i3, self.i2, self.i4],
)
# Ordering by a many-valued attribute (e.g. a many-to-many or reverse
# ForeignKey) is legal, but the results might not make sense. That
# isn't Django's problem. Garbage in, garbage out.
self.assertSequenceEqual(
Item.objects.filter(tags__isnull=False).order_by("tags", "id"),
[self.i1, self.i2, self.i1, self.i2, self.i4],
)
# If we replace the default ordering, Django adjusts the required
# tables automatically. Item normally requires a join with Note to do
# the default ordering, but that isn't needed here.
qs = Item.objects.order_by("name")
self.assertSequenceEqual(qs, [self.i4, self.i1, self.i3, self.i2])
self.assertEqual(len(qs.query.alias_map), 1)
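
        # For reference, the default-ordering chain exercised above has
        # roughly this shape (a sketch of the assumed Meta options, not a
        # verbatim copy of this module's models):
        #
        #   class Note(models.Model):
        #       class Meta:
        #           ordering = ["note"]
        #
        #   class Item(models.Model):
        #       note = models.ForeignKey(Note, models.CASCADE)
        #
        #       class Meta:
        #           ordering = ["-note", "name"]  # spans into Note's ordering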
def test_tickets_2874_3002(self):
qs = Item.objects.select_related().order_by("note__note", "name")
self.assertQuerysetEqual(qs, [self.i2, self.i4, self.i1, self.i3])
# This is also a good select_related() test because there are multiple
# Note entries in the SQL. The two Note items should be different.
self.assertEqual(repr(qs[0].note), "<Note: n2>")
self.assertEqual(repr(qs[0].creator.extra.note), "<Note: n1>")
def test_ticket3037(self):
self.assertSequenceEqual(
Item.objects.filter(
Q(creator__name="a3", name="two") | Q(creator__name="a4", name="four")
),
[self.i4],
)
def test_tickets_5321_7070(self):
# Ordering columns must be included in the output columns. Note that
# this means results that might otherwise be distinct are not (if there
# are multiple values in the ordering cols), as in this example. This
# isn't a bug; it's a warning to be careful with the selection of
# ordering columns.
self.assertSequenceEqual(
Note.objects.values("misc").distinct().order_by("note", "-misc"),
[{"misc": "foo"}, {"misc": "bar"}, {"misc": "foo"}],
)
def test_ticket4358(self):
# If you don't pass any fields to values(), relation fields are
# returned as "foo_id" keys, not "foo". For consistency, you should be
# able to pass "foo_id" in the fields list and have it work, too. We
# actually allow both "foo" and "foo_id".
# The *_id version is returned by default.
self.assertIn("note_id", ExtraInfo.objects.values()[0])
# You can also pass it in explicitly.
self.assertSequenceEqual(
ExtraInfo.objects.values("note_id"), [{"note_id": 1}, {"note_id": 2}]
)
# ...or use the field name.
self.assertSequenceEqual(
ExtraInfo.objects.values("note"), [{"note": 1}, {"note": 2}]
)
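
        # Both spellings resolve to the same underlying column (sketch): the
        # "note" key holds the foreign key's raw id, not a Note instance.
        #
        #   ExtraInfo.objects.values("note_id")  # -> [{"note_id": 1}, ...]
        #   ExtraInfo.objects.values("note")     # -> [{"note": 1}, ...]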
def test_ticket6154(self):
# Multiple filter statements are joined using "AND" all the time.
self.assertSequenceEqual(
Author.objects.filter(id=self.a1.id).filter(
Q(extra__note=self.n1) | Q(item__note=self.n3)
),
[self.a1],
)
self.assertSequenceEqual(
Author.objects.filter(
Q(extra__note=self.n1) | Q(item__note=self.n3)
).filter(id=self.a1.id),
[self.a1],
)
def test_ticket6981(self):
self.assertSequenceEqual(
Tag.objects.select_related("parent").order_by("name"),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
def test_ticket9926(self):
self.assertSequenceEqual(
Tag.objects.select_related("parent", "category").order_by("name"),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.select_related("parent", "parent__category").order_by("name"),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
def test_tickets_6180_6203(self):
# Dates with limits and/or counts
self.assertEqual(Item.objects.count(), 4)
self.assertEqual(Item.objects.datetimes("created", "month").count(), 1)
self.assertEqual(Item.objects.datetimes("created", "day").count(), 2)
self.assertEqual(len(Item.objects.datetimes("created", "day")), 2)
self.assertEqual(
Item.objects.datetimes("created", "day")[0],
datetime.datetime(2007, 12, 19, 0, 0),
)
def test_tickets_7087_12242(self):
# Dates with extra select columns
self.assertSequenceEqual(
Item.objects.datetimes("created", "day").extra(select={"a": 1}),
[
datetime.datetime(2007, 12, 19, 0, 0),
datetime.datetime(2007, 12, 20, 0, 0),
],
)
self.assertSequenceEqual(
Item.objects.extra(select={"a": 1}).datetimes("created", "day"),
[
datetime.datetime(2007, 12, 19, 0, 0),
datetime.datetime(2007, 12, 20, 0, 0),
],
)
name = "one"
self.assertSequenceEqual(
Item.objects.datetimes("created", "day").extra(
where=["name=%s"], params=[name]
),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
self.assertSequenceEqual(
Item.objects.extra(where=["name=%s"], params=[name]).datetimes(
"created", "day"
),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
def test_ticket7155(self):
# Nullable dates
self.assertSequenceEqual(
Item.objects.datetimes("modified", "day"),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
def test_order_by_rawsql(self):
self.assertSequenceEqual(
Item.objects.values("note__note").order_by(
RawSQL("queries_note.note", ()),
"id",
),
[
{"note__note": "n2"},
{"note__note": "n3"},
{"note__note": "n3"},
{"note__note": "n3"},
],
)
def test_ticket7096(self):
# Make sure exclude() with multiple conditions continues to work.
self.assertSequenceEqual(
Tag.objects.filter(parent=self.t1, name="t3").order_by("name"),
[self.t3],
)
self.assertSequenceEqual(
Tag.objects.exclude(parent=self.t1, name="t3").order_by("name"),
[self.t1, self.t2, self.t4, self.t5],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t1", name="one")
.order_by("name")
.distinct(),
[self.i4, self.i3, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(name__in=["three", "four"])
.exclude(tags__name="t1")
.order_by("name"),
[self.i4, self.i3],
)
# More twisted cases, involving nested negations.
self.assertSequenceEqual(
Item.objects.exclude(~Q(tags__name="t1", name="one")),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(~Q(tags__name="t1", name="one"), name="two"),
[self.i2],
)
self.assertSequenceEqual(
Item.objects.exclude(~Q(tags__name="t1", name="one"), name="two"),
[self.i4, self.i1, self.i3],
)
def test_tickets_7204_7506(self):
# Make sure querysets with related fields can be pickled. If this
# doesn't crash, it's a Good Thing.
pickle.dumps(Item.objects.all())
def test_ticket7813(self):
# We should also be able to pickle things that use select_related().
# The only tricky thing here is to ensure that we do the related
# selections properly after unpickling.
qs = Item.objects.select_related()
query = qs.query.get_compiler(qs.db).as_sql()[0]
query2 = pickle.loads(pickle.dumps(qs.query))
self.assertEqual(query2.get_compiler(qs.db).as_sql()[0], query)
def test_deferred_load_qs_pickling(self):
# Check pickling of deferred-loading querysets
qs = Item.objects.defer("name", "creator")
q2 = pickle.loads(pickle.dumps(qs))
self.assertEqual(list(qs), list(q2))
q3 = pickle.loads(pickle.dumps(qs, pickle.HIGHEST_PROTOCOL))
self.assertEqual(list(qs), list(q3))
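
        # A related, documented pattern (a sketch; not asserted here): pickle
        # qs.query instead of the queryset to avoid pickling cached results,
        # then re-attach it to a fresh queryset later.
        #
        #   payload = pickle.dumps(Item.objects.defer("name").query)
        #   restored = Item.objects.all()
        #   restored.query = pickle.loads(payload)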
def test_ticket7277(self):
self.assertSequenceEqual(
self.n1.annotation_set.filter(
Q(tag=self.t5)
| Q(tag__children=self.t5)
| Q(tag__children__children=self.t5)
),
[self.ann1],
)
def test_tickets_7448_7707(self):
# Complex objects should be converted to strings before being used in
# lookups.
self.assertSequenceEqual(
Item.objects.filter(created__in=[self.time1, self.time2]),
[self.i1, self.i2],
)
def test_ticket7235(self):
# An EmptyQuerySet should not raise exceptions if it is filtered.
Eaten.objects.create(meal="m")
q = Eaten.objects.none()
with self.assertNumQueries(0):
self.assertQuerysetEqual(q.all(), [])
self.assertQuerysetEqual(q.filter(meal="m"), [])
self.assertQuerysetEqual(q.exclude(meal="m"), [])
self.assertQuerysetEqual(q.complex_filter({"pk": 1}), [])
self.assertQuerysetEqual(q.select_related("food"), [])
self.assertQuerysetEqual(q.annotate(Count("food")), [])
self.assertQuerysetEqual(q.order_by("meal", "food"), [])
self.assertQuerysetEqual(q.distinct(), [])
self.assertQuerysetEqual(q.extra(select={"foo": "1"}), [])
self.assertQuerysetEqual(q.reverse(), [])
q.query.low_mark = 1
msg = "Cannot change a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
q.extra(select={"foo": "1"})
self.assertQuerysetEqual(q.defer("meal"), [])
self.assertQuerysetEqual(q.only("meal"), [])
def test_ticket7791(self):
# There were "issues" when ordering and distinct-ing on fields related
# via ForeignKeys.
self.assertEqual(len(Note.objects.order_by("extrainfo__info").distinct()), 3)
# Pickling of QuerySets using datetimes() should work.
qs = Item.objects.datetimes("created", "month")
pickle.loads(pickle.dumps(qs))
def test_ticket9997(self):
# If a ValuesList or Values queryset is passed as an inner query, we
# make sure it's only requesting a single value and use that as the
# thing to select.
self.assertSequenceEqual(
Tag.objects.filter(
name__in=Tag.objects.filter(parent=self.t1).values("name")
),
[self.t2, self.t3],
)
# Multi-valued values() and values_list() querysets should raise errors.
with self.assertRaisesMessage(
TypeError, "Cannot use multi-field values as a filter value."
):
Tag.objects.filter(
name__in=Tag.objects.filter(parent=self.t1).values("name", "id")
)
with self.assertRaisesMessage(
TypeError, "Cannot use multi-field values as a filter value."
):
Tag.objects.filter(
name__in=Tag.objects.filter(parent=self.t1).values_list("name", "id")
)
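
        # A flat values_list() is also single-column and is accepted in the
        # same way (sketch, using the fixtures above):
        #
        #   Tag.objects.filter(
        #       name__in=Tag.objects.filter(parent=self.t1).values_list(
        #           "name", flat=True
        #       )
        #   )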
def test_ticket9985(self):
# qs.values_list(...).values(...) combinations should work.
self.assertSequenceEqual(
Note.objects.values_list("note", flat=True).values("id").order_by("id"),
[{"id": 1}, {"id": 2}, {"id": 3}],
)
self.assertSequenceEqual(
Annotation.objects.filter(
notes__in=Note.objects.filter(note="n1")
.values_list("note")
.values("id")
),
[self.ann1],
)
def test_ticket10205(self):
# When bailing out early because of an empty "__in" filter, we need
# to set things up correctly internally so that subqueries can continue
# properly.
self.assertEqual(Tag.objects.filter(name__in=()).update(name="foo"), 0)
def test_ticket10432(self):
# Testing an empty "__in" filter with a generator as the value.
def f():
return iter([])
n_obj = Note.objects.all()[0]
def g():
yield n_obj.pk
self.assertQuerysetEqual(Note.objects.filter(pk__in=f()), [])
self.assertEqual(list(Note.objects.filter(pk__in=g())), [n_obj])
def test_ticket10742(self):
        # Querysets used in an __in clause aren't executed separately; they
        # run as part of the outer query.
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.filter(pk__in=subq)
self.assertSequenceEqual(qs, [self.a1, self.a2])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.exclude(pk__in=subq)
self.assertSequenceEqual(qs, [self.a3, self.a4])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
self.assertSequenceEqual(
Author.objects.filter(Q(pk__in=subq) & Q(name="a1")),
[self.a1],
)
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
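
        # Why _result_cache stays None (sketch): the inner queryset is never
        # iterated; it is inlined as a subselect, so a single SQL statement
        # runs, shaped roughly like:
        #
        #   SELECT ... FROM queries_author
        #   WHERE id IN (SELECT U0.id FROM queries_author U0
        #                WHERE U0.num < 3000)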
def test_ticket7076(self):
# Excluding shouldn't eliminate NULL entries.
self.assertSequenceEqual(
Item.objects.exclude(modified=self.time1).order_by("name"),
[self.i4, self.i3, self.i2],
)
self.assertSequenceEqual(
Tag.objects.exclude(parent__name=self.t1.name),
[self.t1, self.t4, self.t5],
)
def test_ticket7181(self):
        # Ordering by related tables should accommodate nullable fields. This
        # test is a little tricky since NULL ordering is database-dependent,
        # so we just count the number of results.
self.assertEqual(len(Tag.objects.order_by("parent__name")), 5)
# Empty querysets can be merged with others.
self.assertSequenceEqual(
Note.objects.none() | Note.objects.all(),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(
Note.objects.all() | Note.objects.none(),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(Note.objects.none() & Note.objects.all(), [])
self.assertSequenceEqual(Note.objects.all() & Note.objects.none(), [])
def test_ticket8439(self):
# Complex combinations of conjunctions, disjunctions and nullable
# relations.
self.assertSequenceEqual(
Author.objects.filter(
Q(item__note__extrainfo=self.e2) | Q(report=self.r1, name="xyz")
),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(
Q(report=self.r1, name="xyz") | Q(item__note__extrainfo=self.e2)
),
[self.a2],
)
self.assertSequenceEqual(
Annotation.objects.filter(
Q(tag__parent=self.t1) | Q(notes__note="n1", name="a1")
),
[self.ann1],
)
xx = ExtraInfo.objects.create(info="xx", note=self.n3)
self.assertSequenceEqual(
Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)),
[self.n1, self.n3],
)
q = Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)).query
self.assertEqual(
len(
[
x
for x in q.alias_map.values()
if x.join_type == LOUTER and q.alias_refcount[x.table_alias]
]
),
1,
)
def test_ticket17429(self):
"""
Meta.ordering=None works the same as Meta.ordering=[]
"""
original_ordering = Tag._meta.ordering
Tag._meta.ordering = None
try:
self.assertCountEqual(
Tag.objects.all(),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
finally:
Tag._meta.ordering = original_ordering
def test_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(tags__name="t4"),
Item.objects.filter(~Q(tags__name="t4")),
)
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name="t4") | Q(tags__name="t3")),
Item.objects.filter(~(Q(tags__name="t4") | Q(tags__name="t3"))),
)
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name="t4") | ~Q(tags__name="t3")),
Item.objects.filter(~(Q(tags__name="t4") | ~Q(tags__name="t3"))),
)
def test_nested_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(~Q(tags__name="t4")),
Item.objects.filter(~~Q(tags__name="t4")),
)
def test_double_exclude(self):
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name="t4")),
Item.objects.filter(~~Q(tags__name="t4")),
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name="t4")),
Item.objects.filter(~Q(~Q(tags__name="t4"))),
)
def test_exclude_in(self):
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name__in=["t4", "t3"])),
Item.objects.filter(~Q(tags__name__in=["t4", "t3"])),
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name__in=["t4", "t3"])),
Item.objects.filter(~~Q(tags__name__in=["t4", "t3"])),
)
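
        # The equivalences asserted above hold even across multi-valued
        # relations, where exclude() compiles to a subquery (sketch):
        #
        #   Item.objects.exclude(Q(tags__name__in=["t4", "t3"]))
        #   # produces the same rows as
        #   Item.objects.filter(~Q(tags__name__in=["t4", "t3"]))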
def test_ticket_10790_1(self):
# Querying direct fields with isnull should trim the left outer join.
        # It also shouldn't create an INNER JOIN.
q = Tag.objects.filter(parent__isnull=True)
self.assertSequenceEqual(q, [self.t1])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.filter(parent__isnull=False)
self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.exclude(parent__isnull=True)
self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.exclude(parent__isnull=False)
self.assertSequenceEqual(q, [self.t1])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.exclude(parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.t1, self.t2, self.t3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
self.assertNotIn("INNER JOIN", str(q.query))
def test_ticket_10790_2(self):
# Querying across several tables should strip only the last outer join,
# while preserving the preceding inner joins.
q = Tag.objects.filter(parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.t4, self.t5])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
# Querying without isnull should not convert anything to left outer join.
q = Tag.objects.filter(parent__parent=self.t1)
self.assertSequenceEqual(q, [self.t4, self.t5])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
def test_ticket_10790_3(self):
# Querying via indirect fields should populate the left outer join
q = NamedCategory.objects.filter(tag__isnull=True)
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
# join to dumbcategory ptr_id
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
self.assertSequenceEqual(q, [])
# Querying across several tables should strip only the last join, while
# preserving the preceding left outer joins.
q = NamedCategory.objects.filter(tag__parent__isnull=True)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
self.assertSequenceEqual(q, [self.nc1])
def test_ticket_10790_4(self):
        # Querying across an m2m field should not strip the m2m table from
        # the join.
q = Author.objects.filter(item__tags__isnull=True)
self.assertSequenceEqual(q, [self.a2, self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 2)
self.assertNotIn("INNER JOIN", str(q.query))
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 3)
self.assertNotIn("INNER JOIN", str(q.query))
def test_ticket_10790_5(self):
        # Querying with isnull=False across an m2m field should not create
        # outer joins.
q = Author.objects.filter(item__tags__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a1, self.a2, self.a2, self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 2)
q = Author.objects.filter(item__tags__parent__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 3)
q = Author.objects.filter(item__tags__parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 4)
def test_ticket_10790_6(self):
        # Querying with isnull=True across an m2m field should not create
        # inner joins and should strip the last outer join.
q = Author.objects.filter(item__tags__parent__parent__isnull=True)
self.assertSequenceEqual(
q,
[self.a1, self.a1, self.a2, self.a2, self.a2, self.a3],
)
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 4)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 3)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
def test_ticket_10790_7(self):
# Reverse querying with isnull should not strip the join
q = Author.objects.filter(item__isnull=True)
self.assertSequenceEqual(q, [self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
q = Author.objects.filter(item__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
def test_ticket_10790_8(self):
# Querying with combined q-objects should also strip the left outer join
q = Tag.objects.filter(Q(parent__isnull=True) | Q(parent=self.t1))
self.assertSequenceEqual(q, [self.t1, self.t2, self.t3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
def test_ticket_10790_combine(self):
# Combining queries should not re-populate the left outer join
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__isnull=False)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3, self.t4, self.t5])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q3 = q1 & q2
self.assertSequenceEqual(q3, [])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q2 = Tag.objects.filter(parent=self.t1)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q3 = q2 | q1
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__parent__isnull=True)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q3 = q2 | q1
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
def test_ticket19672(self):
self.assertSequenceEqual(
Report.objects.filter(
Q(creator__isnull=False) & ~Q(creator__extra__value=41)
),
[self.r1],
)
def test_ticket_20250(self):
# A negated Q along with an annotated queryset failed in Django 1.4
qs = Author.objects.annotate(Count("item"))
qs = qs.filter(~Q(extra__value=0)).order_by("name")
self.assertIn("SELECT", str(qs.query))
self.assertSequenceEqual(qs, [self.a1, self.a2, self.a3, self.a4])
def test_lookup_constraint_fielderror(self):
msg = (
"Cannot resolve keyword 'unknown_field' into field. Choices are: "
"annotation, category, category_id, children, id, item, "
"managedmodel, name, note, parent, parent_id"
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.filter(unknown_field__name="generic")
def test_common_mixed_case_foreign_keys(self):
"""
Valid query should be generated when fields fetched from joined tables
include FKs whose names only differ by case.
"""
c1 = SimpleCategory.objects.create(name="c1")
c2 = SimpleCategory.objects.create(name="c2")
c3 = SimpleCategory.objects.create(name="c3")
category = CategoryItem.objects.create(category=c1)
mixed_case_field_category = MixedCaseFieldCategoryItem.objects.create(
CaTeGoRy=c2
)
mixed_case_db_column_category = MixedCaseDbColumnCategoryItem.objects.create(
category=c3
)
CommonMixedCaseForeignKeys.objects.create(
category=category,
mixed_case_field_category=mixed_case_field_category,
mixed_case_db_column_category=mixed_case_db_column_category,
)
qs = CommonMixedCaseForeignKeys.objects.values(
"category",
"mixed_case_field_category",
"mixed_case_db_column_category",
"category__category",
"mixed_case_field_category__CaTeGoRy",
"mixed_case_db_column_category__category",
)
self.assertTrue(qs.first())
def test_excluded_intermediary_m2m_table_joined(self):
self.assertSequenceEqual(
Note.objects.filter(~Q(tag__annotation__name=F("note"))),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(
Note.objects.filter(tag__annotation__name="a1").filter(
~Q(tag__annotation__name=F("note"))
),
[],
)
def test_field_with_filterable(self):
self.assertSequenceEqual(
Author.objects.filter(extra=self.e2),
[self.a3, self.a4],
)
def test_negate_field(self):
self.assertSequenceEqual(
Note.objects.filter(negate=True),
[self.n1, self.n2],
)
self.assertSequenceEqual(Note.objects.exclude(negate=True), [self.n3])
class Queries2Tests(TestCase):
@classmethod
def setUpTestData(cls):
cls.num4 = Number.objects.create(num=4)
cls.num8 = Number.objects.create(num=8)
cls.num12 = Number.objects.create(num=12)
def test_ticket4289(self):
        # A slight variation on restricting the filtering choices by the
        # lookup constraints.
self.assertSequenceEqual(Number.objects.filter(num__lt=4), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=8, num__lt=12), [])
self.assertSequenceEqual(
Number.objects.filter(num__gt=8, num__lt=13),
[self.num12],
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__lt=4) | Q(num__gt=8, num__lt=12)), []
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=8, num__lt=12) | Q(num__lt=4)), []
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=8) & Q(num__lt=12) | Q(num__lt=4)), []
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=7) & Q(num__lt=12) | Q(num__lt=4)),
[self.num8],
)
def test_ticket12239(self):
        # Custom lookups are registered to round float values correctly on
        # gt/gte/lt/lte IntegerField queries.
self.assertSequenceEqual(
Number.objects.filter(num__gt=11.9),
[self.num12],
)
self.assertSequenceEqual(Number.objects.filter(num__gt=12), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=12.0), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=12.1), [])
self.assertCountEqual(
Number.objects.filter(num__lt=12),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lt=12.0),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lt=12.1),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=11.9),
[self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=12),
[self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=12.0),
[self.num12],
)
self.assertSequenceEqual(Number.objects.filter(num__gte=12.1), [])
self.assertSequenceEqual(Number.objects.filter(num__gte=12.9), [])
self.assertCountEqual(
Number.objects.filter(num__lte=11.9),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.0),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.1),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.9),
[self.num4, self.num8, self.num12],
)
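
        # The assertions above are consistent with this rounding rule for
        # float bounds against an IntegerField (an inferred summary, not a
        # quote of the lookup implementation):
        #
        #   num__gt=x   acts like  num >  floor(x)
        #   num__gte=x  acts like  num >= ceil(x)
        #   num__lt=x   acts like  num <  ceil(x)
        #   num__lte=x  acts like  num <= floor(x)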
def test_ticket7759(self):
# Count should work with a partially read result set.
count = Number.objects.count()
qs = Number.objects.all()
def run():
for obj in qs:
return qs.count() == count
self.assertTrue(run())
class Queries3Tests(TestCase):
def test_ticket7107(self):
# This shouldn't create an infinite loop.
self.assertQuerysetEqual(Valid.objects.all(), [])
def test_datetimes_invalid_field(self):
# An error should be raised when QuerySet.datetimes() is passed the
# wrong type of field.
msg = "'name' isn't a DateField, TimeField, or DateTimeField."
with self.assertRaisesMessage(TypeError, msg):
Item.objects.datetimes("name", "month")
def test_ticket22023(self):
with self.assertRaisesMessage(
TypeError, "Cannot call only() after .values() or .values_list()"
):
Valid.objects.values().only()
with self.assertRaisesMessage(
TypeError, "Cannot call defer() after .values() or .values_list()"
):
Valid.objects.values().defer()
class Queries4Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name="t1", category=generic)
n1 = Note.objects.create(note="n1", misc="foo")
n2 = Note.objects.create(note="n2", misc="bar")
e1 = ExtraInfo.objects.create(info="e1", note=n1)
e2 = ExtraInfo.objects.create(info="e2", note=n2)
cls.a1 = Author.objects.create(name="a1", num=1001, extra=e1)
cls.a3 = Author.objects.create(name="a3", num=3003, extra=e2)
cls.r1 = Report.objects.create(name="r1", creator=cls.a1)
cls.r2 = Report.objects.create(name="r2", creator=cls.a3)
cls.r3 = Report.objects.create(name="r3")
cls.i1 = Item.objects.create(
name="i1", created=datetime.datetime.now(), note=n1, creator=cls.a1
)
cls.i2 = Item.objects.create(
name="i2", created=datetime.datetime.now(), note=n1, creator=cls.a3
)
def test_ticket24525(self):
tag = Tag.objects.create()
anth100 = tag.note_set.create(note="ANTH", misc="100")
math101 = tag.note_set.create(note="MATH", misc="101")
s1 = tag.annotation_set.create(name="1")
s2 = tag.annotation_set.create(name="2")
s1.notes.set([math101, anth100])
s2.notes.set([math101])
result = math101.annotation_set.all() & tag.annotation_set.exclude(
notes__in=[anth100]
)
self.assertEqual(list(result), [s2])
def test_ticket11811(self):
unsaved_category = NamedCategory(name="Other")
msg = (
"Unsaved model instance <NamedCategory: Other> cannot be used in an ORM "
"query."
)
with self.assertRaisesMessage(ValueError, msg):
Tag.objects.filter(pk=self.t1.pk).update(category=unsaved_category)
def test_ticket14876(self):
        # Note: when combining the querysets we need information about the
        # join type of the trimmed "creator__isnull" join. Without that
        # information, the join is created as an INNER JOIN and the results
        # are incorrect.
q1 = Report.objects.filter(
Q(creator__isnull=True) | Q(creator__extra__info="e1")
)
q2 = Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(
Q(creator__extra__info="e1")
)
self.assertCountEqual(q1, [self.r1, self.r3])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Report.objects.filter(
Q(creator__extra__info="e1") | Q(creator__isnull=True)
)
q2 = Report.objects.filter(
Q(creator__extra__info="e1")
) | Report.objects.filter(Q(creator__isnull=True))
self.assertCountEqual(q1, [self.r1, self.r3])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(
Q(creator=self.a1) | Q(creator__report__name="r1")
).order_by()
q2 = (
Item.objects.filter(Q(creator=self.a1)).order_by()
| Item.objects.filter(Q(creator__report__name="r1")).order_by()
)
self.assertCountEqual(q1, [self.i1])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(
Q(creator__report__name="e1") | Q(creator=self.a1)
).order_by()
q2 = (
Item.objects.filter(Q(creator__report__name="e1")).order_by()
| Item.objects.filter(Q(creator=self.a1)).order_by()
)
self.assertCountEqual(q1, [self.i1])
self.assertEqual(str(q1.query), str(q2.query))
def test_combine_join_reuse(self):
        # Joins having identical connections are correctly recreated in the
        # rhs query when querysets are ORed together (#18748).
Report.objects.create(name="r4", creator=self.a1)
q1 = Author.objects.filter(report__name="r5")
q2 = Author.objects.filter(report__name="r4").filter(report__name="r1")
combined = q1 | q2
self.assertEqual(str(combined.query).count("JOIN"), 2)
self.assertEqual(len(combined), 1)
self.assertEqual(combined[0].name, "a1")
def test_combine_or_filter_reuse(self):
combined = Author.objects.filter(name="a1") | Author.objects.filter(name="a3")
self.assertEqual(combined.get(name="a1"), self.a1)
def test_join_reuse_order(self):
        # Join aliases are reused in order. This used to raise an
        # AssertionError when change_map contained a circular reference
        # (#26522).
s1 = School.objects.create()
s2 = School.objects.create()
s3 = School.objects.create()
t1 = Teacher.objects.create()
otherteachers = Teacher.objects.exclude(pk=t1.pk).exclude(friends=t1)
qs1 = otherteachers.filter(schools=s1).filter(schools=s2)
qs2 = otherteachers.filter(schools=s1).filter(schools=s3)
self.assertQuerysetEqual(qs1 | qs2, [])
def test_ticket7095(self):
# Updates that are filtered on the model being updated are somewhat
# tricky in MySQL.
ManagedModel.objects.create(data="mm1", tag=self.t1, public=True)
self.assertEqual(ManagedModel.objects.update(data="mm"), 1)
# A values() or values_list() query across joined models must use outer
# joins appropriately.
# Note: In Oracle, we expect a null CharField to return '' instead of
# None.
if connection.features.interprets_empty_strings_as_nulls:
expected_null_charfield_repr = ""
else:
expected_null_charfield_repr = None
self.assertSequenceEqual(
Report.objects.values_list("creator__extra__info", flat=True).order_by(
"name"
),
["e1", "e2", expected_null_charfield_repr],
)
# Similarly for select_related(), joins beyond an initial nullable join
# must use outer joins so that all results are included.
self.assertSequenceEqual(
Report.objects.select_related("creator", "creator__extra").order_by("name"),
[self.r1, self.r2, self.r3],
)
# When there are multiple paths to a table from another table, we have
# to be careful not to accidentally reuse an inappropriate join when
# using select_related(). We used to return the parent's Detail record
# here by mistake.
d1 = Detail.objects.create(data="d1")
d2 = Detail.objects.create(data="d2")
m1 = Member.objects.create(name="m1", details=d1)
m2 = Member.objects.create(name="m2", details=d2)
Child.objects.create(person=m2, parent=m1)
obj = m1.children.select_related("person__details")[0]
self.assertEqual(obj.person.details.data, "d2")
def test_order_by_resetting(self):
# Calling order_by() with no parameters removes any existing ordering on the
# model. But it should still be possible to add new ordering after that.
qs = Author.objects.order_by().order_by("name")
self.assertIn("ORDER BY", qs.query.get_compiler(qs.db).as_sql()[0])
def test_order_by_reverse_fk(self):
        # It is possible to order by the reverse of a foreign key, although
        # that can lead to duplicate results.
c1 = SimpleCategory.objects.create(name="category1")
c2 = SimpleCategory.objects.create(name="category2")
CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c1)
self.assertSequenceEqual(
SimpleCategory.objects.order_by("categoryitem", "pk"), [c1, c2, c1]
)
def test_filter_reverse_non_integer_pk(self):
date_obj = DateTimePK.objects.create()
extra_obj = ExtraInfo.objects.create(info="extra", date=date_obj)
self.assertEqual(
DateTimePK.objects.filter(extrainfo=extra_obj).get(),
date_obj,
)
def test_ticket10181(self):
        # Avoid raising an EmptyResultSet if an inner query is provably empty
        # (and hence, not executed).
self.assertQuerysetEqual(
Tag.objects.filter(id__in=Tag.objects.filter(id__in=[])), []
)
def test_ticket15316_filter_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_exclude_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_filter_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_exclude_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_filter_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(
category__onetoonecategory__isnull=False
).order_by("pk")
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_exclude_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_filter_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_exclude_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(
category__onetoonecategory__isnull=True
).order_by("pk")
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
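
    # The four variants above pair up (sketch): for this nullable one-to-one,
    # filter(...isnull=False) selects the same rows as exclude(...isnull=True),
    # and filter(...isnull=True) the same rows as exclude(...isnull=False).
    #
    #   CategoryItem.objects.filter(category__onetoonecategory__isnull=False)
    #   CategoryItem.objects.exclude(category__onetoonecategory__isnull=True)
    #   # -> both return [ci2, ci3]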
class Queries5Tests(TestCase):
@classmethod
def setUpTestData(cls):
        # Ordering by 'rank' gives us rank1, rank2, rank3. Ordering by the
        # Meta.ordering gives us rank3, rank2, rank1.
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.n2 = Note.objects.create(note="n2", misc="bar", id=2)
e1 = ExtraInfo.objects.create(info="e1", note=cls.n1)
e2 = ExtraInfo.objects.create(info="e2", note=cls.n2)
a1 = Author.objects.create(name="a1", num=1001, extra=e1)
a2 = Author.objects.create(name="a2", num=2002, extra=e1)
a3 = Author.objects.create(name="a3", num=3003, extra=e2)
cls.rank2 = Ranking.objects.create(rank=2, author=a2)
cls.rank1 = Ranking.objects.create(rank=1, author=a3)
cls.rank3 = Ranking.objects.create(rank=3, author=a1)
def test_ordering(self):
        # Cross-model ordering is possible in Meta, too.
self.assertSequenceEqual(
Ranking.objects.all(),
[self.rank3, self.rank2, self.rank1],
)
self.assertSequenceEqual(
Ranking.objects.order_by("rank"),
[self.rank1, self.rank2, self.rank3],
)
        # Ordering of extra() pieces is possible, too, and you can mix extra
        # fields and model fields in the ordering.
self.assertSequenceEqual(
Ranking.objects.extra(
tables=["django_site"], order_by=["-django_site.id", "rank"]
),
[self.rank1, self.rank2, self.rank3],
)
sql = "case when %s > 2 then 1 else 0 end" % connection.ops.quote_name("rank")
qs = Ranking.objects.extra(select={"good": sql})
self.assertEqual(
[o.good for o in qs.extra(order_by=("-good",))], [True, False, False]
)
self.assertSequenceEqual(
qs.extra(order_by=("-good", "id")),
[self.rank3, self.rank2, self.rank1],
)
# Despite having some extra aliases in the query, we can still omit
# them in a values() query.
dicts = qs.values("id", "rank").order_by("id")
self.assertEqual([d["rank"] for d in dicts], [2, 1, 3])
def test_ticket7256(self):
        # An empty values() call includes all aliases, including those from
        # an extra().
sql = "case when %s > 2 then 1 else 0 end" % connection.ops.quote_name("rank")
qs = Ranking.objects.extra(select={"good": sql})
dicts = qs.values().order_by("id")
for d in dicts:
del d["id"]
del d["author_id"]
self.assertEqual(
[sorted(d.items()) for d in dicts],
[
[("good", 0), ("rank", 2)],
[("good", 0), ("rank", 1)],
[("good", 1), ("rank", 3)],
],
)
def test_ticket7045(self):
# Extra tables used to crash SQL construction on the second use.
qs = Ranking.objects.extra(tables=["django_site"])
qs.query.get_compiler(qs.db).as_sql()
        # The test passes if this doesn't raise an exception.
qs.query.get_compiler(qs.db).as_sql()
def test_ticket9848(self):
# Make sure that updates which only filter on sub-tables don't
# inadvertently update the wrong records (bug #9848).
author_start = Author.objects.get(name="a1")
ranking_start = Ranking.objects.get(author__name="a1")
# Make sure that the IDs from different tables don't happen to match.
self.assertSequenceEqual(
Ranking.objects.filter(author__name="a1"),
[self.rank3],
)
self.assertEqual(Ranking.objects.filter(author__name="a1").update(rank=4636), 1)
r = Ranking.objects.get(author__name="a1")
self.assertEqual(r.id, ranking_start.id)
self.assertEqual(r.author.id, author_start.id)
self.assertEqual(r.rank, 4636)
r.rank = 3
r.save()
self.assertSequenceEqual(
Ranking.objects.all(),
[self.rank3, self.rank2, self.rank1],
)
def test_ticket5261(self):
# Test different empty excludes.
self.assertSequenceEqual(
Note.objects.exclude(Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.filter(~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.filter(~Q() | ~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.exclude(~Q() & ~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.exclude(~Q() ^ ~Q()),
[self.n1, self.n2],
)
def test_extra_select_literal_percent_s(self):
        # Allow %% to produce a literal percent sign in select clauses.
self.assertEqual(Note.objects.extra(select={"foo": "'%%s'"})[0].foo, "%s")
self.assertEqual(
Note.objects.extra(select={"foo": "'%%s bar %%s'"})[0].foo, "%s bar %s"
)
self.assertEqual(
Note.objects.extra(select={"foo": "'bar %%s'"})[0].foo, "bar %s"
)
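
        # Why the doubling is needed (sketch): extra() runs the select
        # fragment through the same %s parameter interpolation used for
        # select_params, so a literal percent sign must be written as %%.
        #
        #   Note.objects.extra(select={"pct": "'100%%'"})[0].pct  # "100%"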
def test_extra_select_alias_sql_injection(self):
crafted_alias = """injected_name" from "queries_note"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Note.objects.extra(select={crafted_alias: "1"})
def test_queryset_reuse(self):
        # Reusing a queryset in another query doesn't mutate its aliases.
authors = Author.objects.filter(Q(name="a1") | Q(name="nonexistent"))
self.assertEqual(Ranking.objects.filter(author__in=authors).get(), self.rank3)
self.assertEqual(authors.count(), 1)
def test_filter_unsaved_object(self):
# These tests will catch ValueError in Django 5.0 when passing unsaved
# model instances to related filters becomes forbidden.
# msg = "Model instances passed to related filters must be saved."
msg = "Passing unsaved model instances to related filters is deprecated."
company = Company.objects.create(name="Django")
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.filter(employer=Company(name="unsaved"))
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.filter(employer__in=[company, Company(name="unsaved")])
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
StaffUser.objects.filter(staff=Staff(name="unsaved"))
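
        # Once the deprecation period ends, the remedy is simply to save the
        # instance before filtering on it (sketch):
        #
        #   employer = Company.objects.create(name="saved")
        #   Employment.objects.filter(employer=employer)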
class SelectRelatedTests(TestCase):
def test_tickets_3045_3288(self):
# Once upon a time, select_related() with circular relations would loop
# infinitely if you forgot to specify "depth". Now we set an arbitrary
# default upper bound.
self.assertQuerysetEqual(X.objects.all(), [])
self.assertQuerysetEqual(X.objects.select_related(), [])
class SubclassFKTests(TestCase):
def test_ticket7778(self):
# Model subclasses could not be deleted if a nullable foreign key
# relates to a model that relates back.
num_celebs = Celebrity.objects.count()
tvc = TvChef.objects.create(name="Huey")
self.assertEqual(Celebrity.objects.count(), num_celebs + 1)
Fan.objects.create(fan_of=tvc)
Fan.objects.create(fan_of=tvc)
tvc.delete()
# The parent object should have been deleted as well.
self.assertEqual(Celebrity.objects.count(), num_celebs)
class CustomPkTests(TestCase):
def test_ticket7371(self):
self.assertQuerysetEqual(Related.objects.order_by("custom"), [])
class NullableRelOrderingTests(TestCase):
def test_ticket10028(self):
        # Ordering by a model related to nullable relations(!) should use
        # outer joins, so that all results are included.
p1 = Plaything.objects.create(name="p1")
self.assertSequenceEqual(Plaything.objects.all(), [p1])
def test_join_already_in_query(self):
        # Ordering by a model related to nullable relations should not change
        # the join type of already existing joins.
Plaything.objects.create(name="p1")
s = SingleObject.objects.create(name="s")
r = RelatedObject.objects.create(single=s, f=1)
p2 = Plaything.objects.create(name="p2", others=r)
qs = Plaything.objects.filter(others__isnull=False).order_by("pk")
self.assertNotIn("JOIN", str(qs.query))
qs = Plaything.objects.filter(others__f__isnull=False).order_by("pk")
self.assertIn("INNER", str(qs.query))
qs = qs.order_by("others__single__name")
        # The ordering by others__single__name adds one new join (to single)
        # and that join must be a LEFT join. The already existing join to
        # RelatedObject must be kept INNER, so we have both an INNER and a
        # LEFT join in the query.
self.assertEqual(str(qs.query).count("LEFT"), 1)
self.assertEqual(str(qs.query).count("INNER"), 1)
self.assertSequenceEqual(qs, [p2])
class DisjunctiveFilterTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.e1 = ExtraInfo.objects.create(info="e1", note=cls.n1)
def test_ticket7872(self):
# Another variation on the disjunctive filtering theme.
# For the purposes of this regression test, it's important that there is no
# Join object related to the LeafA we create.
l1 = LeafA.objects.create(data="first")
self.assertSequenceEqual(LeafA.objects.all(), [l1])
self.assertSequenceEqual(
LeafA.objects.filter(Q(data="first") | Q(join__b__data="second")),
[l1],
)
def test_ticket8283(self):
# Checking that applying filters after a disjunction works correctly.
self.assertSequenceEqual(
(
ExtraInfo.objects.filter(note=self.n1)
| ExtraInfo.objects.filter(info="e2")
).filter(note=self.n1),
[self.e1],
)
self.assertSequenceEqual(
(
ExtraInfo.objects.filter(info="e2")
| ExtraInfo.objects.filter(note=self.n1)
).filter(note=self.n1),
[self.e1],
)
class Queries6Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name="t1", category=generic)
cls.t2 = Tag.objects.create(name="t2", parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name="t3", parent=cls.t1)
cls.t4 = Tag.objects.create(name="t4", parent=cls.t3)
cls.t5 = Tag.objects.create(name="t5", parent=cls.t3)
n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.ann1 = Annotation.objects.create(name="a1", tag=cls.t1)
cls.ann1.notes.add(n1)
cls.ann2 = Annotation.objects.create(name="a2", tag=cls.t4)
def test_parallel_iterators(self):
# Parallel iterators work.
qs = Tag.objects.all()
i1, i2 = iter(qs), iter(qs)
self.assertEqual(repr(next(i1)), "<Tag: t1>")
self.assertEqual(repr(next(i1)), "<Tag: t2>")
self.assertEqual(repr(next(i2)), "<Tag: t1>")
self.assertEqual(repr(next(i2)), "<Tag: t2>")
self.assertEqual(repr(next(i2)), "<Tag: t3>")
self.assertEqual(repr(next(i1)), "<Tag: t3>")
qs = X.objects.all()
self.assertFalse(qs)
self.assertFalse(qs)
def test_nested_queries_sql(self):
        # Nested queries should not evaluate the inner query while the SQL is
        # being constructed; the nesting shows up as two occurrences of
        # "SELECT" in the compiled statement.
qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy"))
self.assertEqual(qs.query.get_compiler(qs.db).as_sql()[0].count("SELECT"), 2)
def test_tickets_8921_9188(self):
# Incorrect SQL was being generated for certain types of exclude()
# queries that crossed multi-valued relations (#8921, #9188 and some
# preemptively discovered cases).
self.assertSequenceEqual(
PointerA.objects.filter(connection__pointerb__id=1), []
)
self.assertSequenceEqual(
PointerA.objects.exclude(connection__pointerb__id=1), []
)
self.assertSequenceEqual(
Tag.objects.exclude(children=None),
[self.t1, self.t3],
)
# This example is tricky because the parent could be NULL, so only checking
# parents with annotations omits some results (tag t1, in this case).
self.assertSequenceEqual(
Tag.objects.exclude(parent__annotation__name="a1"),
[self.t1, self.t4, self.t5],
)
        # The annotation->tag link is single-valued and the tag->children
        # link is multi-valued. So we have to split the exclude filter in the
        # middle and then optimize the inner query without losing results.
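        # Conceptually (not the exact SQL), the split is equivalent to:
        #   inner = Tag.objects.filter(children__name="t2")
        #   Annotation.objects.exclude(tag__in=inner)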
self.assertSequenceEqual(
Annotation.objects.exclude(tag__children__name="t2"),
[self.ann2],
)
        # Nested queries are possible (although they should be used with
        # care, since they can have performance problems on backends like
        # MySQL).
self.assertSequenceEqual(
Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")),
[self.ann1],
)
def test_ticket3739(self):
# The all() method on querysets returns a copy of the queryset.
q1 = Tag.objects.order_by("name")
self.assertIsNot(q1, q1.all())
def test_ticket_11320(self):
qs = Tag.objects.exclude(category=None).exclude(category__name="foo")
self.assertEqual(str(qs.query).count(" INNER JOIN "), 1)
def test_distinct_ordered_sliced_subquery_aggregation(self):
self.assertEqual(
Tag.objects.distinct().order_by("category__name")[:3].count(), 3
)
def test_multiple_columns_with_the_same_name_slice(self):
self.assertEqual(
list(
Tag.objects.order_by("name").values_list("name", "category__name")[:2]
),
[("t1", "Generic"), ("t2", "Generic")],
)
self.assertSequenceEqual(
Tag.objects.order_by("name").select_related("category")[:2],
[self.t1, self.t2],
)
self.assertEqual(
list(Tag.objects.order_by("-name").values_list("name", "parent__name")[:2]),
[("t5", "t3"), ("t4", "t3")],
)
self.assertSequenceEqual(
Tag.objects.order_by("-name").select_related("parent")[:2],
[self.t5, self.t4],
)
def test_col_alias_quoted(self):
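        # Aggregating over a grouped annotation wraps the inner query in a
        # subquery; its positional column aliases (such as "col1") must be
        # quoted so the outer query can reference them safely.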
with CaptureQueriesContext(connection) as captured_queries:
self.assertEqual(
Tag.objects.values("parent")
.annotate(
tag_per_parent=Count("pk"),
)
.aggregate(Max("tag_per_parent")),
{"tag_per_parent__max": 2},
)
sql = captured_queries[0]["sql"]
self.assertIn("AS %s" % connection.ops.quote_name("col1"), sql)
def test_xor_subquery(self):
self.assertSequenceEqual(
Tag.objects.filter(
Exists(Tag.objects.filter(id=OuterRef("id"), name="t3"))
^ Exists(Tag.objects.filter(id=OuterRef("id"), parent=self.t1))
),
[self.t2],
)
class RawQueriesTests(TestCase):
@classmethod
def setUpTestData(cls):
Note.objects.create(note="n1", misc="foo", id=1)
def test_ticket14729(self):
        # Test the representation of raw queries with one or more parameters
        # passed as a list.
query = "SELECT * FROM queries_note WHERE note = %s"
params = ["n1"]
qs = Note.objects.raw(query, params=params)
self.assertEqual(
repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1>"
)
query = "SELECT * FROM queries_note WHERE note = %s and misc = %s"
params = ["n1", "foo"]
qs = Note.objects.raw(query, params=params)
self.assertEqual(
repr(qs),
"<RawQuerySet: SELECT * FROM queries_note WHERE note = n1 and misc = foo>",
)
class GeneratorExpressionTests(SimpleTestCase):
def test_ticket10432(self):
# Using an empty iterator as the rvalue for an "__in"
# lookup is legal.
self.assertCountEqual(Note.objects.filter(pk__in=iter(())), [])
class ComparisonTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
e1 = ExtraInfo.objects.create(info="e1", note=cls.n1)
cls.a2 = Author.objects.create(name="a2", num=2002, extra=e1)
def test_ticket8597(self):
# Regression tests for case-insensitive comparisons
item_ab = Item.objects.create(
name="a_b", created=datetime.datetime.now(), creator=self.a2, note=self.n1
)
item_xy = Item.objects.create(
name="x%y", created=datetime.datetime.now(), creator=self.a2, note=self.n1
)
self.assertSequenceEqual(
Item.objects.filter(name__iexact="A_b"),
[item_ab],
)
self.assertSequenceEqual(
Item.objects.filter(name__iexact="x%Y"),
[item_xy],
)
self.assertSequenceEqual(
Item.objects.filter(name__istartswith="A_b"),
[item_ab],
)
self.assertSequenceEqual(
Item.objects.filter(name__iendswith="A_b"),
[item_ab],
)
class ExistsSql(TestCase):
def test_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertFalse(Tag.objects.exists())
        # OK - so the exists query worked - but did it include too many columns?
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]["sql"]
id, name = connection.ops.quote_name("id"), connection.ops.quote_name("name")
self.assertNotIn(id, qstr)
self.assertNotIn(name, qstr)
def test_ticket_18414(self):
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="two", created=datetime.datetime.now())
self.assertTrue(Article.objects.exists())
self.assertTrue(Article.objects.distinct().exists())
self.assertTrue(Article.objects.distinct()[1:3].exists())
self.assertFalse(Article.objects.distinct()[1:1].exists())
@skipUnlessDBFeature("can_distinct_on_fields")
def test_ticket_18414_distinct_on(self):
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="two", created=datetime.datetime.now())
self.assertTrue(Article.objects.distinct("name").exists())
self.assertTrue(Article.objects.distinct("name")[1:2].exists())
self.assertFalse(Article.objects.distinct("name")[2:3].exists())
class QuerysetOrderedTests(unittest.TestCase):
"""
    Tests for the QuerySet.ordered attribute.
"""
def test_no_default_or_explicit_ordering(self):
self.assertIs(Annotation.objects.all().ordered, False)
def test_cleared_default_ordering(self):
self.assertIs(Tag.objects.all().ordered, True)
self.assertIs(Tag.objects.order_by().ordered, False)
def test_explicit_ordering(self):
self.assertIs(Annotation.objects.order_by("id").ordered, True)
def test_empty_queryset(self):
self.assertIs(Annotation.objects.none().ordered, True)
def test_order_by_extra(self):
self.assertIs(Annotation.objects.extra(order_by=["id"]).ordered, True)
def test_annotated_ordering(self):
qs = Annotation.objects.annotate(num_notes=Count("notes"))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by("num_notes").ordered, True)
def test_annotated_default_ordering(self):
qs = Tag.objects.annotate(num_notes=Count("pk"))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by("name").ordered, True)
def test_annotated_values_default_ordering(self):
qs = Tag.objects.values("name").annotate(num_notes=Count("pk"))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by("name").ordered, True)
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
class SubqueryTests(TestCase):
@classmethod
def setUpTestData(cls):
NamedCategory.objects.create(id=1, name="first")
NamedCategory.objects.create(id=2, name="second")
NamedCategory.objects.create(id=3, name="third")
NamedCategory.objects.create(id=4, name="fourth")
def test_ordered_subselect(self):
"Subselects honor any manual ordering"
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[0:2]
)
self.assertEqual(set(query.values_list("id", flat=True)), {3, 4})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[:2]
)
self.assertEqual(set(query.values_list("id", flat=True)), {3, 4})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:2]
)
self.assertEqual(set(query.values_list("id", flat=True)), {3})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[2:]
)
self.assertEqual(set(query.values_list("id", flat=True)), {1, 2})
def test_slice_subquery_and_query(self):
"""
Slice a query that has a sliced subquery
"""
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[0:2]
)[0:2]
self.assertEqual({x.id for x in query}, {3, 4})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:3]
)[1:3]
self.assertEqual({x.id for x in query}, {3})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[2:]
)[1:]
self.assertEqual({x.id for x in query}, {2})
def test_related_sliced_subquery(self):
"""
        Related object constraints can safely contain sliced subqueries.
        Refs #22434.
"""
generic = NamedCategory.objects.create(id=5, name="Generic")
t1 = Tag.objects.create(name="t1", category=generic)
t2 = Tag.objects.create(name="t2", category=generic)
ManagedModel.objects.create(data="mm1", tag=t1, public=True)
mm2 = ManagedModel.objects.create(data="mm2", tag=t2, public=True)
query = ManagedModel.normal_manager.filter(
tag__in=Tag.objects.order_by("-id")[:1]
)
self.assertEqual({x.id for x in query}, {mm2.id})
def test_sliced_delete(self):
"Delete queries can safely contain sliced subqueries"
DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[0:1]
).delete()
self.assertEqual(
set(DumbCategory.objects.values_list("id", flat=True)), {1, 2, 3}
)
DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:2]
).delete()
self.assertEqual(set(DumbCategory.objects.values_list("id", flat=True)), {1, 3})
DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:]
).delete()
self.assertEqual(set(DumbCategory.objects.values_list("id", flat=True)), {3})
def test_distinct_ordered_sliced_subquery(self):
# Implicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct().order_by("name")[0:2],
)
.order_by("name")
.values_list("name", flat=True),
["first", "fourth"],
)
# Explicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct()
.order_by("-name")
.values("id")[0:2],
)
.order_by("name")
.values_list("name", flat=True),
["second", "third"],
)
# Annotated value.
self.assertSequenceEqual(
DumbCategory.objects.filter(
id__in=DumbCategory.objects.annotate(double_id=F("id") * 2)
.order_by("id")
.distinct()
.values("double_id")[0:2],
)
.order_by("id")
.values_list("id", flat=True),
[2, 4],
)
class QuerySetBitwiseOperationTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.school = School.objects.create()
cls.room_1 = Classroom.objects.create(
school=cls.school, has_blackboard=False, name="Room 1"
)
cls.room_2 = Classroom.objects.create(
school=cls.school, has_blackboard=True, name="Room 2"
)
cls.room_3 = Classroom.objects.create(
school=cls.school, has_blackboard=True, name="Room 3"
)
cls.room_4 = Classroom.objects.create(
school=cls.school, has_blackboard=False, name="Room 4"
)
tag = Tag.objects.create()
cls.annotation_1 = Annotation.objects.create(tag=tag)
annotation_2 = Annotation.objects.create(tag=tag)
note = cls.annotation_1.notes.create(tag=tag)
cls.base_user_1 = BaseUser.objects.create(annotation=cls.annotation_1)
cls.base_user_2 = BaseUser.objects.create(annotation=annotation_2)
cls.task = Task.objects.create(
owner=cls.base_user_2,
creator=cls.base_user_2,
note=note,
)
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_rhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)
qs2 = Classroom.objects.filter(has_blackboard=False)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_3])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_lhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)[:1]
qs2 = Classroom.objects.filter(has_blackboard=False)
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_4])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_both_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=False)[:1]
qs2 = Classroom.objects.filter(has_blackboard=True)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_both_slice_and_ordering(self):
qs1 = Classroom.objects.filter(has_blackboard=False).order_by("-pk")[:1]
qs2 = Classroom.objects.filter(has_blackboard=True).order_by("-name")[:1]
self.assertCountEqual(qs1 | qs2, [self.room_3, self.room_4])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_rhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)
qs2 = Classroom.objects.filter(has_blackboard=False)[:1]
self.assertCountEqual(qs1 ^ qs2, [self.room_1, self.room_2, self.room_3])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_lhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)[:1]
qs2 = Classroom.objects.filter(has_blackboard=False)
self.assertCountEqual(qs1 ^ qs2, [self.room_1, self.room_2, self.room_4])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_both_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=False)[:1]
qs2 = Classroom.objects.filter(has_blackboard=True)[:1]
self.assertCountEqual(qs1 ^ qs2, [self.room_1, self.room_2])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_both_slice_and_ordering(self):
qs1 = Classroom.objects.filter(has_blackboard=False).order_by("-pk")[:1]
qs2 = Classroom.objects.filter(has_blackboard=True).order_by("-name")[:1]
self.assertCountEqual(qs1 ^ qs2, [self.room_3, self.room_4])
def test_subquery_aliases(self):
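        # When two querysets are ANDed, the subquery aliases from both sides
        # must be relabeled so they don't clash; the combined queryset should
        # also stay usable as a nested subquery.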
combined = School.objects.filter(pk__isnull=False) & School.objects.filter(
Exists(
Classroom.objects.filter(
has_blackboard=True,
school=OuterRef("pk"),
)
),
)
self.assertSequenceEqual(combined, [self.school])
nested_combined = School.objects.filter(pk__in=combined.values("pk"))
self.assertSequenceEqual(nested_combined, [self.school])
def test_conflicting_aliases_during_combine(self):
qs1 = self.annotation_1.baseuser_set.all()
qs2 = BaseUser.objects.filter(
Q(owner__note__in=self.annotation_1.notes.all())
| Q(creator__note__in=self.annotation_1.notes.all())
)
self.assertSequenceEqual(qs1, [self.base_user_1])
self.assertSequenceEqual(qs2, [self.base_user_2])
self.assertCountEqual(qs2 | qs1, qs1 | qs2)
self.assertCountEqual(qs2 | qs1, [self.base_user_1, self.base_user_2])
class CloneTests(TestCase):
def test_evaluated_queryset_as_argument(self):
"""
If a queryset is already evaluated, it can still be used as a query arg.
"""
n = Note(note="Test1", misc="misc")
n.save()
e = ExtraInfo(info="good", note=n)
e.save()
n_list = Note.objects.all()
# Evaluate the Note queryset, populating the query cache
list(n_list)
        # Make one of the cached results unpicklable.
n_list._result_cache[0].lock = Lock()
with self.assertRaises(TypeError):
pickle.dumps(n_list)
# Use the note queryset in a query, and evaluate
# that query in a way that involves cloning.
self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, "good")
def test_no_model_options_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta)
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail(
"Model options shouldn't be cloned."
)
try:
Note.objects.filter(pk__lte=F("pk") + 1).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
def test_no_fields_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta.get_field("misc"))
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail(
"Model fields shouldn't be cloned"
)
try:
Note.objects.filter(note=F("misc")).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
class EmptyQuerySetTests(SimpleTestCase):
def test_emptyqueryset_values(self):
# #14366 -- Calling .values() on an empty QuerySet and then cloning
# that should not cause an error
self.assertCountEqual(Number.objects.none().values("num").order_by("num"), [])
def test_values_subquery(self):
self.assertCountEqual(
Number.objects.filter(pk__in=Number.objects.none().values("pk")), []
)
self.assertCountEqual(
Number.objects.filter(pk__in=Number.objects.none().values_list("pk")), []
)
def test_ticket_19151(self):
# #19151 -- Calling .values() or .values_list() on an empty QuerySet
# should return an empty QuerySet and not cause an error.
q = Author.objects.none()
self.assertCountEqual(q.values(), [])
self.assertCountEqual(q.values_list(), [])
class ValuesQuerysetTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=72)
def test_flat_values_list(self):
qs = Number.objects.values_list("num")
qs = qs.values_list("num", flat=True)
self.assertSequenceEqual(qs, [72])
def test_extra_values(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(
select={"value_plus_x": "num+%s", "value_minus_x": "num-%s"},
select_params=(1, 2),
)
qs = qs.order_by("value_minus_x")
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_values_order_twice(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(
select={"value_plus_one": "num+1", "value_minus_one": "num-1"}
)
qs = qs.order_by("value_minus_one").order_by("value_plus_one")
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_values_order_multiple(self):
# Postgres doesn't allow constants in order by, so check for that.
qs = Number.objects.extra(
select={
"value_plus_one": "num+1",
"value_minus_one": "num-1",
"constant_value": "1",
}
)
qs = qs.order_by("value_plus_one", "value_minus_one", "constant_value")
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_values_order_in_extra(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(
select={"value_plus_one": "num+1", "value_minus_one": "num-1"},
order_by=["value_minus_one"],
)
        qs = qs.values("num")
        self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_select_params_values_order_in_extra(self):
        # testing for ticket 23259 issues
qs = Number.objects.extra(
select={"value_plus_x": "num+%s"},
select_params=[1],
order_by=["value_plus_x"],
)
qs = qs.filter(num=72)
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_multiple_select_params_values_order_by(self):
        # testing for ticket 23259 issues
qs = Number.objects.extra(
select={"value_plus_x": "num+%s", "value_minus_x": "num-%s"},
select_params=(72, 72),
)
qs = qs.order_by("value_minus_x")
qs = qs.filter(num=1)
qs = qs.values("num")
self.assertSequenceEqual(qs, [])
def test_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={"value_plus_one": "num+1"})
qs = qs.order_by("value_plus_one")
qs = qs.values_list("num")
self.assertSequenceEqual(qs, [(72,)])
def test_flat_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={"value_plus_one": "num+1"})
qs = qs.order_by("value_plus_one")
qs = qs.values_list("num", flat=True)
self.assertSequenceEqual(qs, [72])
def test_field_error_values_list(self):
# see #23443
msg = (
"Cannot resolve keyword %r into field. Join on 'name' not permitted."
% "foo"
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.values_list("name__foo")
def test_named_values_list_flat(self):
msg = "'flat' and 'named' can't be used together."
with self.assertRaisesMessage(TypeError, msg):
Number.objects.values_list("num", flat=True, named=True)
def test_named_values_list_bad_field_name(self):
msg = "Type names and field names must be valid identifiers: '1'"
with self.assertRaisesMessage(ValueError, msg):
Number.objects.extra(select={"1": "num+1"}).values_list(
"1", named=True
).first()
def test_named_values_list_with_fields(self):
qs = Number.objects.extra(select={"num2": "num+1"}).annotate(Count("id"))
values = qs.values_list("num", "num2", named=True).first()
self.assertEqual(type(values).__name__, "Row")
self.assertEqual(values._fields, ("num", "num2"))
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
def test_named_values_list_without_fields(self):
qs = Number.objects.extra(select={"num2": "num+1"}).annotate(Count("id"))
values = qs.values_list(named=True).first()
self.assertEqual(type(values).__name__, "Row")
self.assertEqual(
values._fields,
("num2", "id", "num", "other_num", "another_num", "id__count"),
)
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
self.assertEqual(values.id__count, 1)
def test_named_values_list_expression_with_default_alias(self):
expr = Count("id")
values = (
Number.objects.annotate(id__count1=expr)
.values_list(expr, "id__count1", named=True)
.first()
)
self.assertEqual(values._fields, ("id__count2", "id__count1"))
def test_named_values_list_expression(self):
expr = F("num") + 1
qs = Number.objects.annotate(combinedexpression1=expr).values_list(
expr, "combinedexpression1", named=True
)
values = qs.first()
self.assertEqual(values._fields, ("combinedexpression2", "combinedexpression1"))
def test_named_values_pickle(self):
value = Number.objects.values_list("num", "other_num", named=True).get()
self.assertEqual(value, (72, None))
self.assertEqual(pickle.loads(pickle.dumps(value)), value)
class QuerySetSupportsPythonIdioms(TestCase):
@classmethod
def setUpTestData(cls):
some_date = datetime.datetime(2014, 5, 16, 12, 1)
cls.articles = [
Article.objects.create(name=f"Article {i}", created=some_date)
for i in range(1, 8)
]
def get_ordered_articles(self):
return Article.objects.order_by("name")
def test_can_get_items_using_index_and_slice_notation(self):
self.assertEqual(self.get_ordered_articles()[0].name, "Article 1")
self.assertSequenceEqual(
self.get_ordered_articles()[1:3],
[self.articles[1], self.articles[2]],
)
def test_slicing_with_steps_can_be_used(self):
self.assertSequenceEqual(
self.get_ordered_articles()[::2],
[
self.articles[0],
self.articles[2],
self.articles[4],
self.articles[6],
],
)
def test_slicing_without_step_is_lazy(self):
with self.assertNumQueries(0):
self.get_ordered_articles()[0:5]
    def test_slicing_with_steps_is_not_lazy(self):
with self.assertNumQueries(1):
self.get_ordered_articles()[0:5:3]
def test_slicing_can_slice_again_after_slicing(self):
self.assertSequenceEqual(
self.get_ordered_articles()[0:5][0:2],
[self.articles[0], self.articles[1]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[0:5][4:], [self.articles[4]]
)
self.assertSequenceEqual(self.get_ordered_articles()[0:5][5:], [])
# Some more tests!
self.assertSequenceEqual(
self.get_ordered_articles()[2:][0:2],
[self.articles[2], self.articles[3]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[2:][:2],
[self.articles[2], self.articles[3]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[2:][2:3], [self.articles[4]]
)
# Using an offset without a limit is also possible.
self.assertSequenceEqual(
self.get_ordered_articles()[5:],
[self.articles[5], self.articles[6]],
)
def test_slicing_cannot_filter_queryset_once_sliced(self):
msg = "Cannot filter a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].filter(id=1)
def test_slicing_cannot_reorder_queryset_once_sliced(self):
msg = "Cannot reorder a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].order_by("id")
def test_slicing_cannot_combine_queries_once_sliced(self):
msg = "Cannot combine queries once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:1] & Article.objects.all()[4:5]
def test_slicing_negative_indexing_not_supported_for_single_element(self):
"""hint: inverting your ordering might do what you need"""
msg = "Negative indexing is not supported."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[-1]
def test_slicing_negative_indexing_not_supported_for_range(self):
"""hint: inverting your ordering might do what you need"""
msg = "Negative indexing is not supported."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[0:-5]
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[-1:]
def test_invalid_index(self):
msg = "QuerySet indices must be integers or slices, not str."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()["foo"]
def test_can_get_number_of_items_in_queryset_using_standard_len(self):
self.assertEqual(len(Article.objects.filter(name__exact="Article 1")), 1)
def test_can_combine_queries_using_and_and_or_operators(self):
s1 = Article.objects.filter(name__exact="Article 1")
s2 = Article.objects.filter(name__exact="Article 2")
self.assertSequenceEqual(
(s1 | s2).order_by("name"),
[self.articles[0], self.articles[1]],
)
self.assertSequenceEqual(s1 & s2, [])
class WeirdQuerysetSlicingTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=1)
Number.objects.create(num=2)
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="two", created=datetime.datetime.now())
Article.objects.create(name="three", created=datetime.datetime.now())
Article.objects.create(name="four", created=datetime.datetime.now())
food = Food.objects.create(name="spam")
Eaten.objects.create(meal="spam with eggs", food=food)
def test_tickets_7698_10202(self):
# People like to slice with '0' as the high-water mark.
self.assertQuerysetEqual(Article.objects.all()[0:0], [])
self.assertQuerysetEqual(Article.objects.all()[0:0][:10], [])
self.assertEqual(Article.objects.all()[:0].count(), 0)
msg = "Cannot change a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[:0].latest("created")
def test_empty_resultset_sql(self):
# ticket #12192
self.assertNumQueries(0, lambda: list(Number.objects.all()[1:1]))
def test_empty_sliced_subquery(self):
self.assertEqual(
Eaten.objects.filter(food__in=Food.objects.all()[0:0]).count(), 0
)
def test_empty_sliced_subquery_exclude(self):
self.assertEqual(
Eaten.objects.exclude(food__in=Food.objects.all()[0:0]).count(), 1
)
def test_zero_length_values_slicing(self):
n = 42
with self.assertNumQueries(0):
self.assertQuerysetEqual(Article.objects.values()[n:n], [])
self.assertQuerysetEqual(Article.objects.values_list()[n:n], [])
class EscapingTests(TestCase):
def test_ticket_7302(self):
# Reserved names are appropriately escaped
r_a = ReservedName.objects.create(name="a", order=42)
r_b = ReservedName.objects.create(name="b", order=37)
self.assertSequenceEqual(
ReservedName.objects.order_by("order"),
[r_b, r_a],
)
self.assertSequenceEqual(
ReservedName.objects.extra(
select={"stuff": "name"}, order_by=("order", "stuff")
),
[r_b, r_a],
)
class ToFieldTests(TestCase):
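    # In these test models, Eaten.food is a ForeignKey declared with a
    # to_field (Food.name), so lookups like food__in must resolve against
    # the target field rather than the primary key.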
def test_in_query(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Eaten.objects.filter(food__in=[apple, pear])),
{lunch, dinner},
)
def test_in_subquery(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(
set(Eaten.objects.filter(food__in=Food.objects.filter(name="apple"))),
{lunch},
)
self.assertEqual(
set(
Eaten.objects.filter(
food__in=Food.objects.filter(name="apple").values("eaten__meal")
)
),
set(),
)
self.assertEqual(
set(Food.objects.filter(eaten__in=Eaten.objects.filter(meal="lunch"))),
{apple},
)
def test_nested_in_subquery(self):
extra = ExtraInfo.objects.create()
author = Author.objects.create(num=42, extra=extra)
report = Report.objects.create(creator=author)
comment = ReportComment.objects.create(report=report)
comments = ReportComment.objects.filter(
report__in=Report.objects.filter(
creator__in=extra.author_set.all(),
),
)
self.assertSequenceEqual(comments, [comment])
def test_reverse_in(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch_apple = Eaten.objects.create(food=apple, meal="lunch")
lunch_pear = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Food.objects.filter(eaten__in=[lunch_apple, lunch_pear])), {apple, pear}
)
def test_single_object(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=apple, meal="dinner")
self.assertEqual(set(Eaten.objects.filter(food=apple)), {lunch, dinner})
def test_single_object_reverse(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(set(Food.objects.filter(eaten=lunch)), {apple})
def test_recursive_fk(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(list(Node.objects.filter(parent=node1)), [node2])
def test_recursive_fk_reverse(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(list(Node.objects.filter(node=node2)), [node1])
class IsNullTests(TestCase):
def test_primary_key(self):
custom = CustomPk.objects.create(name="pk")
null = Related.objects.create()
notnull = Related.objects.create(custom=custom)
self.assertSequenceEqual(
Related.objects.filter(custom__isnull=False), [notnull]
)
self.assertSequenceEqual(Related.objects.filter(custom__isnull=True), [null])
def test_to_field(self):
apple = Food.objects.create(name="apple")
e1 = Eaten.objects.create(food=apple, meal="lunch")
e2 = Eaten.objects.create(meal="lunch")
self.assertSequenceEqual(
Eaten.objects.filter(food__isnull=False),
[e1],
)
self.assertSequenceEqual(
Eaten.objects.filter(food__isnull=True),
[e2],
)
class ConditionalTests(TestCase):
"""Tests whose execution depend on different environment conditions like
Python version or DB backend features"""
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
t1 = Tag.objects.create(name="t1", category=generic)
Tag.objects.create(name="t2", parent=t1, category=generic)
t3 = Tag.objects.create(name="t3", parent=t1)
Tag.objects.create(name="t4", parent=t3)
Tag.objects.create(name="t5", parent=t3)
def test_infinite_loop(self):
# If you're not careful, it's possible to introduce infinite loops via
# default ordering on foreign keys in a cycle. We detect that.
with self.assertRaisesMessage(FieldError, "Infinite loop caused by ordering."):
list(LoopX.objects.all()) # Force queryset evaluation with list()
with self.assertRaisesMessage(FieldError, "Infinite loop caused by ordering."):
list(LoopZ.objects.all()) # Force queryset evaluation with list()
# Note that this doesn't cause an infinite loop, since the default
# ordering on the Tag model is empty (and thus defaults to using "id"
# for the related field).
self.assertEqual(len(Tag.objects.order_by("parent")), 5)
# ... but you can still order in a non-recursive fashion among linked
# fields (the previous test failed because the default ordering was
# recursive).
self.assertQuerysetEqual(LoopX.objects.order_by("y__x__y__x__id"), [])
# When grouping without specifying ordering, we add an explicit "ORDER BY NULL"
# portion in MySQL to prevent unnecessary sorting.
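    # For example, on MySQL the compiled SQL is expected to look roughly
    # like (quoting and aliases vary):
    #   SELECT parent_id FROM tag GROUP BY parent_id ORDER BY NULL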
@skipUnlessDBFeature("requires_explicit_null_ordering_when_grouping")
def test_null_ordering_added(self):
query = Tag.objects.values_list("parent_id", flat=True).order_by().query
query.group_by = ["parent_id"]
sql = query.get_compiler(DEFAULT_DB_ALIAS).as_sql()[0]
fragment = "ORDER BY "
pos = sql.find(fragment)
self.assertEqual(sql.find(fragment, pos + 1), -1)
self.assertEqual(sql.find("NULL", pos + len(fragment)), pos + len(fragment))
def test_in_list_limit(self):
# The "in" lookup works with lists of 1000 items or more.
# The numbers amount is picked to force three different IN batches
# for Oracle, yet to be less than 2100 parameter limit for MSSQL.
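        # As a sketch, on Oracle the filter is expected to compile to batched
        # IN clauses ORed together, roughly:
        #   WHERE num IN (0, ..., 999) OR num IN (1000, ..., 1999)
        #       OR num IN (2000, ..., 2049)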
numbers = list(range(2050))
max_query_params = connection.features.max_query_params
if max_query_params is None or max_query_params >= len(numbers):
Number.objects.bulk_create(Number(num=num) for num in numbers)
for number in [1000, 1001, 2000, len(numbers)]:
with self.subTest(number=number):
self.assertEqual(
Number.objects.filter(num__in=numbers[:number]).count(), number
)
class UnionTests(TestCase):
"""
Tests for the union of two querysets. Bug #12252.
"""
@classmethod
def setUpTestData(cls):
objectas = []
objectbs = []
objectcs = []
a_info = ["one", "two", "three"]
for name in a_info:
o = ObjectA(name=name)
o.save()
objectas.append(o)
b_info = [
("un", 1, objectas[0]),
("deux", 2, objectas[0]),
("trois", 3, objectas[2]),
]
for name, number, objecta in b_info:
o = ObjectB(name=name, num=number, objecta=objecta)
o.save()
objectbs.append(o)
c_info = [("ein", objectas[2], objectbs[2]), ("zwei", objectas[1], objectbs[1])]
for name, objecta, objectb in c_info:
o = ObjectC(name=name, objecta=objecta, objectb=objectb)
o.save()
objectcs.append(o)
def check_union(self, model, Q1, Q2):
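        # Sanity check: filtering by each Q and unioning the results with |
        # must match a single filter over Q1 | Q2, regardless of order.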
filter = model.objects.filter
self.assertEqual(set(filter(Q1) | filter(Q2)), set(filter(Q1 | Q2)))
self.assertEqual(set(filter(Q2) | filter(Q1)), set(filter(Q1 | Q2)))
def test_A_AB(self):
Q1 = Q(name="two")
Q2 = Q(objectb__name="deux")
self.check_union(ObjectA, Q1, Q2)
def test_A_AB2(self):
Q1 = Q(name="two")
Q2 = Q(objectb__name="deux", objectb__num=2)
self.check_union(ObjectA, Q1, Q2)
def test_AB_ACB(self):
Q1 = Q(objectb__name="deux")
Q2 = Q(objectc__objectb__name="deux")
self.check_union(ObjectA, Q1, Q2)
def test_BAB_BAC(self):
Q1 = Q(objecta__objectb__name="deux")
Q2 = Q(objecta__objectc__name="ein")
self.check_union(ObjectB, Q1, Q2)
def test_BAB_BACB(self):
Q1 = Q(objecta__objectb__name="deux")
Q2 = Q(objecta__objectc__objectb__name="trois")
self.check_union(ObjectB, Q1, Q2)
def test_BA_BCA__BAB_BAC_BCA(self):
Q1 = Q(objecta__name="one", objectc__objecta__name="two")
Q2 = Q(
objecta__objectc__name="ein",
objectc__objecta__name="three",
objecta__objectb__name="trois",
)
self.check_union(ObjectB, Q1, Q2)
class DefaultValuesInsertTest(TestCase):
def test_no_extra_params(self):
"""
        Can create an instance of a model with only the PK field (#17056).
"""
DumbCategory.objects.create()
class ExcludeTests(TestCase):
@classmethod
def setUpTestData(cls):
f1 = Food.objects.create(name="apples")
cls.f2 = Food.objects.create(name="oranges")
Eaten.objects.create(food=f1, meal="dinner")
cls.j1 = Job.objects.create(name="Manager")
cls.r1 = Responsibility.objects.create(description="Playing golf")
cls.j2 = Job.objects.create(name="Programmer")
cls.r2 = Responsibility.objects.create(description="Programming")
JobResponsibilities.objects.create(job=cls.j1, responsibility=cls.r1)
JobResponsibilities.objects.create(job=cls.j2, responsibility=cls.r2)
def test_to_field(self):
self.assertSequenceEqual(
Food.objects.exclude(eaten__meal="dinner"),
[self.f2],
)
self.assertSequenceEqual(
Job.objects.exclude(responsibilities__description="Playing golf"),
[self.j2],
)
self.assertSequenceEqual(
Responsibility.objects.exclude(jobs__name="Manager"),
[self.r2],
)
def test_ticket14511(self):
alex = Person.objects.get_or_create(name="Alex")[0]
jane = Person.objects.get_or_create(name="Jane")[0]
oracle = Company.objects.get_or_create(name="Oracle")[0]
google = Company.objects.get_or_create(name="Google")[0]
microsoft = Company.objects.get_or_create(name="Microsoft")[0]
intel = Company.objects.get_or_create(name="Intel")[0]
def employ(employer, employee, title):
Employment.objects.get_or_create(
employee=employee, employer=employer, title=title
)
employ(oracle, alex, "Engineer")
employ(oracle, alex, "Developer")
employ(google, alex, "Engineer")
employ(google, alex, "Manager")
employ(microsoft, alex, "Manager")
employ(intel, alex, "Manager")
employ(microsoft, jane, "Developer")
employ(intel, jane, "Manager")
alex_tech_employers = (
alex.employers.filter(employment__title__in=("Engineer", "Developer"))
.distinct()
.order_by("name")
)
self.assertSequenceEqual(alex_tech_employers, [google, oracle])
alex_nontech_employers = (
alex.employers.exclude(employment__title__in=("Engineer", "Developer"))
.distinct()
.order_by("name")
)
self.assertSequenceEqual(alex_nontech_employers, [google, intel, microsoft])
def test_exclude_reverse_fk_field_ref(self):
tag = Tag.objects.create()
Note.objects.create(tag=tag, note="note")
annotation = Annotation.objects.create(name="annotation", tag=tag)
self.assertEqual(
Annotation.objects.exclude(tag__note__note=F("name")).get(), annotation
)
def test_exclude_with_circular_fk_relation(self):
self.assertEqual(
ObjectB.objects.exclude(objecta__objectb__name=F("name")).count(), 0
)
def test_subquery_exclude_outerref(self):
qs = JobResponsibilities.objects.filter(
Exists(Responsibility.objects.exclude(jobs=OuterRef("job"))),
)
self.assertTrue(qs.exists())
self.r1.delete()
self.assertFalse(qs.exists())
def test_exclude_nullable_fields(self):
number = Number.objects.create(num=1, other_num=1)
Number.objects.create(num=2, other_num=2, another_num=2)
self.assertSequenceEqual(
Number.objects.exclude(other_num=F("another_num")),
[number],
)
self.assertSequenceEqual(
Number.objects.exclude(num=F("another_num")),
[number],
)
def test_exclude_multivalued_exists(self):
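        # Excluding across a multi-valued relation should compile to a
        # NOT EXISTS(...) subquery rather than a join, hence the check on the
        # captured SQL below.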
with CaptureQueriesContext(connection) as captured_queries:
self.assertSequenceEqual(
Job.objects.exclude(responsibilities__description="Programming"),
[self.j1],
)
self.assertIn("exists", captured_queries[0]["sql"].lower())
def test_exclude_subquery(self):
subquery = JobResponsibilities.objects.filter(
responsibility__description="bar",
) | JobResponsibilities.objects.exclude(
job__responsibilities__description="foo",
)
self.assertCountEqual(
Job.objects.annotate(
                responsibility=subquery.filter(job=OuterRef("name")).values("id")[:1]
),
[self.j1, self.j2],
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_exclude_unsaved_o2o_object(self):
jack = Staff.objects.create(name="jack")
jack_staff = StaffUser.objects.create(staff=jack)
unsaved_object = Staff(name="jane")
self.assertIsNone(unsaved_object.pk)
self.assertSequenceEqual(
StaffUser.objects.exclude(staff=unsaved_object), [jack_staff]
)
def test_exclude_unsaved_object(self):
# These tests will catch ValueError in Django 5.0 when passing unsaved
# model instances to related filters becomes forbidden.
# msg = "Model instances passed to related filters must be saved."
company = Company.objects.create(name="Django")
msg = "Passing unsaved model instances to related filters is deprecated."
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.exclude(employer=Company(name="unsaved"))
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.exclude(employer__in=[company, Company(name="unsaved")])
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
StaffUser.objects.exclude(staff=Staff(name="unsaved"))
class ExcludeTest17600(TestCase):
"""
    Some regression tests for ticket #17600. Some of these likely duplicate
other existing tests.
"""
@classmethod
def setUpTestData(cls):
# Create a few Orders.
cls.o1 = Order.objects.create(pk=1)
cls.o2 = Order.objects.create(pk=2)
cls.o3 = Order.objects.create(pk=3)
# Create some OrderItems for the first order with homogeneous
# status_id values
cls.oi1 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi2 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi3 = OrderItem.objects.create(order=cls.o1, status=1)
# Create some OrderItems for the second order with heterogeneous
# status_id values
cls.oi4 = OrderItem.objects.create(order=cls.o2, status=1)
cls.oi5 = OrderItem.objects.create(order=cls.o2, status=2)
cls.oi6 = OrderItem.objects.create(order=cls.o2, status=3)
        # Create some OrderItems for the third order with heterogeneous
        # status_id values
cls.oi7 = OrderItem.objects.create(order=cls.o3, status=2)
cls.oi8 = OrderItem.objects.create(order=cls.o3, status=3)
cls.oi9 = OrderItem.objects.create(order=cls.o3, status=4)
def test_exclude_plain(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(items__status=1),
[self.o3],
)
def test_exclude_plain_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(items__status=1).distinct(),
[self.o3],
)
def test_exclude_with_q_object_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(Q(items__status=1)).distinct(),
[self.o3],
)
def test_exclude_with_q_object_no_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(Q(items__status=1)),
[self.o3],
)
def test_exclude_with_q_is_equal_to_plain_exclude(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1).distinct()),
list(Order.objects.exclude(Q(items__status=1)).distinct()),
)
def test_exclude_with_q_is_equal_to_plain_exclude_variation(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1)),
list(Order.objects.exclude(Q(items__status=1)).distinct()),
)
@unittest.expectedFailure
def test_only_orders_with_all_items_having_status_1(self):
"""
This should only return orders having ALL items set to status 1, or
        those orders not having any items at all. The correct way to write
this query in SQL seems to be using two nested subqueries.
"""
self.assertQuerysetEqual(
Order.objects.exclude(~Q(items__status=1)).distinct(),
[self.o1],
)
class Exclude15786(TestCase):
"""Regression test for #15786"""
def test_ticket15786(self):
c1 = SimpleCategory.objects.create(name="c1")
c2 = SimpleCategory.objects.create(name="c2")
OneToOneCategory.objects.create(category=c1)
OneToOneCategory.objects.create(category=c2)
rel = CategoryRelationship.objects.create(first=c1, second=c2)
self.assertEqual(
CategoryRelationship.objects.exclude(
first__onetoonecategory=F("second__onetoonecategory")
).get(),
rel,
)
class NullInExcludeTest(TestCase):
@classmethod
def setUpTestData(cls):
NullableName.objects.create(name="i1")
NullableName.objects.create()
def test_null_in_exclude_qs(self):
none_val = "" if connection.features.interprets_empty_strings_as_nulls else None
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[]),
["i1", none_val],
attrgetter("name"),
)
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=["i1"]),
[none_val],
attrgetter("name"),
)
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=["i3"]),
["i1", none_val],
attrgetter("name"),
)
inner_qs = NullableName.objects.filter(name="i1").values_list("name")
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=inner_qs),
[none_val],
attrgetter("name"),
)
        # The inner queryset wasn't executed - it should be turned
        # into a subquery above.
self.assertIs(inner_qs._result_cache, None)
@unittest.expectedFailure
def test_col_not_in_list_containing_null(self):
"""
The following case is not handled properly because
SQL's COL NOT IN (list containing null) handling is too weird to
abstract away.
"""
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[None]), ["i1"], attrgetter("name")
)
def test_double_exclude(self):
self.assertEqual(
list(NullableName.objects.filter(~~Q(name="i1"))),
list(NullableName.objects.filter(Q(name="i1"))),
)
self.assertNotIn(
"IS NOT NULL", str(NullableName.objects.filter(~~Q(name="i1")).query)
)
class EmptyStringsAsNullTest(TestCase):
"""
Filtering on non-null character fields works as expected.
The reason for these tests is that Oracle treats '' as NULL, and this
can cause problems in query construction. Refs #17957.
"""
@classmethod
def setUpTestData(cls):
cls.nc = NamedCategory.objects.create(name="")
def test_direct_exclude(self):
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name__in=["nonexistent"]),
[self.nc.pk],
attrgetter("pk"),
)
def test_joined_exclude(self):
self.assertQuerysetEqual(
DumbCategory.objects.exclude(namedcategory__name__in=["nonexistent"]),
[self.nc.pk],
attrgetter("pk"),
)
def test_21001(self):
foo = NamedCategory.objects.create(name="foo")
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name=""), [foo.pk], attrgetter("pk")
)
class ProxyQueryCleanupTest(TestCase):
def test_evaluated_proxy_count(self):
"""
Generating the query string doesn't alter the query's state
in irreversible ways. Refs #18248.
"""
ProxyCategory.objects.create()
qs = ProxyCategory.objects.all()
self.assertEqual(qs.count(), 1)
str(qs.query)
self.assertEqual(qs.count(), 1)
class WhereNodeTest(SimpleTestCase):
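    # Minimal stand-ins for the compiler machinery: DummyNode compiles to a
    # fixed "dummy" SQL fragment, and MockCompiler implements just enough of
    # the compiler API (compile(), plus quote_name via __call__) for
    # WhereNode.as_sql() to run without a real SQLCompiler.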
class DummyNode:
def as_sql(self, compiler, connection):
return "dummy", []
class MockCompiler:
def compile(self, node):
return node.as_sql(self, connection)
def __call__(self, name):
return connection.ops.quote_name(name)
def test_empty_full_handling_conjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w = WhereNode(children=[self.DummyNode(), self.DummyNode()])
self.assertEqual(w.as_sql(compiler, connection), ("(dummy AND dummy)", []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("NOT (dummy AND dummy)", []))
w = WhereNode(children=[NothingNode(), self.DummyNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("", []))
def test_empty_full_handling_disjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()], connector="OR")
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector="OR")
self.assertEqual(w.as_sql(compiler, connection), ("(dummy OR dummy)", []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("NOT (dummy OR dummy)", []))
w = WhereNode(children=[NothingNode(), self.DummyNode()], connector="OR")
self.assertEqual(w.as_sql(compiler, connection), ("dummy", []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("NOT (dummy)", []))
def test_empty_nodes(self):
compiler = WhereNodeTest.MockCompiler()
empty_w = WhereNode()
w = WhereNode(children=[empty_w, empty_w])
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w.negate()
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.connector = "OR"
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w = WhereNode(children=[empty_w, NothingNode()], connector="OR")
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w = WhereNode(children=[empty_w, NothingNode()], connector="AND")
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
class QuerySetExceptionTests(SimpleTestCase):
def test_iter_exceptions(self):
qs = ExtraInfo.objects.only("author")
msg = "'ManyToOneRel' object has no attribute 'attname'"
with self.assertRaisesMessage(AttributeError, msg):
list(qs)
def test_invalid_order_by(self):
msg = "Cannot resolve keyword '*' into field. Choices are: created, id, name"
with self.assertRaisesMessage(FieldError, msg):
Article.objects.order_by("*")
def test_invalid_order_by_raw_column_alias(self):
msg = (
"Cannot resolve keyword 'queries_author.name' into field. Choices "
"are: cover, created, creator, creator_id, id, modified, name, "
"note, note_id, tags"
)
with self.assertRaisesMessage(FieldError, msg):
Item.objects.values("creator__name").order_by("queries_author.name")
def test_invalid_queryset_model(self):
msg = 'Cannot use QuerySet for "Article": Use a QuerySet for "ExtraInfo".'
with self.assertRaisesMessage(ValueError, msg):
list(Author.objects.filter(extra=Article.objects.all()))
class NullJoinPromotionOrTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.d1 = ModelD.objects.create(name="foo")
d2 = ModelD.objects.create(name="bar")
cls.a1 = ModelA.objects.create(name="a1", d=cls.d1)
c = ModelC.objects.create(name="c")
b = ModelB.objects.create(name="b", c=c)
cls.a2 = ModelA.objects.create(name="a2", b=b, d=d2)
def test_ticket_17886(self):
# The first Q-object is generating the match, the rest of the filters
# should not remove the match even if they do not match anything. The
# problem here was that b__name generates a LOUTER JOIN, then
        # b__c__name generates a join to c, which the ORM tried to promote but
# failed as that join isn't nullable.
q_obj = Q(d__name="foo") | Q(b__name="foo") | Q(b__c__name="foo")
qset = ModelA.objects.filter(q_obj)
self.assertEqual(list(qset), [self.a1])
# We generate one INNER JOIN to D. The join is direct and not nullable
# so we can use INNER JOIN for it. However, we can NOT use INNER JOIN
# for the b->c join, as a->b is nullable.
self.assertEqual(str(qset.query).count("INNER JOIN"), 1)
def test_isnull_filter_promotion(self):
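        # b__name__isnull=True must use a LEFT OUTER JOIN so rows with no
        # related b still match; negating the filter flips the join type, and
        # a double negation should behave like no negation at all.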
qs = ModelA.objects.filter(Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count("LEFT OUTER"), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count("LEFT OUTER"), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count("LEFT OUTER"), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(list(qs), [self.a2])
def test_null_join_demotion(self):
qs = ModelA.objects.filter(Q(b__name__isnull=False) & Q(b__name__isnull=True))
self.assertIn(" INNER JOIN ", str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) & Q(b__name__isnull=False))
self.assertIn(" INNER JOIN ", str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=False) | Q(b__name__isnull=True))
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) | Q(b__name__isnull=False))
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
def test_ticket_21366(self):
n = Note.objects.create(note="n", misc="m")
e = ExtraInfo.objects.create(info="info", note=n)
a = Author.objects.create(name="Author1", num=1, extra=e)
Ranking.objects.create(rank=1, author=a)
r1 = Report.objects.create(name="Foo", creator=a)
r2 = Report.objects.create(name="Bar")
Report.objects.create(name="Bar", creator=a)
qs = Report.objects.filter(
Q(creator__ranking__isnull=True) | Q(creator__ranking__rank=1, name="Foo")
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
self.assertEqual(str(qs.query).count(" JOIN "), 2)
self.assertSequenceEqual(qs.order_by("name"), [r2, r1])
def test_ticket_21748(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
i3 = Identifier.objects.create(name="i3")
Program.objects.create(identifier=i1)
Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
self.assertSequenceEqual(
Identifier.objects.filter(program=None, channel=None), [i3]
)
self.assertSequenceEqual(
Identifier.objects.exclude(program=None, channel=None).order_by("name"),
[i1, i2],
)
def test_ticket_21748_double_negated_and(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
Identifier.objects.create(name="i3")
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
        # Check that ~~Q() (or equivalently .exclude(~Q())) works like Q()
        # for join promotion.
qs1_doubleneg = Identifier.objects.exclude(
~Q(program__id=p1.id, channel__id=c1.id)
).order_by("pk")
qs1_filter = Identifier.objects.filter(
program__id=p1.id, channel__id=c1.id
).order_by("pk")
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(
str(qs1_filter.query).count("JOIN"), str(qs1_doubleneg.query).count("JOIN")
)
self.assertEqual(2, str(qs1_doubleneg.query).count("INNER JOIN"))
self.assertEqual(
str(qs1_filter.query).count("INNER JOIN"),
str(qs1_doubleneg.query).count("INNER JOIN"),
)
def test_ticket_21748_double_negated_or(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
Identifier.objects.create(name="i3")
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Test OR + double negation. The expected result is that channel is
        # LOUTER joined and program INNER joined.
qs1_filter = Identifier.objects.filter(
Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)
).order_by("pk")
qs1_doubleneg = Identifier.objects.exclude(
~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id))
).order_by("pk")
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(
str(qs1_filter.query).count("JOIN"), str(qs1_doubleneg.query).count("JOIN")
)
self.assertEqual(1, str(qs1_doubleneg.query).count("INNER JOIN"))
self.assertEqual(
str(qs1_filter.query).count("INNER JOIN"),
str(qs1_doubleneg.query).count("INNER JOIN"),
)
def test_ticket_21748_complex_filter(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
Identifier.objects.create(name="i3")
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Finally, a more complex case: one query where each NOT is pushed
        # down to the lowest level of the boolean tree, and another query
        # where this isn't done.
qs1 = Identifier.objects.filter(
~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id))
).order_by("pk")
qs2 = Identifier.objects.filter(
Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id))
).order_by("pk")
self.assertQuerysetEqual(qs1, qs2, lambda x: x)
self.assertEqual(str(qs1.query).count("JOIN"), str(qs2.query).count("JOIN"))
self.assertEqual(0, str(qs1.query).count("INNER JOIN"))
self.assertEqual(
str(qs1.query).count("INNER JOIN"), str(qs2.query).count("INNER JOIN")
)
class ReverseJoinTrimmingTest(TestCase):
def test_reverse_trimming(self):
# We don't accidentally trim reverse joins - we can't know if there is
# anything on the other side of the join, so trimming reverse joins
# can't be done, ever.
t = Tag.objects.create()
qs = Tag.objects.filter(annotation__tag=t.pk)
self.assertIn("INNER JOIN", str(qs.query))
self.assertEqual(list(qs), [])
class JoinReuseTest(TestCase):
"""
The queries reuse joins sensibly (for example, direct joins
are always reused).
"""
def test_fk_reuse(self):
qs = Annotation.objects.filter(tag__name="foo").filter(tag__name="bar")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_select_related(self):
qs = Annotation.objects.filter(tag__name="foo").select_related("tag")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_annotation(self):
qs = Annotation.objects.filter(tag__name="foo").annotate(cnt=Count("tag__name"))
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_disjunction(self):
qs = Annotation.objects.filter(Q(tag__name="foo") | Q(tag__name="bar"))
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_order_by(self):
qs = Annotation.objects.filter(tag__name="foo").order_by("tag__name")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_revo2o_reuse(self):
qs = Detail.objects.filter(member__name="foo").filter(member__name="foo")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_revfk_noreuse(self):
qs = Author.objects.filter(report__name="r4").filter(report__name="r1")
self.assertEqual(str(qs.query).count("JOIN"), 2)
def test_inverted_q_across_relations(self):
"""
When a trimmable join is specified in the query (here school__), the
        ORM detects it and removes unnecessary joins. The set of reusable
        joins is updated after trimming the query so that other lookups don't
        consider the outer query's filters to be in effect for the subquery
        (#26551).
"""
springfield_elementary = School.objects.create()
hogward = School.objects.create()
Student.objects.create(school=springfield_elementary)
hp = Student.objects.create(school=hogward)
Classroom.objects.create(school=hogward, name="Potion")
Classroom.objects.create(school=springfield_elementary, name="Main")
qs = Student.objects.filter(
~(
Q(school__classroom__name="Main")
& Q(school__classroom__has_blackboard=None)
)
)
self.assertSequenceEqual(qs, [hp])
class DisjunctionPromotionTests(TestCase):
def test_disjunction_promotion_select_related(self):
fk1 = FK1.objects.create(f1="f1", f2="f2")
basea = BaseA.objects.create(a=fk1)
qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2))
self.assertEqual(str(qs.query).count(" JOIN "), 0)
qs = qs.select_related("a", "b")
self.assertEqual(str(qs.query).count(" INNER JOIN "), 0)
self.assertEqual(str(qs.query).count(" LEFT OUTER JOIN "), 2)
with self.assertNumQueries(1):
self.assertSequenceEqual(qs, [basea])
self.assertEqual(qs[0].a, fk1)
self.assertIs(qs[0].b, None)
def test_disjunction_promotion1(self):
# Pre-existing join, add two ORed filters to the same join,
# all joins can be INNER JOINS.
qs = BaseA.objects.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = qs.filter(Q(b__f1="foo") | Q(b__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
# Reverse the order of AND and OR filters.
qs = BaseA.objects.filter(Q(b__f1="foo") | Q(b__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = qs.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
def test_disjunction_promotion2(self):
qs = BaseA.objects.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
        # Now we have two different joins in an ORed condition; these
        # must be OUTER joins. The pre-existing join should remain INNER.
qs = qs.filter(Q(b__f1="foo") | Q(c__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
# Reverse case.
qs = BaseA.objects.filter(Q(b__f1="foo") | Q(c__f2="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
qs = qs.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
def test_disjunction_promotion3(self):
qs = BaseA.objects.filter(a__f2="bar")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
        # The ANDed a__f2 filter allows us to keep using INNER JOIN
        # even inside the ORed case. If the join to a__ returns nothing,
        # the ANDed filter for a__f2 can't be true.
qs = qs.filter(Q(a__f1="foo") | Q(b__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
def test_disjunction_promotion3_demote(self):
# This one needs demotion logic: the first filter causes a to be
# outer joined, the second filter makes it inner join again.
qs = BaseA.objects.filter(Q(a__f1="foo") | Q(b__f2="foo")).filter(a__f2="bar")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
def test_disjunction_promotion4_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("JOIN"), 0)
        # Demote needed for the "a" join. It is marked as an outer join
        # by the above filter (even if it is trimmed away).
qs = qs.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
def test_disjunction_promotion4(self):
qs = BaseA.objects.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
def test_disjunction_promotion5_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
        # Note that the filters on "a" above force the join to be an
        # inner join even if it is trimmed.
self.assertEqual(str(qs.query).count("JOIN"), 0)
qs = qs.filter(Q(a__f1="foo") | Q(b__f1="foo"))
# So, now the a__f1 join doesn't need promotion.
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
# But b__f1 does.
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
qs = BaseA.objects.filter(Q(a__f1="foo") | Q(b__f1="foo"))
# Now the join to a is created as LOUTER
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
def test_disjunction_promotion6(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("JOIN"), 0)
qs = BaseA.objects.filter(Q(a__f1="foo") & Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 0)
qs = BaseA.objects.filter(Q(a__f1="foo") & Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 0)
def test_disjunction_promotion7(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("JOIN"), 0)
qs = BaseA.objects.filter(Q(a__f1="foo") | (Q(b__f1="foo") & Q(a__f1="bar")))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
qs = BaseA.objects.filter(
(Q(a__f1="foo") | Q(b__f1="foo")) & (Q(a__f1="bar") | Q(c__f1="foo"))
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 3)
self.assertEqual(str(qs.query).count("INNER JOIN"), 0)
qs = BaseA.objects.filter(
Q(a__f1="foo") | Q(a__f1="bar") & (Q(b__f1="bar") | Q(c__f1="foo"))
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
def test_disjunction_promotion_fexpression(self):
qs = BaseA.objects.filter(Q(a__f1=F("b__f1")) | Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = BaseA.objects.filter(Q(a__f1=F("c__f1")) | Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 3)
qs = BaseA.objects.filter(
Q(a__f1=F("b__f1")) | Q(a__f2=F("b__f2")) | Q(c__f1="foo")
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 3)
qs = BaseA.objects.filter(Q(a__f1=F("c__f1")) | (Q(pk=1) & Q(pk=2)))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
self.assertEqual(str(qs.query).count("INNER JOIN"), 0)
class ManyToManyExcludeTest(TestCase):
def test_exclude_many_to_many(self):
i_extra = Identifier.objects.create(name="extra")
i_program = Identifier.objects.create(name="program")
program = Program.objects.create(identifier=i_program)
i_channel = Identifier.objects.create(name="channel")
channel = Channel.objects.create(identifier=i_channel)
channel.programs.add(program)
        # channel contains 'program', so all Identifiers except that one
        # should be returned.
self.assertSequenceEqual(
Identifier.objects.exclude(program__channel=channel).order_by("name"),
[i_channel, i_extra],
)
self.assertSequenceEqual(
Identifier.objects.exclude(program__channel=None).order_by("name"),
[i_program],
)
def test_ticket_12823(self):
pg3 = Page.objects.create(text="pg3")
pg2 = Page.objects.create(text="pg2")
pg1 = Page.objects.create(text="pg1")
pa1 = Paragraph.objects.create(text="pa1")
pa1.page.set([pg1, pg2])
pa2 = Paragraph.objects.create(text="pa2")
pa2.page.set([pg2, pg3])
pa3 = Paragraph.objects.create(text="pa3")
ch1 = Chapter.objects.create(title="ch1", paragraph=pa1)
ch2 = Chapter.objects.create(title="ch2", paragraph=pa2)
ch3 = Chapter.objects.create(title="ch3", paragraph=pa3)
b1 = Book.objects.create(title="b1", chapter=ch1)
b2 = Book.objects.create(title="b2", chapter=ch2)
b3 = Book.objects.create(title="b3", chapter=ch3)
q = Book.objects.exclude(chapter__paragraph__page__text="pg1")
self.assertNotIn("IS NOT NULL", str(q.query))
self.assertEqual(len(q), 2)
self.assertNotIn(b1, q)
self.assertIn(b2, q)
self.assertIn(b3, q)
class RelabelCloneTest(TestCase):
def test_ticket_19964(self):
my1 = MyObject.objects.create(data="foo")
my1.parent = my1
my1.save()
my2 = MyObject.objects.create(data="bar", parent=my1)
parents = MyObject.objects.filter(parent=F("id"))
children = MyObject.objects.filter(parent__in=parents).exclude(parent=F("id"))
self.assertEqual(list(parents), [my1])
# Evaluating the children query (which has parents as part of it) does
# not change results for the parents query.
self.assertEqual(list(children), [my2])
self.assertEqual(list(parents), [my1])
class Ticket20101Tests(TestCase):
def test_ticket_20101(self):
"""
        Tests QuerySet OR combining in the exclude subquery case.
"""
t = Tag.objects.create(name="foo")
a1 = Annotation.objects.create(tag=t, name="a1")
a2 = Annotation.objects.create(tag=t, name="a2")
a3 = Annotation.objects.create(tag=t, name="a3")
n = Note.objects.create(note="foo", misc="bar")
qs1 = Note.objects.exclude(annotation__in=[a1, a2])
qs2 = Note.objects.filter(annotation__in=[a3])
self.assertIn(n, qs1)
self.assertNotIn(n, qs2)
self.assertIn(n, (qs1 | qs2))
class EmptyStringPromotionTests(SimpleTestCase):
def test_empty_string_promotion(self):
qs = RelatedObject.objects.filter(single__name="")
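        # On backends that treat '' as NULL (e.g. Oracle), the lookup becomes
        # an IS NULL test, so the join must be promoted to LEFT OUTER.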
if connection.features.interprets_empty_strings_as_nulls:
self.assertIn("LEFT OUTER JOIN", str(qs.query))
else:
self.assertNotIn("LEFT OUTER JOIN", str(qs.query))
class ValuesSubqueryTests(TestCase):
def test_values_in_subquery(self):
# If a values() queryset is used, then the given values
# will be used instead of forcing use of the relation's field.
o1 = Order.objects.create(id=-2)
o2 = Order.objects.create(id=-1)
oi1 = OrderItem.objects.create(order=o1, status=0)
oi1.status = oi1.pk
oi1.save()
OrderItem.objects.create(order=o2, status=0)
# The query below should match o1 as it has related order_item
# with id == status.
self.assertSequenceEqual(
Order.objects.filter(items__in=OrderItem.objects.values_list("status")),
[o1],
)
class DoubleInSubqueryTests(TestCase):
def test_double_subquery_in(self):
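        # Chain two __in lookups built from values_list() querysets to
        # exercise nested subquery handling.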
lfa1 = LeafA.objects.create(data="foo")
lfa2 = LeafA.objects.create(data="bar")
lfb1 = LeafB.objects.create(data="lfb1")
lfb2 = LeafB.objects.create(data="lfb2")
Join.objects.create(a=lfa1, b=lfb1)
Join.objects.create(a=lfa2, b=lfb2)
leaf_as = LeafA.objects.filter(data="foo").values_list("pk", flat=True)
joins = Join.objects.filter(a__in=leaf_as).values_list("b__id", flat=True)
qs = LeafB.objects.filter(pk__in=joins)
self.assertSequenceEqual(qs, [lfb1])
class Ticket18785Tests(SimpleTestCase):
def test_ticket_18785(self):
        # Test join trimming from ticket #18785.
qs = (
Item.objects.exclude(note__isnull=False)
.filter(name="something", creator__extra__isnull=True)
.order_by()
)
self.assertEqual(1, str(qs.query).count("INNER JOIN"))
self.assertEqual(0, str(qs.query).count("OUTER JOIN"))
class Ticket20788Tests(TestCase):
def test_ticket_20788(self):
Paragraph.objects.create()
paragraph = Paragraph.objects.create()
page = paragraph.page.create()
chapter = Chapter.objects.create(paragraph=paragraph)
Book.objects.create(chapter=chapter)
paragraph2 = Paragraph.objects.create()
Page.objects.create()
chapter2 = Chapter.objects.create(paragraph=paragraph2)
book2 = Book.objects.create(chapter=chapter2)
sentences_not_in_pub = Book.objects.exclude(chapter__paragraph__page=page)
self.assertSequenceEqual(sentences_not_in_pub, [book2])
class Ticket12807Tests(TestCase):
def test_ticket_12807(self):
p1 = Paragraph.objects.create()
p2 = Paragraph.objects.create()
# The ORed condition below should have no effect on the query - the
# ~Q(pk__in=[]) will always be True.
qs = Paragraph.objects.filter((Q(pk=p2.pk) | ~Q(pk__in=[])) & Q(pk=p1.pk))
self.assertSequenceEqual(qs, [p1])
class RelatedLookupTypeTests(TestCase):
error = 'Cannot query "%s": Must be "%s" instance.'
@classmethod
def setUpTestData(cls):
cls.oa = ObjectA.objects.create(name="oa")
cls.poa = ProxyObjectA.objects.get(name="oa")
cls.coa = ChildObjectA.objects.create(name="coa")
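        # An Order sharing oa's pk: the type check must reject it even though
        # the pk matches.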
cls.wrong_type = Order.objects.create(id=cls.oa.pk)
cls.ob = ObjectB.objects.create(name="ob", objecta=cls.oa, num=1)
cls.pob1 = ProxyObjectB.objects.create(name="pob", objecta=cls.oa, num=2)
cls.pob = ProxyObjectB.objects.all()
cls.c = ObjectC.objects.create(childobjecta=cls.coa)
def test_wrong_type_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup.
"""
# Passing incorrect object type
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)
):
ObjectB.objects.get(objecta=self.wrong_type)
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)
):
ObjectB.objects.filter(objecta__in=[self.wrong_type])
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)
):
ObjectB.objects.filter(objecta=self.wrong_type)
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)
):
ObjectA.objects.filter(objectb__in=[self.wrong_type, self.ob])
        # Passing an object of the class on which the query is done.
with self.assertRaisesMessage(
ValueError, self.error % (self.ob, ObjectA._meta.object_name)
):
ObjectB.objects.filter(objecta__in=[self.poa, self.ob])
with self.assertRaisesMessage(
ValueError, self.error % (self.ob, ChildObjectA._meta.object_name)
):
ObjectC.objects.exclude(childobjecta__in=[self.coa, self.ob])
def test_wrong_backward_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup for backward relations.
"""
with self.assertRaisesMessage(
ValueError, self.error % (self.oa, ObjectB._meta.object_name)
):
ObjectA.objects.filter(objectb__in=[self.oa, self.ob])
with self.assertRaisesMessage(
ValueError, self.error % (self.oa, ObjectB._meta.object_name)
):
ObjectA.objects.exclude(objectb=self.oa)
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)
):
ObjectA.objects.get(objectb=self.wrong_type)
def test_correct_lookup(self):
"""
When passing proxy model objects, child objects, or parent objects,
lookups work fine.
"""
out_a = [self.oa]
out_b = [self.ob, self.pob1]
out_c = [self.c]
# proxy model objects
self.assertSequenceEqual(
ObjectB.objects.filter(objecta=self.poa).order_by("name"), out_b
)
self.assertSequenceEqual(
ObjectA.objects.filter(objectb__in=self.pob).order_by("pk"), out_a * 2
)
# child objects
self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.coa]), [])
self.assertSequenceEqual(
ObjectB.objects.filter(objecta__in=[self.poa, self.coa]).order_by("name"),
out_b,
)
self.assertSequenceEqual(
ObjectB.objects.filter(objecta__in=iter([self.poa, self.coa])).order_by(
"name"
),
out_b,
)
# parent objects
self.assertSequenceEqual(ObjectC.objects.exclude(childobjecta=self.oa), out_c)
# QuerySet related object type checking shouldn't issue queries
# (the querysets aren't evaluated here, hence zero queries) (#23266).
with self.assertNumQueries(0):
ObjectB.objects.filter(objecta__in=ObjectA.objects.all())
def test_values_queryset_lookup(self):
"""
ValueQuerySets are not checked for compatibility with the lookup field.
"""
# Make sure the num and objecta field values match.
ob = ObjectB.objects.get(name="ob")
ob.num = ob.objecta.pk
ob.save()
pob = ObjectB.objects.get(name="pob")
pob.num = pob.objecta.pk
pob.save()
self.assertSequenceEqual(
ObjectB.objects.filter(
objecta__in=ObjectB.objects.values_list("num")
).order_by("pk"),
[ob, pob],
)
class Ticket14056Tests(TestCase):
def test_ticket_14056(self):
s1 = SharedConnection.objects.create(data="s1")
s2 = SharedConnection.objects.create(data="s2")
s3 = SharedConnection.objects.create(data="s3")
PointerA.objects.create(connection=s2)
expected_ordering = (
[s1, s3, s2] if connection.features.nulls_order_largest else [s2, s1, s3]
)
self.assertSequenceEqual(
SharedConnection.objects.order_by("-pointera__connection", "pk"),
expected_ordering,
)
class Ticket20955Tests(TestCase):
def test_ticket_20955(self):
jack = Staff.objects.create(name="jackstaff")
jackstaff = StaffUser.objects.create(staff=jack)
jill = Staff.objects.create(name="jillstaff")
jillstaff = StaffUser.objects.create(staff=jill)
task = Task.objects.create(creator=jackstaff, owner=jillstaff, title="task")
task_get = Task.objects.get(pk=task.pk)
# Load data so that assertNumQueries doesn't complain about the get
# version's queries.
task_get.creator.staffuser.staff
task_get.owner.staffuser.staff
qs = Task.objects.select_related(
"creator__staffuser__staff", "owner__staffuser__staff"
)
self.assertEqual(str(qs.query).count(" JOIN "), 6)
task_select_related = qs.get(pk=task.pk)
with self.assertNumQueries(0):
self.assertEqual(
task_select_related.creator.staffuser.staff,
task_get.creator.staffuser.staff,
)
self.assertEqual(
task_select_related.owner.staffuser.staff,
task_get.owner.staffuser.staff,
)
class Ticket21203Tests(TestCase):
def test_ticket_21203(self):
p = Ticket21203Parent.objects.create(parent_bool=True)
c = Ticket21203Child.objects.create(parent=p)
qs = Ticket21203Child.objects.select_related("parent").defer("parent__created")
self.assertSequenceEqual(qs, [c])
self.assertIs(qs[0].parent.parent_bool, True)
class ValuesJoinPromotionTests(TestCase):
def test_values_no_promotion_for_existing(self):
qs = Node.objects.filter(parent__parent__isnull=False)
self.assertIn(" INNER JOIN ", str(qs.query))
qs = qs.values("parent__parent__id")
self.assertIn(" INNER JOIN ", str(qs.query))
# Make sure there is a left outer join without the filter.
qs = Node.objects.values("parent__parent__id")
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
def test_non_nullable_fk_not_promoted(self):
qs = ObjectB.objects.values("objecta__name")
self.assertIn(" INNER JOIN ", str(qs.query))
def test_ticket_21376(self):
a = ObjectA.objects.create()
ObjectC.objects.create(objecta=a)
qs = ObjectC.objects.filter(
Q(objecta=a) | Q(objectb__objecta=a),
)
qs = qs.filter(
Q(objectb=1) | Q(objecta=a),
)
self.assertEqual(qs.count(), 1)
tblname = connection.ops.quote_name(ObjectB._meta.db_table)
self.assertIn(" LEFT OUTER JOIN %s" % tblname, str(qs.query))
class ForeignKeyToBaseExcludeTests(TestCase):
def test_ticket_21787(self):
sc1 = SpecialCategory.objects.create(special_name="sc1", name="sc1")
sc2 = SpecialCategory.objects.create(special_name="sc2", name="sc2")
sc3 = SpecialCategory.objects.create(special_name="sc3", name="sc3")
c1 = CategoryItem.objects.create(category=sc1)
CategoryItem.objects.create(category=sc2)
self.assertSequenceEqual(
SpecialCategory.objects.exclude(categoryitem__id=c1.pk).order_by("name"),
[sc2, sc3],
)
self.assertSequenceEqual(
SpecialCategory.objects.filter(categoryitem__id=c1.pk), [sc1]
)
class ReverseM2MCustomPkTests(TestCase):
def test_ticket_21879(self):
cpt1 = CustomPkTag.objects.create(id="cpt1", tag="cpt1")
cp1 = CustomPk.objects.create(name="cp1", extra="extra")
cp1.custompktag_set.add(cpt1)
self.assertSequenceEqual(CustomPk.objects.filter(custompktag=cpt1), [cp1])
self.assertSequenceEqual(CustomPkTag.objects.filter(custom_pk=cp1), [cpt1])
class Ticket22429Tests(TestCase):
def test_ticket_22429(self):
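        # st1 attends a classroom in their own school, st2 has no classroom
        # at all, so only st2 survives the negated match.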
sc1 = School.objects.create()
st1 = Student.objects.create(school=sc1)
sc2 = School.objects.create()
st2 = Student.objects.create(school=sc2)
cr = Classroom.objects.create(school=sc1)
cr.students.add(st1)
queryset = Student.objects.filter(~Q(classroom__school=F("school")))
self.assertSequenceEqual(queryset, [st2])
class Ticket23605Tests(TestCase):
def test_ticket_23605(self):
# Test filtering on a complicated q-object from ticket's report.
# The query structure is such that we have multiple nested subqueries.
# The original problem was that the inner queries weren't relabeled
# correctly.
# See also #24090.
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=10000.0)
Ticket23605B.objects.create(
field_b0=10000.0, field_b1=True, modelc_fk=c1, modela_fk=a1
)
complex_q = Q(
pk__in=Ticket23605A.objects.filter(
Q(
# True for a1 as field_b0 = 10000, field_c0=10000
# False for a2 as no ticket23605b found
ticket23605b__field_b0__gte=1000000
/ F("ticket23605b__modelc_fk__field_c0")
)
&
# True for a1 (field_b1=True)
Q(ticket23605b__field_b1=True)
& ~Q(
ticket23605b__pk__in=Ticket23605B.objects.filter(
~(
                            # Same filters as the commented ones above, but
                            # double-negated (once by the ~Q() above, once by
                            # the parentheses). So, again, a1 matches and a2
                            # doesn't.
Q(field_b1=True)
& Q(field_b0__gte=1000000 / F("modelc_fk__field_c0"))
)
)
)
).filter(ticket23605b__field_b1=True)
)
qs1 = Ticket23605A.objects.filter(complex_q)
self.assertSequenceEqual(qs1, [a1])
qs2 = Ticket23605A.objects.exclude(complex_q)
self.assertSequenceEqual(qs2, [a2])
class TestTicket24279(TestCase):
    def test_ticket_24279(self):
School.objects.create()
qs = School.objects.filter(Q(pk__in=()) | Q())
self.assertQuerysetEqual(qs, [])
class TestInvalidValuesRelation(SimpleTestCase):
def test_invalid_values(self):
msg = "Field 'id' expected a number but got 'abc'."
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag="abc")
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag__in=[123, "abc"])
class TestTicket24605(TestCase):
def test_ticket_24605(self):
"""
Subquery table names should be quoted.
"""
i1 = Individual.objects.create(alive=True)
RelatedIndividual.objects.create(related=i1)
i2 = Individual.objects.create(alive=False)
RelatedIndividual.objects.create(related=i2)
i3 = Individual.objects.create(alive=True)
i4 = Individual.objects.create(alive=False)
self.assertSequenceEqual(
Individual.objects.filter(
Q(alive=False), Q(related_individual__isnull=True)
),
[i4],
)
self.assertSequenceEqual(
Individual.objects.exclude(
Q(alive=False), Q(related_individual__isnull=True)
).order_by("pk"),
[i1, i2, i3],
)
class Ticket23622Tests(TestCase):
@skipUnlessDBFeature("can_distinct_on_fields")
def test_ticket_23622(self):
"""
Make sure __pk__in and __in work the same for related fields when
using a distinct on subquery.
"""
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=0.0)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=123,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=23,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=234,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=12,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=567,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=76,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=7,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=56,
field_b1=True,
modelc_fk=c1,
)
qx = Q(
ticket23605b__pk__in=Ticket23605B.objects.order_by(
"modela_fk", "-field_b1"
).distinct("modela_fk")
) & Q(ticket23605b__field_b0__gte=300)
qy = Q(
ticket23605b__in=Ticket23605B.objects.order_by(
"modela_fk", "-field_b1"
).distinct("modela_fk")
) & Q(ticket23605b__field_b0__gte=300)
self.assertEqual(
set(Ticket23605A.objects.filter(qx).values_list("pk", flat=True)),
set(Ticket23605A.objects.filter(qy).values_list("pk", flat=True)),
)
self.assertSequenceEqual(Ticket23605A.objects.filter(qx), [a2])
|
9fa71e01ce9f42d727f6d81ac3d642423e9c4814d335871c46da893b53fc1f61 | import json
import unittest
import xml.etree.ElementTree
from django.db import NotSupportedError, connection, transaction
from django.db.models import Count
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from .models import Tag
@skipUnlessDBFeature("supports_explaining_query_execution")
class ExplainTests(TestCase):
def test_basic(self):
querysets = [
Tag.objects.filter(name="test"),
Tag.objects.filter(name="test").select_related("parent"),
Tag.objects.filter(name="test").prefetch_related("children"),
Tag.objects.filter(name="test").annotate(Count("children")),
Tag.objects.filter(name="test").values_list("name"),
Tag.objects.order_by().union(Tag.objects.order_by().filter(name="test")),
Tag.objects.select_for_update().filter(name="test"),
]
supported_formats = connection.features.supported_explain_formats
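        # Exercise the default (None) plus every supported format in both
        # upper- and lowercase spellings.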
all_formats = (
(None,)
+ tuple(supported_formats)
+ tuple(f.lower() for f in supported_formats)
)
for idx, queryset in enumerate(querysets):
for format in all_formats:
with self.subTest(format=format, queryset=idx):
with self.assertNumQueries(1), CaptureQueriesContext(
connection
) as captured_queries:
result = queryset.explain(format=format)
self.assertTrue(
captured_queries[0]["sql"].startswith(
connection.ops.explain_prefix
)
)
self.assertIsInstance(result, str)
self.assertTrue(result)
if format == "xml":
try:
xml.etree.ElementTree.fromstring(result)
except xml.etree.ElementTree.ParseError as e:
self.fail(
f"QuerySet.explain() result is not valid XML: {e}"
)
elif format == "json":
try:
json.loads(result)
except json.JSONDecodeError as e:
self.fail(
f"QuerySet.explain() result is not valid JSON: {e}"
)
def test_unknown_options(self):
with self.assertRaisesMessage(ValueError, "Unknown options: TEST, TEST2"):
Tag.objects.explain(**{"TEST": 1, "TEST2": 1})
def test_unknown_format(self):
msg = "DOES NOT EXIST is not a recognized format."
if connection.features.supported_explain_formats:
msg += " Allowed formats: %s" % ", ".join(
sorted(connection.features.supported_explain_formats)
)
with self.assertRaisesMessage(ValueError, msg):
Tag.objects.explain(format="does not exist")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_postgres_options(self):
qs = Tag.objects.filter(name="test")
test_options = [
{"COSTS": False, "BUFFERS": True, "ANALYZE": True},
{"costs": False, "buffers": True, "analyze": True},
{"verbose": True, "timing": True, "analyze": True},
{"verbose": False, "timing": False, "analyze": True},
{"summary": True},
]
if connection.features.is_postgresql_12:
test_options.append({"settings": True})
if connection.features.is_postgresql_13:
test_options.append({"analyze": True, "wal": True})
for options in test_options:
with self.subTest(**options), transaction.atomic():
with CaptureQueriesContext(connection) as captured_queries:
qs.explain(format="text", **options)
self.assertEqual(len(captured_queries), 1)
for name, value in options.items():
option = "{} {}".format(name.upper(), "true" if value else "false")
self.assertIn(option, captured_queries[0]["sql"])
def test_option_sql_injection(self):
qs = Tag.objects.filter(name="test")
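        # If the crafted option name reached the SQL it would close the
        # EXPLAIN clause and chain a second statement; the option-name
        # validation must reject it first.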
options = {"SUMMARY true) SELECT 1; --": True}
msg = "Invalid option name: 'SUMMARY true) SELECT 1; --'"
with self.assertRaisesMessage(ValueError, msg):
qs.explain(**options)
def test_invalid_option_names(self):
qs = Tag.objects.filter(name="test")
tests = [
'opt"ion',
"o'ption",
"op`tion",
"opti on",
"option--",
"optio\tn",
"o\nption",
"option;",
"你 好",
# [] are used by MSSQL.
"option[",
"option]",
]
for invalid_option in tests:
with self.subTest(invalid_option):
msg = f"Invalid option name: {invalid_option!r}"
with self.assertRaisesMessage(ValueError, msg):
qs.explain(**{invalid_option: True})
@unittest.skipUnless(connection.vendor == "mysql", "MySQL specific")
def test_mysql_text_to_traditional(self):
# Ensure these cached properties are initialized to prevent queries for
# the MariaDB or MySQL version during the QuerySet evaluation.
connection.features.supported_explain_formats
with CaptureQueriesContext(connection) as captured_queries:
Tag.objects.filter(name="test").explain(format="text")
self.assertEqual(len(captured_queries), 1)
self.assertIn("FORMAT=TRADITIONAL", captured_queries[0]["sql"])
@unittest.skipUnless(
connection.vendor == "mysql", "MariaDB and MySQL >= 8.0.18 specific."
)
def test_mysql_analyze(self):
qs = Tag.objects.filter(name="test")
with CaptureQueriesContext(connection) as captured_queries:
qs.explain(analyze=True)
self.assertEqual(len(captured_queries), 1)
prefix = "ANALYZE " if connection.mysql_is_mariadb else "EXPLAIN ANALYZE "
self.assertTrue(captured_queries[0]["sql"].startswith(prefix))
with CaptureQueriesContext(connection) as captured_queries:
qs.explain(analyze=True, format="JSON")
self.assertEqual(len(captured_queries), 1)
if connection.mysql_is_mariadb:
self.assertIn("FORMAT=JSON", captured_queries[0]["sql"])
else:
self.assertNotIn("FORMAT=JSON", captured_queries[0]["sql"])
@skipIfDBFeature("supports_explaining_query_execution")
class ExplainUnsupportedTests(TestCase):
def test_message(self):
msg = "This backend does not support explaining query execution."
with self.assertRaisesMessage(NotSupportedError, msg):
Tag.objects.filter(name="test").explain()
|
d5ed86cc2d050425b8f10e54abb803680c9c547b65feb28ad0951c8cb7fce1bd | from pathlib import Path
from unittest import mock
from django.template import autoreload
from django.test import SimpleTestCase, override_settings
from django.test.utils import require_jinja2
ROOT = Path(__file__).parent.absolute()
EXTRA_TEMPLATES_DIR = ROOT / "templates_extra"
@override_settings(
INSTALLED_APPS=["template_tests"],
TEMPLATES=[
{
"BACKEND": "django.template.backends.dummy.TemplateStrings",
"APP_DIRS": True,
},
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [EXTRA_TEMPLATES_DIR],
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
],
"loaders": [
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
},
},
],
)
class TemplateReloadTests(SimpleTestCase):
@mock.patch("django.template.autoreload.reset_loaders")
def test_template_changed(self, mock_reset):
template_path = Path(__file__).parent / "templates" / "index.html"
self.assertTrue(autoreload.template_changed(None, template_path))
mock_reset.assert_called_once()
@mock.patch("django.template.autoreload.reset_loaders")
def test_non_template_changed(self, mock_reset):
self.assertIsNone(autoreload.template_changed(None, Path(__file__)))
mock_reset.assert_not_called()
@override_settings(
TEMPLATES=[
{
"DIRS": [ROOT],
"BACKEND": "django.template.backends.django.DjangoTemplates",
}
]
)
@mock.patch("django.template.autoreload.reset_loaders")
def test_non_template_changed_in_template_directory(self, mock_reset):
self.assertIsNone(autoreload.template_changed(None, Path(__file__)))
mock_reset.assert_not_called()
def test_watch_for_template_changes(self):
mock_reloader = mock.MagicMock()
autoreload.watch_for_template_changes(mock_reloader)
self.assertSequenceEqual(
sorted(mock_reloader.watch_dir.call_args_list),
[
mock.call(ROOT / "templates", "**/*"),
mock.call(ROOT / "templates_extra", "**/*"),
],
)
def test_get_template_directories(self):
self.assertSetEqual(
autoreload.get_template_directories(),
{
ROOT / "templates_extra",
ROOT / "templates",
},
)
@mock.patch("django.template.loaders.base.Loader.reset")
def test_reset_all_loaders(self, mock_reset):
autoreload.reset_loaders()
self.assertEqual(mock_reset.call_count, 2)
@override_settings(
TEMPLATES=[
{
"DIRS": [""],
"BACKEND": "django.template.backends.django.DjangoTemplates",
}
]
)
def test_template_dirs_ignore_empty_path(self):
self.assertEqual(autoreload.get_template_directories(), set())
@override_settings(
TEMPLATES=[
{
"DIRS": [
str(ROOT) + "/absolute_str",
"template_tests/relative_str",
Path("template_tests/relative_path"),
],
"BACKEND": "django.template.backends.django.DjangoTemplates",
}
]
)
def test_template_dirs_normalized_to_paths(self):
self.assertSetEqual(
autoreload.get_template_directories(),
{
ROOT / "absolute_str",
Path.cwd() / "template_tests/relative_str",
Path.cwd() / "template_tests/relative_path",
},
)
@require_jinja2
@override_settings(INSTALLED_APPS=["template_tests"])
class Jinja2TemplateReloadTests(SimpleTestCase):
def test_watch_for_template_changes(self):
mock_reloader = mock.MagicMock()
autoreload.watch_for_template_changes(mock_reloader)
self.assertSequenceEqual(
sorted(mock_reloader.watch_dir.call_args_list),
[
mock.call(ROOT / "templates", "**/*"),
],
)
def test_get_template_directories(self):
self.assertSetEqual(
autoreload.get_template_directories(),
{
ROOT / "templates",
},
)
@mock.patch("django.template.loaders.base.Loader.reset")
def test_reset_all_loaders(self, mock_reset):
autoreload.reset_loaders()
self.assertEqual(mock_reset.call_count, 0)
|
51c7ed2db4ffb0fb3d9761489e95b20585aa500e81d9234cbfaf65ac501ae512 | """
Regression tests for Model inheritance behavior.
"""
import datetime
from operator import attrgetter
from unittest import expectedFailure
from django import forms
from django.test import TestCase
from .models import (
ArticleWithAuthor,
BachelorParty,
BirthdayParty,
BusStation,
Child,
Congressman,
DerivedM,
InternalCertificationAudit,
ItalianRestaurant,
M2MChild,
MessyBachelorParty,
ParkingLot,
ParkingLot3,
ParkingLot4A,
ParkingLot4B,
Person,
Place,
Politician,
Profile,
QualityControl,
Restaurant,
SelfRefChild,
SelfRefParent,
Senator,
Supplier,
TrainStation,
User,
Wholesaler,
)
class ModelInheritanceTest(TestCase):
def test_model_inheritance(self):
# Regression for #7350, #7202
# When you create a Parent object with a specific reference to an
# existent child instance, saving the Parent doesn't duplicate the
# child. This behavior is only activated during a raw save - it is
# mostly relevant to deserialization, but any sort of CORBA style
# 'narrow()' API would require a similar approach.
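        # save_base(raw=True) writes only the model's own table and skips
        # saving parent models, which is what lets the child rows below
        # attach to pre-existing parent rows.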
# Create a child-parent-grandparent chain
place1 = Place(name="Guido's House of Pasta", address="944 W. Fullerton")
place1.save_base(raw=True)
restaurant = Restaurant(
place_ptr=place1,
serves_hot_dogs=True,
serves_pizza=False,
)
restaurant.save_base(raw=True)
italian_restaurant = ItalianRestaurant(
restaurant_ptr=restaurant, serves_gnocchi=True
)
italian_restaurant.save_base(raw=True)
# Create a child-parent chain with an explicit parent link
place2 = Place(name="Main St", address="111 Main St")
place2.save_base(raw=True)
park = ParkingLot(parent=place2, capacity=100)
park.save_base(raw=True)
# No extra parent objects have been created.
places = list(Place.objects.all())
self.assertEqual(places, [place1, place2])
dicts = list(Restaurant.objects.values("name", "serves_hot_dogs"))
self.assertEqual(
dicts, [{"name": "Guido's House of Pasta", "serves_hot_dogs": True}]
)
dicts = list(
ItalianRestaurant.objects.values(
"name", "serves_hot_dogs", "serves_gnocchi"
)
)
self.assertEqual(
dicts,
[
{
"name": "Guido's House of Pasta",
"serves_gnocchi": True,
"serves_hot_dogs": True,
}
],
)
dicts = list(ParkingLot.objects.values("name", "capacity"))
self.assertEqual(
dicts,
[
{
"capacity": 100,
"name": "Main St",
}
],
)
# You can also update objects when using a raw save.
place1.name = "Guido's All New House of Pasta"
place1.save_base(raw=True)
restaurant.serves_hot_dogs = False
restaurant.save_base(raw=True)
italian_restaurant.serves_gnocchi = False
italian_restaurant.save_base(raw=True)
place2.name = "Derelict lot"
place2.save_base(raw=True)
park.capacity = 50
park.save_base(raw=True)
# No extra parent objects after an update, either.
places = list(Place.objects.all())
self.assertEqual(places, [place2, place1])
self.assertEqual(places[0].name, "Derelict lot")
self.assertEqual(places[1].name, "Guido's All New House of Pasta")
dicts = list(Restaurant.objects.values("name", "serves_hot_dogs"))
self.assertEqual(
dicts,
[
{
"name": "Guido's All New House of Pasta",
"serves_hot_dogs": False,
}
],
)
dicts = list(
ItalianRestaurant.objects.values(
"name", "serves_hot_dogs", "serves_gnocchi"
)
)
self.assertEqual(
dicts,
[
{
"name": "Guido's All New House of Pasta",
"serves_gnocchi": False,
"serves_hot_dogs": False,
}
],
)
dicts = list(ParkingLot.objects.values("name", "capacity"))
self.assertEqual(
dicts,
[
{
"capacity": 50,
"name": "Derelict lot",
}
],
)
# If you try to raw_save a parent attribute onto a child object,
# the attribute will be ignored.
italian_restaurant.name = "Lorenzo's Pasta Hut"
italian_restaurant.save_base(raw=True)
# Note that the name has not changed
# - name is an attribute of Place, not ItalianRestaurant
dicts = list(
ItalianRestaurant.objects.values(
"name", "serves_hot_dogs", "serves_gnocchi"
)
)
self.assertEqual(
dicts,
[
{
"name": "Guido's All New House of Pasta",
"serves_gnocchi": False,
"serves_hot_dogs": False,
}
],
)
def test_issue_7105(self):
        # Regression tests for #7105: dates() and datetimes() queries should
        # be able to use fields from the parent model as easily as from the
        # child.
Child.objects.create(
name="child", created=datetime.datetime(2008, 6, 26, 17, 0, 0)
)
datetimes = list(Child.objects.datetimes("created", "month"))
self.assertEqual(datetimes, [datetime.datetime(2008, 6, 1, 0, 0)])
def test_issue_7276(self):
# Regression test for #7276: calling delete() on a model with
# multi-table inheritance should delete the associated rows from any
        # ancestor tables, as well as any descendant objects.
place1 = Place(name="Guido's House of Pasta", address="944 W. Fullerton")
place1.save_base(raw=True)
restaurant = Restaurant(
place_ptr=place1,
serves_hot_dogs=True,
serves_pizza=False,
)
restaurant.save_base(raw=True)
italian_restaurant = ItalianRestaurant(
restaurant_ptr=restaurant, serves_gnocchi=True
)
italian_restaurant.save_base(raw=True)
ident = ItalianRestaurant.objects.all()[0].id
self.assertEqual(Place.objects.get(pk=ident), place1)
Restaurant.objects.create(
name="a",
address="xx",
serves_hot_dogs=True,
serves_pizza=False,
)
# This should delete both Restaurants, plus the related places, plus
# the ItalianRestaurant.
Restaurant.objects.all().delete()
with self.assertRaises(Place.DoesNotExist):
Place.objects.get(pk=ident)
with self.assertRaises(ItalianRestaurant.DoesNotExist):
ItalianRestaurant.objects.get(pk=ident)
def test_issue_6755(self):
"""
Regression test for #6755
"""
r = Restaurant(serves_pizza=False, serves_hot_dogs=False)
r.save()
self.assertEqual(r.id, r.place_ptr_id)
orig_id = r.id
r = Restaurant(place_ptr_id=orig_id, serves_pizza=True, serves_hot_dogs=False)
r.save()
self.assertEqual(r.id, orig_id)
self.assertEqual(r.id, r.place_ptr_id)
def test_issue_11764(self):
"""
Regression test for #11764
"""
wholesalers = list(Wholesaler.objects.select_related())
self.assertEqual(wholesalers, [])
def test_issue_7853(self):
"""
Regression test for #7853
If the parent class has a self-referential link, make sure that any
updates to that link via the child update the right table.
"""
obj = SelfRefChild.objects.create(child_data=37, parent_data=42)
obj.delete()
def test_get_next_previous_by_date(self):
"""
Regression tests for #8076
get_(next/previous)_by_date should work
"""
c1 = ArticleWithAuthor(
headline="ArticleWithAuthor 1",
author="Person 1",
pub_date=datetime.datetime(2005, 8, 1, 3, 0),
)
c1.save()
c2 = ArticleWithAuthor(
headline="ArticleWithAuthor 2",
author="Person 2",
pub_date=datetime.datetime(2005, 8, 1, 10, 0),
)
c2.save()
c3 = ArticleWithAuthor(
headline="ArticleWithAuthor 3",
author="Person 3",
pub_date=datetime.datetime(2005, 8, 2),
)
c3.save()
self.assertEqual(c1.get_next_by_pub_date(), c2)
self.assertEqual(c2.get_next_by_pub_date(), c3)
with self.assertRaises(ArticleWithAuthor.DoesNotExist):
c3.get_next_by_pub_date()
self.assertEqual(c3.get_previous_by_pub_date(), c2)
self.assertEqual(c2.get_previous_by_pub_date(), c1)
with self.assertRaises(ArticleWithAuthor.DoesNotExist):
c1.get_previous_by_pub_date()
def test_inherited_fields(self):
"""
Regression test for #8825 and #9390
Make sure all inherited fields (esp. m2m fields, in this case) appear
on the child class.
"""
m2mchildren = list(M2MChild.objects.filter(articles__isnull=False))
self.assertEqual(m2mchildren, [])
# Ordering should not include any database column more than once (this
# is most likely to occur naturally with model inheritance, so we
# check it here). Regression test for #9390. This necessarily pokes at
# the SQL string for the query, since the duplicate problems are only
# apparent at that late stage.
qs = ArticleWithAuthor.objects.order_by("pub_date", "pk")
sql = qs.query.get_compiler(qs.db).as_sql()[0]
fragment = sql[sql.find("ORDER BY") :]
pos = fragment.find("pub_date")
self.assertEqual(fragment.find("pub_date", pos + 1), -1)
def test_queryset_update_on_parent_model(self):
"""
Regression test for #10362
It is possible to call update() and only change a field in
an ancestor model.
"""
article = ArticleWithAuthor.objects.create(
author="fred",
headline="Hey there!",
pub_date=datetime.datetime(2009, 3, 1, 8, 0, 0),
)
update = ArticleWithAuthor.objects.filter(author="fred").update(
headline="Oh, no!"
)
self.assertEqual(update, 1)
update = ArticleWithAuthor.objects.filter(pk=article.pk).update(
headline="Oh, no!"
)
self.assertEqual(update, 1)
derivedm1 = DerivedM.objects.create(
customPK=44,
base_name="b1",
derived_name="d1",
)
self.assertEqual(derivedm1.customPK, 44)
self.assertEqual(derivedm1.base_name, "b1")
self.assertEqual(derivedm1.derived_name, "d1")
derivedms = list(DerivedM.objects.all())
self.assertEqual(derivedms, [derivedm1])
def test_use_explicit_o2o_to_parent_as_pk(self):
"""
The connector from child to parent need not be the pk on the child.
"""
self.assertEqual(ParkingLot3._meta.pk.name, "primary_key")
# the child->parent link
self.assertEqual(ParkingLot3._meta.get_ancestor_link(Place).name, "parent")
def test_use_explicit_o2o_to_parent_from_abstract_model(self):
self.assertEqual(ParkingLot4A._meta.pk.name, "parent")
ParkingLot4A.objects.create(
name="Parking4A",
address="21 Jump Street",
)
self.assertEqual(ParkingLot4B._meta.pk.name, "parent")
        ParkingLot4B.objects.create(
name="Parking4B",
address="21 Jump Street",
)
def test_all_fields_from_abstract_base_class(self):
"""
Regression tests for #7588
"""
# All fields from an ABC, including those inherited non-abstractly
# should be available on child classes (#7588). Creating this instance
# should work without error.
QualityControl.objects.create(
headline="Problems in Django",
pub_date=datetime.datetime.now(),
quality=10,
assignee="adrian",
)
def test_abstract_base_class_m2m_relation_inheritance(self):
# many-to-many relations defined on an abstract base class are
# correctly inherited (and created) on the child class.
p1 = Person.objects.create(name="Alice")
p2 = Person.objects.create(name="Bob")
p3 = Person.objects.create(name="Carol")
p4 = Person.objects.create(name="Dave")
birthday = BirthdayParty.objects.create(name="Birthday party for Alice")
birthday.attendees.set([p1, p3])
bachelor = BachelorParty.objects.create(name="Bachelor party for Bob")
bachelor.attendees.set([p2, p4])
parties = list(p1.birthdayparty_set.all())
self.assertEqual(parties, [birthday])
parties = list(p1.bachelorparty_set.all())
self.assertEqual(parties, [])
parties = list(p2.bachelorparty_set.all())
self.assertEqual(parties, [bachelor])
# A subclass of a subclass of an abstract model doesn't get its own
# accessor.
self.assertFalse(hasattr(p2, "messybachelorparty_set"))
# ... but it does inherit the m2m from its parent
messy = MessyBachelorParty.objects.create(name="Bachelor party for Dave")
messy.attendees.set([p4])
messy_parent = messy.bachelorparty_ptr
parties = list(p4.bachelorparty_set.all())
self.assertEqual(parties, [bachelor, messy_parent])
def test_abstract_base_class_m2m_relation_inheritance_manager_reused(self):
p1 = Person.objects.create(name="Alice")
self.assertIs(p1.birthdayparty_set, p1.birthdayparty_set)
self.assertIs(p1.bachelorparty_set, p1.bachelorparty_set)
def test_abstract_verbose_name_plural_inheritance(self):
"""
verbose_name_plural correctly inherited from ABC if inheritance chain
includes an abstract model.
"""
# Regression test for #11369: verbose_name_plural should be inherited
# from an ABC even when there are one or more intermediate
# abstract models in the inheritance chain, for consistency with
# verbose_name.
self.assertEqual(InternalCertificationAudit._meta.verbose_name_plural, "Audits")
def test_inherited_nullable_exclude(self):
obj = SelfRefChild.objects.create(child_data=37, parent_data=42)
self.assertQuerysetEqual(
SelfRefParent.objects.exclude(self_data=72), [obj.pk], attrgetter("pk")
)
self.assertQuerysetEqual(
SelfRefChild.objects.exclude(self_data=72), [obj.pk], attrgetter("pk")
)
def test_concrete_abstract_concrete_pk(self):
"""
Primary key set correctly with concrete->abstract->concrete inheritance.
"""
# Regression test for #13987: Primary key is incorrectly determined
# when more than one model has a concrete->abstract->concrete
# inheritance hierarchy.
self.assertEqual(
len(
[field for field in BusStation._meta.local_fields if field.primary_key]
),
1,
)
self.assertEqual(
len(
[
field
for field in TrainStation._meta.local_fields
if field.primary_key
]
),
1,
)
self.assertIs(BusStation._meta.pk.model, BusStation)
self.assertIs(TrainStation._meta.pk.model, TrainStation)
def test_inherited_unique_field_with_form(self):
"""
A model which has different primary key for the parent model passes
unique field checking correctly (#17615).
"""
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = "__all__"
User.objects.create(username="user_only")
p = Profile.objects.create(username="user_with_profile")
form = ProfileForm(
{"username": "user_with_profile", "extra": "hello"}, instance=p
)
self.assertTrue(form.is_valid())
def test_inheritance_joins(self):
# Test for #17502 - check that filtering through two levels of
# inheritance chain doesn't generate extra joins.
qs = ItalianRestaurant.objects.all()
self.assertEqual(str(qs.query).count("JOIN"), 2)
qs = ItalianRestaurant.objects.filter(name="foo")
self.assertEqual(str(qs.query).count("JOIN"), 2)
@expectedFailure
def test_inheritance_values_joins(self):
# It would be nice (but not too important) to skip the middle join in
        # this case. Skipping is possible as nothing from the middle model is
        # used in the qs and the top model contains a direct pointer to the
        # bottom model.
qs = ItalianRestaurant.objects.values_list("serves_gnocchi").filter(name="foo")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_issue_21554(self):
senator = Senator.objects.create(name="John Doe", title="X", state="Y")
senator = Senator.objects.get(pk=senator.pk)
self.assertEqual(senator.name, "John Doe")
self.assertEqual(senator.title, "X")
self.assertEqual(senator.state, "Y")
def test_inheritance_resolve_columns(self):
Restaurant.objects.create(
name="Bobs Cafe",
address="Somewhere",
serves_pizza=True,
serves_hot_dogs=True,
)
p = Place.objects.select_related("restaurant")[0]
self.assertIsInstance(p.restaurant.serves_pizza, bool)
def test_inheritance_select_related(self):
# Regression test for #7246
r1 = Restaurant.objects.create(
name="Nobu", serves_hot_dogs=True, serves_pizza=False
)
r2 = Restaurant.objects.create(
name="Craft", serves_hot_dogs=False, serves_pizza=True
)
Supplier.objects.create(name="John", restaurant=r1)
Supplier.objects.create(name="Jane", restaurant=r2)
self.assertQuerysetEqual(
Supplier.objects.order_by("name").select_related(),
[
"Jane",
"John",
],
attrgetter("name"),
)
jane = Supplier.objects.order_by("name").select_related("restaurant")[0]
self.assertEqual(jane.restaurant.name, "Craft")
def test_filter_with_parent_fk(self):
r = Restaurant.objects.create()
s = Supplier.objects.create(restaurant=r)
# The mismatch between Restaurant and Place is intentional (#28175).
self.assertSequenceEqual(
Supplier.objects.filter(restaurant__in=Place.objects.all()), [s]
)
def test_ptr_accessor_assigns_state(self):
r = Restaurant.objects.create()
self.assertIs(r.place_ptr._state.adding, False)
self.assertEqual(r.place_ptr._state.db, "default")
def test_related_filtering_query_efficiency_ticket_15844(self):
r = Restaurant.objects.create(
name="Guido's House of Pasta",
address="944 W. Fullerton",
serves_hot_dogs=True,
serves_pizza=False,
)
s = Supplier.objects.create(restaurant=r)
with self.assertNumQueries(1):
self.assertSequenceEqual(Supplier.objects.filter(restaurant=r), [s])
with self.assertNumQueries(1):
self.assertSequenceEqual(r.supplier_set.all(), [s])
def test_queries_on_parent_access(self):
italian_restaurant = ItalianRestaurant.objects.create(
name="Guido's House of Pasta",
address="944 W. Fullerton",
serves_hot_dogs=True,
serves_pizza=False,
serves_gnocchi=True,
)
# No queries are made when accessing the parent objects.
italian_restaurant = ItalianRestaurant.objects.get(pk=italian_restaurant.pk)
with self.assertNumQueries(0):
restaurant = italian_restaurant.restaurant_ptr
self.assertEqual(restaurant.place_ptr.restaurant, restaurant)
self.assertEqual(restaurant.italianrestaurant, italian_restaurant)
# One query is made when accessing the parent objects when the instance
# is deferred.
italian_restaurant = ItalianRestaurant.objects.only("serves_gnocchi").get(
pk=italian_restaurant.pk
)
with self.assertNumQueries(1):
restaurant = italian_restaurant.restaurant_ptr
self.assertEqual(restaurant.place_ptr.restaurant, restaurant)
self.assertEqual(restaurant.italianrestaurant, italian_restaurant)
# No queries are made when accessing the parent objects when the
# instance has deferred a field not present in the parent table.
italian_restaurant = ItalianRestaurant.objects.defer("serves_gnocchi").get(
pk=italian_restaurant.pk
)
with self.assertNumQueries(0):
restaurant = italian_restaurant.restaurant_ptr
self.assertEqual(restaurant.place_ptr.restaurant, restaurant)
self.assertEqual(restaurant.italianrestaurant, italian_restaurant)
def test_id_field_update_on_ancestor_change(self):
place1 = Place.objects.create(name="House of Pasta", address="944 Fullerton")
place2 = Place.objects.create(name="House of Pizza", address="954 Fullerton")
place3 = Place.objects.create(name="Burger house", address="964 Fullerton")
restaurant1 = Restaurant.objects.create(
place_ptr=place1,
serves_hot_dogs=True,
serves_pizza=False,
)
restaurant2 = Restaurant.objects.create(
place_ptr=place2,
serves_hot_dogs=True,
serves_pizza=False,
)
italian_restaurant = ItalianRestaurant.objects.create(
restaurant_ptr=restaurant1,
serves_gnocchi=True,
)
# Changing the parent of a restaurant changes the restaurant's ID & PK.
restaurant1.place_ptr = place3
self.assertEqual(restaurant1.pk, place3.pk)
self.assertEqual(restaurant1.id, place3.id)
self.assertEqual(restaurant1.pk, restaurant1.id)
restaurant1.place_ptr = None
self.assertIsNone(restaurant1.pk)
self.assertIsNone(restaurant1.id)
# Changing the parent of an italian restaurant changes the restaurant's
# ID & PK.
italian_restaurant.restaurant_ptr = restaurant2
self.assertEqual(italian_restaurant.pk, restaurant2.pk)
self.assertEqual(italian_restaurant.id, restaurant2.id)
self.assertEqual(italian_restaurant.pk, italian_restaurant.id)
italian_restaurant.restaurant_ptr = None
self.assertIsNone(italian_restaurant.pk)
self.assertIsNone(italian_restaurant.id)
def test_create_new_instance_with_pk_equals_none(self):
p1 = Profile.objects.create(username="john")
p2 = User.objects.get(pk=p1.user_ptr_id).profile
# Create a new profile by setting pk = None.
p2.pk = None
p2.user_ptr_id = None
p2.username = "bill"
p2.save()
self.assertEqual(Profile.objects.count(), 2)
self.assertEqual(User.objects.get(pk=p1.user_ptr_id).username, "john")
def test_create_new_instance_with_pk_equals_none_multi_inheritance(self):
c1 = Congressman.objects.create(state="PA", name="John", title="senator 1")
c2 = Person.objects.get(pk=c1.pk).congressman
# Create a new congressman by setting pk = None.
c2.pk = None
c2.id = None
c2.politician_ptr_id = None
c2.name = "Bill"
c2.title = "senator 2"
c2.save()
self.assertEqual(Congressman.objects.count(), 2)
self.assertEqual(Person.objects.get(pk=c1.pk).name, "John")
self.assertEqual(
Politician.objects.get(pk=c1.politician_ptr_id).title,
"senator 1",
)
def test_mti_update_parent_through_child(self):
Politician.objects.create()
Congressman.objects.create()
Congressman.objects.update(title="senator 1")
self.assertEqual(Congressman.objects.get().title, "senator 1")
def test_mti_update_grand_parent_through_child(self):
Politician.objects.create()
Senator.objects.create()
Senator.objects.update(title="senator 1")
self.assertEqual(Senator.objects.get().title, "senator 1")
|
749152fe002ec0937d79cbfe504f8a06031f5c42b3565e3d7853fe968fcc9485 | from django.db.models import F, Sum
from django.test import TestCase
from .models import Company, Employee
class ValuesExpressionsTests(TestCase):
@classmethod
def setUpTestData(cls):
Company.objects.create(
name="Example Inc.",
num_employees=2300,
num_chairs=5,
ceo=Employee.objects.create(firstname="Joe", lastname="Smith", salary=10),
)
Company.objects.create(
name="Foobar Ltd.",
num_employees=3,
num_chairs=4,
ceo=Employee.objects.create(firstname="Frank", lastname="Meyer", salary=20),
)
Company.objects.create(
name="Test GmbH",
num_employees=32,
num_chairs=1,
ceo=Employee.objects.create(
firstname="Max", lastname="Mustermann", salary=30
),
)
def test_values_expression(self):
self.assertSequenceEqual(
Company.objects.values(salary=F("ceo__salary")),
[{"salary": 10}, {"salary": 20}, {"salary": 30}],
)
def test_values_expression_alias_sql_injection(self):
crafted_alias = """injected_name" from "expressions_company"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Company.objects.values(**{crafted_alias: F("ceo__salary")})
def test_values_expression_group_by(self):
# values() applies annotate() first, so values selected are grouped by
# id, not firstname.
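        # The first assertion below therefore gets one row per Employee;
        # chaining .values("firstname").annotate(...) explicitly restores
        # grouping by firstname.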
Employee.objects.create(firstname="Joe", lastname="Jones", salary=2)
joes = Employee.objects.filter(firstname="Joe")
self.assertSequenceEqual(
joes.values("firstname", sum_salary=Sum("salary")).order_by("sum_salary"),
[
{"firstname": "Joe", "sum_salary": 2},
{"firstname": "Joe", "sum_salary": 10},
],
)
self.assertSequenceEqual(
joes.values("firstname").annotate(sum_salary=Sum("salary")),
[{"firstname": "Joe", "sum_salary": 12}],
)
def test_chained_values_with_expression(self):
Employee.objects.create(firstname="Joe", lastname="Jones", salary=2)
joes = Employee.objects.filter(firstname="Joe").values("firstname")
self.assertSequenceEqual(
joes.values("firstname", sum_salary=Sum("salary")),
[{"firstname": "Joe", "sum_salary": 12}],
)
self.assertSequenceEqual(
joes.values(sum_salary=Sum("salary")), [{"sum_salary": 12}]
)
def test_values_list_expression(self):
companies = Company.objects.values_list("name", F("ceo__salary"))
self.assertSequenceEqual(
companies, [("Example Inc.", 10), ("Foobar Ltd.", 20), ("Test GmbH", 30)]
)
def test_values_list_expression_flat(self):
companies = Company.objects.values_list(F("ceo__salary"), flat=True)
self.assertSequenceEqual(companies, (10, 20, 30))
|
5aad640689ea5fbd4614710dfe3e1dba65cddf1853418541d90d6de693eb99ca | # Django documentation build configuration file, created by
# sphinx-quickstart on Thu Mar 27 09:06:53 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't picklable (module imports are okay, they're removed automatically).
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
from os.path import abspath, dirname, join
# Workaround for sphinx-build recursion limit overflow:
# pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
# RuntimeError: maximum recursion depth exceeded while pickling an object
#
# Python's default allowed recursion depth is 1000 but this isn't enough for
# building docs/ref/settings.txt sometimes.
# https://groups.google.com/g/sphinx-dev/c/MtRf64eGtv4/discussion
sys.setrecursionlimit(2000)
# Make sure we get the version of this copy of Django
sys.path.insert(1, dirname(dirname(abspath(__file__))))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(abspath(join(dirname(__file__), "_ext")))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.6.0"
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"djangodocs",
"sphinx.ext.extlinks",
"sphinx.ext.intersphinx",
"sphinx.ext.viewcode",
"sphinx.ext.autosectionlabel",
]
# AutosectionLabel settings.
# Uses a <page>:<label> schema which doesn't work for duplicate sub-section
# labels, so set max depth.
autosectionlabel_prefix_document = True
autosectionlabel_maxdepth = 2
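# With prefix_document=True, a section like "Settings" in ref/settings.txt is
# referenced as :ref:`ref/settings:Settings`.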
# Linkcheck settings.
linkcheck_ignore = [
# Special-use addresses and domain names. (RFC 6761/6890)
r"^https?://(?:127\.0\.0\.1|\[::1\])(?::\d+)?/",
r"^https?://(?:[^/\.]+\.)*example\.(?:com|net|org)(?::\d+)?/",
r"^https?://(?:[^/\.]+\.)*(?:example|invalid|localhost|test)(?::\d+)?/",
# Pages that are inaccessible because they require authentication.
r"^https://github\.com/[^/]+/[^/]+/fork",
r"^https://code\.djangoproject\.com/github/login",
r"^https://code\.djangoproject\.com/newticket",
r"^https://(?:code|www)\.djangoproject\.com/admin/",
r"^https://www\.djangoproject\.com/community/add/blogs/",
r"^https://www\.google\.com/webmasters/tools/ping",
r"^https://search\.google\.com/search-console/welcome",
# Fragments used to dynamically switch content or populate fields.
r"^https://web\.libera\.chat/#",
r"^https://github\.com/[^#]+#L\d+-L\d+$",
r"^https://help\.apple\.com/itc/podcasts_connect/#/itc",
# Anchors on certain pages with missing a[name] attributes.
r"^https://tools\.ietf\.org/html/rfc1123\.html#section-",
]
# Spelling check needs an additional module that is not installed by default.
# Add it only if spelling check is requested so docs can be generated without it.
if "spelling" in sys.argv:
extensions.append("sphinxcontrib.spelling")
# Spelling language.
spelling_lang = "en_US"
# Location of word list.
spelling_word_list_filename = "spelling_wordlist"
spelling_warning = True
# Add any paths that contain templates here, relative to this directory.
# templates_path = []
# The suffix of source filenames.
source_suffix = ".txt"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "contents"
# General substitutions.
project = "Django"
copyright = "Django Software Foundation and contributors"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "4.1"
# The full version, including alpha/beta/rc tags.
try:
from django import VERSION, get_version
except ImportError:
release = version
else:
def django_release():
pep440ver = get_version()
if VERSION[3:5] == ("alpha", 0) and "dev" not in pep440ver:
return pep440ver + ".dev"
return pep440ver
release = django_release()
# The "development version" of Django
django_next_version = "4.1"
extlinks = {
"bpo": ("https://bugs.python.org/issue?@action=redirect&bpo=%s", "bpo-"),
"commit": ("https://github.com/django/django/commit/%s", ""),
"cve": ("https://nvd.nist.gov/vuln/detail/CVE-%s", "CVE-"),
# A file or directory. GitHub redirects from blob to tree if needed.
"source": ("https://github.com/django/django/blob/main/%s", ""),
"ticket": ("https://code.djangoproject.com/ticket/%s", "#"),
}
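# Illustrative note (not part of the original file): with the mapping above,
# writing :ticket:`12345` in the docs links to
# https://code.djangoproject.com/ticket/12345 -- "%s" is filled with the role
# argument -- and the second tuple item supplies the displayed caption prefix.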
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# Location for .po/.mo translation files used when language is set
locale_dirs = ["locale/"]
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = "%B %d, %Y"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build", "_theme", "requirements.txt"]
# The reST default role (used for this markup: `text`) to use for all documents.
default_role = "default-role-error"
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "trac"
# Links to Python's docs should reference the most recent version of the 3.x
# branch, which is located at this URL.
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
"sphinx": ("https://www.sphinx-doc.org/en/master/", None),
"psycopg2": ("https://www.psycopg.org/docs/", None),
}
# Python's docs don't change every week.
intersphinx_cache_limit = 90 # days
# The 'versionadded' and 'versionchanged' directives are overridden.
suppress_warnings = ["app.add_directive"]
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "djangodocs"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_theme"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = "%b %d, %Y"
# Content template for the index page.
# html_index = ''
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "Djangodoc"
modindex_common_prefix = ["django."]
# Appended to every page
rst_epilog = """
.. |django-users| replace:: :ref:`django-users <django-users-mailing-list>`
.. |django-developers| replace:: :ref:`django-developers <django-developers-mailing-list>`
.. |django-announce| replace:: :ref:`django-announce <django-announce-mailing-list>`
.. |django-updates| replace:: :ref:`django-updates <django-updates-mailing-list>`
""" # NOQA
# -- Options for LaTeX output --------------------------------------------------
# Use XeLaTeX for Unicode support.
latex_engine = "xelatex"
latex_use_xindy = False
# Set font for CJK and fallbacks for unicode characters.
latex_elements = {
"fontpkg": r"""
\setmainfont{Symbola}
""",
"preamble": r"""
\usepackage{newunicodechar}
\usepackage[UTF8]{ctex}
\newunicodechar{π}{\ensuremath{\pi}}
\newunicodechar{≤}{\ensuremath{\le}}
\newunicodechar{≥}{\ensuremath{\ge}}
\newunicodechar{♥}{\ensuremath{\heartsuit}}
\newunicodechar{…}{\ensuremath{\ldots}}
""",
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
# latex_documents = []
latex_documents = [
(
"contents",
"django.tex",
"Django Documentation",
"Django Software Foundation",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
"ref/django-admin",
"django-admin",
"Utility script for the Django web framework",
["Django Software Foundation"],
1,
)
]
# -- Options for Texinfo output ------------------------------------------------
# List of tuples (startdocname, targetname, title, author, dir_entry,
# description, category, toctree_only)
texinfo_documents = [
(
master_doc,
"django",
"",
"",
"Django",
"Documentation of the Django framework",
"Web development",
False,
)
]
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = "Django Software Foundation"
epub_publisher = "Django Software Foundation"
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = 'Django'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
epub_theme = "djangodocs-epub"
# The language of the text. It defaults to the language option
# or en if the language is not set.
# epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''
# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
# epub_identifier = ''
# A unique identification for the text.
# epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
epub_cover = ("", "epub-cover.html")
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
# epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
# epub_post_files = []
# A list of files that should not be packed into the epub file.
# epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
# epub_tocdepth = 3
# Allow duplicate toc entries.
# epub_tocdup = True
# Choose between 'default' and 'includehidden'.
# epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
# epub_fix_images = False
# Scale large images.
# epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# epub_show_urls = 'inline'
# If false, no index is generated.
# epub_use_index = True
c374164c02876e81c8ac9a47122cc03b284f19c6258d99ed395fe4d0c5f652e7
"""
Timezone-related classes and functions.
"""
import functools
import sys
import warnings
try:
import zoneinfo
except ImportError:
from backports import zoneinfo
from contextlib import ContextDecorator
from datetime import datetime, timedelta, timezone, tzinfo
from asgiref.local import Local
from django.conf import settings
from django.utils.deprecation import RemovedInDjango50Warning
__all__ = [ # noqa for utc RemovedInDjango50Warning.
"utc",
"get_fixed_timezone",
"get_default_timezone",
"get_default_timezone_name",
"get_current_timezone",
"get_current_timezone_name",
"activate",
"deactivate",
"override",
"localtime",
"localdate",
"now",
"is_aware",
"is_naive",
"make_aware",
"make_naive",
]
# RemovedInDjango50Warning: sentinel for deprecation of is_dst parameters.
NOT_PASSED = object()
def __getattr__(name):
if name != "utc":
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
warnings.warn(
"The django.utils.timezone.utc alias is deprecated. "
"Please update your code to use datetime.timezone.utc instead.",
RemovedInDjango50Warning,
stacklevel=2,
)
return timezone.utc
def get_fixed_timezone(offset):
"""Return a tzinfo instance with a fixed offset from UTC."""
if isinstance(offset, timedelta):
offset = offset.total_seconds() // 60
sign = "-" if offset < 0 else "+"
hhmm = "%02d%02d" % divmod(abs(offset), 60)
name = sign + hhmm
return timezone(timedelta(minutes=offset), name)
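# A minimal doctest-style sketch of get_fixed_timezone() (illustrative only):
#
#     >>> get_fixed_timezone(-60).tzname(None)
#     '-0100'
#     >>> get_fixed_timezone(timedelta(hours=5, minutes=30)).tzname(None)
#     '+0530'
#
# A timedelta is first floored to whole minutes, so sub-minute offsets are
# discarded by the integer division above.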
# In order to avoid accessing settings at compile time,
# wrap the logic in a function and cache the result.
@functools.lru_cache
def get_default_timezone():
"""
Return the default time zone as a tzinfo instance.
This is the time zone defined by settings.TIME_ZONE.
"""
if settings.USE_DEPRECATED_PYTZ:
import pytz
return pytz.timezone(settings.TIME_ZONE)
return zoneinfo.ZoneInfo(settings.TIME_ZONE)
# This function exists for consistency with get_current_timezone_name
def get_default_timezone_name():
"""Return the name of the default time zone."""
return _get_timezone_name(get_default_timezone())
_active = Local()
def get_current_timezone():
"""Return the currently active time zone as a tzinfo instance."""
return getattr(_active, "value", get_default_timezone())
def get_current_timezone_name():
"""Return the name of the currently active time zone."""
return _get_timezone_name(get_current_timezone())
def _get_timezone_name(timezone):
"""
    Return the offset for fixed-offset timezones, or the name of the
    timezone otherwise.
"""
return timezone.tzname(None) or str(timezone)
# Timezone selection functions.
# These functions don't change os.environ['TZ'] or call time.tzset()
# because doing so isn't thread safe.
def activate(timezone):
"""
Set the time zone for the current thread.
The ``timezone`` argument must be an instance of a tzinfo subclass or a
time zone name.
"""
if isinstance(timezone, tzinfo):
_active.value = timezone
elif isinstance(timezone, str):
if settings.USE_DEPRECATED_PYTZ:
import pytz
_active.value = pytz.timezone(timezone)
else:
_active.value = zoneinfo.ZoneInfo(timezone)
else:
raise ValueError("Invalid timezone: %r" % timezone)
def deactivate():
"""
Unset the time zone for the current thread.
Django will then use the time zone defined by settings.TIME_ZONE.
"""
if hasattr(_active, "value"):
del _active.value
class override(ContextDecorator):
"""
Temporarily set the time zone for the current thread.
This is a context manager that uses django.utils.timezone.activate()
to set the timezone on entry and restores the previously active timezone
on exit.
The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
time zone name, or ``None``. If it is ``None``, Django enables the default
time zone.
"""
def __init__(self, timezone):
self.timezone = timezone
def __enter__(self):
self.old_timezone = getattr(_active, "value", None)
if self.timezone is None:
deactivate()
else:
activate(self.timezone)
def __exit__(self, exc_type, exc_value, traceback):
if self.old_timezone is None:
deactivate()
else:
_active.value = self.old_timezone
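# Illustrative usage sketch for override() (not part of the original module;
# the zone name is an arbitrary example):
#
#     with override("Europe/Paris"):
#         paris_now = localtime()
#     # On exit, the previously active time zone (or the default) is restored.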
# Templates
def template_localtime(value, use_tz=None):
"""
    Check if value is a datetime and convert it to local time if necessary.
If use_tz is provided and is not None, that will force the value to
be converted (or not), overriding the value of settings.USE_TZ.
This function is designed for use by the template engine.
"""
should_convert = (
isinstance(value, datetime)
and (settings.USE_TZ if use_tz is None else use_tz)
and not is_naive(value)
and getattr(value, "convert_to_local_time", True)
)
return localtime(value) if should_convert else value
# Utilities
def localtime(value=None, timezone=None):
"""
Convert an aware datetime.datetime to local time.
Only aware datetimes are allowed. When value is omitted, it defaults to
now().
Local time is defined by the current time zone, unless another time zone
is specified.
"""
if value is None:
value = now()
if timezone is None:
timezone = get_current_timezone()
    # Raise explicitly for naive datetimes; astimezone() would otherwise
    # silently assume the system local time zone.
if is_naive(value):
raise ValueError("localtime() cannot be applied to a naive datetime")
return value.astimezone(timezone)
def localdate(value=None, timezone=None):
"""
Convert an aware datetime to local time and return the value's date.
Only aware datetimes are allowed. When value is omitted, it defaults to
now().
Local time is defined by the current time zone, unless another time zone is
specified.
"""
return localtime(value, timezone).date()
def now():
"""
Return an aware or naive datetime.datetime, depending on settings.USE_TZ.
"""
return datetime.now(tz=timezone.utc if settings.USE_TZ else None)
# By design, these four functions don't perform any checks on their arguments.
# The caller should ensure that they don't receive an invalid value like None.
def is_aware(value):
"""
Determine if a given datetime.datetime is aware.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is not None
def is_naive(value):
"""
Determine if a given datetime.datetime is naive.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is None
def make_aware(value, timezone=None, is_dst=NOT_PASSED):
"""Make a naive datetime.datetime in a given time zone aware."""
if is_dst is NOT_PASSED:
is_dst = None
else:
warnings.warn(
"The is_dst argument to make_aware(), used by the Trunc() "
"database functions and QuerySet.datetimes(), is deprecated as it "
"has no effect with zoneinfo time zones.",
RemovedInDjango50Warning,
)
if timezone is None:
timezone = get_current_timezone()
if _is_pytz_zone(timezone):
# This method is available for pytz time zones.
return timezone.localize(value, is_dst=is_dst)
else:
# Check that we won't overwrite the timezone of an aware datetime.
if is_aware(value):
raise ValueError("make_aware expects a naive datetime, got %s" % value)
# This may be wrong around DST changes!
return value.replace(tzinfo=timezone)
def make_naive(value, timezone=None):
"""Make an aware datetime.datetime naive in a given time zone."""
if timezone is None:
timezone = get_current_timezone()
    # Raise explicitly for naive datetimes; astimezone() would otherwise
    # silently assume the system local time zone.
if is_naive(value):
raise ValueError("make_naive() cannot be applied to a naive datetime")
return value.astimezone(timezone).replace(tzinfo=None)
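# Illustrative round-trip sketch (assumes zoneinfo zones; values are examples):
#
#     naive = datetime(2022, 1, 1, 12, 0)
#     aware = make_aware(naive, zoneinfo.ZoneInfo("UTC"))  # 12:00+00:00
#     back = make_naive(aware, zoneinfo.ZoneInfo("UTC"))   # == naive
#
# make_aware() rejects aware inputs and make_naive() rejects naive ones, so
# mixing the two up fails fast with a ValueError.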
_PYTZ_IMPORTED = False
def _pytz_imported():
"""
    Detect whether pytz has been imported, without importing pytz.
Copied from pytz_deprecation_shim with thanks to Paul Ganssle.
"""
global _PYTZ_IMPORTED
if not _PYTZ_IMPORTED and "pytz" in sys.modules:
_PYTZ_IMPORTED = True
return _PYTZ_IMPORTED
def _is_pytz_zone(tz):
"""Checks if a zone is a pytz zone."""
# See if pytz was already imported rather than checking
# settings.USE_DEPRECATED_PYTZ to *allow* manually passing a pytz timezone,
# which some of the test cases (at least) rely on.
if not _pytz_imported():
return False
# If tz could be pytz, then pytz is needed here.
import pytz
_PYTZ_BASE_CLASSES = (pytz.tzinfo.BaseTzInfo, pytz._FixedOffset)
# In releases prior to 2018.4, pytz.UTC was not a subclass of BaseTzInfo
if not isinstance(pytz.UTC, pytz._FixedOffset):
_PYTZ_BASE_CLASSES = _PYTZ_BASE_CLASSES + (type(pytz.UTC),)
return isinstance(tz, _PYTZ_BASE_CLASSES)
def _datetime_ambiguous_or_imaginary(dt, tz):
if _is_pytz_zone(tz):
import pytz
try:
tz.utcoffset(dt)
except (pytz.AmbiguousTimeError, pytz.NonExistentTimeError):
return True
else:
return False
return tz.utcoffset(dt.replace(fold=not dt.fold)) != tz.utcoffset(dt)
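# Illustrative sketch of the fold-based check above (zone and date are
# examples):
#
#     tz = zoneinfo.ZoneInfo("America/New_York")
#     dt = datetime(2022, 11, 6, 1, 30)  # wall time repeated when DST ends
#     _datetime_ambiguous_or_imaginary(dt, tz)  # True: offset varies by fold
#
# For an unambiguous wall time, flipping dt.fold leaves utcoffset() unchanged
# and the function returns False.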
# RemovedInDjango50Warning.
_DIR = dir()
def __dir__():
return sorted([*_DIR, "utc"])
73864836044492fcacd960ca35592e66a64a1b8b7542ed36b544e72f1d16039e
import functools
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from .base import Template
from .context import Context, _builtin_context_processors
from .exceptions import TemplateDoesNotExist
from .library import import_library
class Engine:
default_builtins = [
"django.template.defaulttags",
"django.template.defaultfilters",
"django.template.loader_tags",
]
def __init__(
self,
dirs=None,
app_dirs=False,
context_processors=None,
debug=False,
loaders=None,
string_if_invalid="",
file_charset="utf-8",
libraries=None,
builtins=None,
autoescape=True,
):
if dirs is None:
dirs = []
if context_processors is None:
context_processors = []
if loaders is None:
loaders = ["django.template.loaders.filesystem.Loader"]
if app_dirs:
loaders += ["django.template.loaders.app_directories.Loader"]
loaders = [("django.template.loaders.cached.Loader", loaders)]
else:
if app_dirs:
raise ImproperlyConfigured(
"app_dirs must not be set when loaders is defined."
)
if libraries is None:
libraries = {}
if builtins is None:
builtins = []
self.dirs = dirs
self.app_dirs = app_dirs
self.autoescape = autoescape
self.context_processors = context_processors
self.debug = debug
self.loaders = loaders
self.string_if_invalid = string_if_invalid
self.file_charset = file_charset
self.libraries = libraries
self.template_libraries = self.get_template_libraries(libraries)
self.builtins = self.default_builtins + builtins
self.template_builtins = self.get_template_builtins(self.builtins)
def __repr__(self):
return (
"<%s:%s app_dirs=%s%s debug=%s loaders=%s string_if_invalid=%s "
"file_charset=%s%s%s autoescape=%s>"
) % (
self.__class__.__qualname__,
"" if not self.dirs else " dirs=%s" % repr(self.dirs),
self.app_dirs,
""
if not self.context_processors
else " context_processors=%s" % repr(self.context_processors),
self.debug,
repr(self.loaders),
repr(self.string_if_invalid),
repr(self.file_charset),
"" if not self.libraries else " libraries=%s" % repr(self.libraries),
"" if not self.builtins else " builtins=%s" % repr(self.builtins),
repr(self.autoescape),
)
@staticmethod
@functools.lru_cache
def get_default():
"""
Return the first DjangoTemplates backend that's configured, or raise
ImproperlyConfigured if none are configured.
This is required for preserving historical APIs that rely on a
globally available, implicitly configured engine such as:
>>> from django.template import Context, Template
>>> template = Template("Hello {{ name }}!")
>>> context = Context({'name': "world"})
>>> template.render(context)
'Hello world!'
"""
# Since Engine is imported in django.template and since
# DjangoTemplates is a wrapper around this Engine class,
# local imports are required to avoid import loops.
from django.template import engines
from django.template.backends.django import DjangoTemplates
for engine in engines.all():
if isinstance(engine, DjangoTemplates):
return engine.engine
raise ImproperlyConfigured("No DjangoTemplates backend is configured.")
@cached_property
def template_context_processors(self):
context_processors = _builtin_context_processors
context_processors += tuple(self.context_processors)
return tuple(import_string(path) for path in context_processors)
def get_template_builtins(self, builtins):
return [import_library(x) for x in builtins]
def get_template_libraries(self, libraries):
loaded = {}
for name, path in libraries.items():
loaded[name] = import_library(path)
return loaded
@cached_property
def template_loaders(self):
return self.get_template_loaders(self.loaders)
def get_template_loaders(self, template_loaders):
loaders = []
for template_loader in template_loaders:
loader = self.find_template_loader(template_loader)
if loader is not None:
loaders.append(loader)
return loaders
def find_template_loader(self, loader):
if isinstance(loader, (tuple, list)):
loader, *args = loader
else:
args = []
if isinstance(loader, str):
loader_class = import_string(loader)
return loader_class(self, *args)
else:
raise ImproperlyConfigured(
"Invalid value in template loaders configuration: %r" % loader
)
def find_template(self, name, dirs=None, skip=None):
tried = []
for loader in self.template_loaders:
try:
template = loader.get_template(name, skip=skip)
return template, template.origin
except TemplateDoesNotExist as e:
tried.extend(e.tried)
raise TemplateDoesNotExist(name, tried=tried)
def from_string(self, template_code):
"""
Return a compiled Template object for the given template code,
handling template inheritance recursively.
"""
return Template(template_code, engine=self)
def get_template(self, template_name):
"""
Return a compiled Template object for the given template name,
handling template inheritance recursively.
"""
template, origin = self.find_template(template_name)
if not hasattr(template, "render"):
# template needs to be compiled
template = Template(template, origin, template_name, engine=self)
return template
def render_to_string(self, template_name, context=None):
"""
Render the template specified by template_name with the given context.
For use in Django's test suite.
"""
if isinstance(template_name, (list, tuple)):
t = self.select_template(template_name)
else:
t = self.get_template(template_name)
# Django < 1.8 accepted a Context in `context` even though that's
# unintended. Preserve this ability but don't rewrap `context`.
if isinstance(context, Context):
return t.render(context)
else:
return t.render(Context(context, autoescape=self.autoescape))
def select_template(self, template_name_list):
"""
Given a list of template names, return the first that can be loaded.
"""
if not template_name_list:
raise TemplateDoesNotExist("No template names provided")
not_found = []
for template_name in template_name_list:
try:
return self.get_template(template_name)
except TemplateDoesNotExist as exc:
if exc.args[0] not in not_found:
not_found.append(exc.args[0])
continue
# If we get here, none of the templates could be loaded
raise TemplateDoesNotExist(", ".join(not_found))
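# Illustrative usage sketch (not part of the original module); no template
# files are needed because from_string() compiles inline source, though
# rendering assumes Django settings are configured:
#
#     engine = Engine()
#     template = engine.from_string("Hello {{ name }}!")
#     template.render(Context({"name": "world"}))  # -> 'Hello world!'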
ac89b65656db94b5ab31345827524122e8b9c6c84033eda6a7225ff7cf713a06
"""
Clickjacking Protection Middleware.
This module provides a middleware that implements protection against a
malicious site loading resources from your site in a hidden frame.
"""
from django.conf import settings
from django.utils.deprecation import MiddlewareMixin
class XFrameOptionsMiddleware(MiddlewareMixin):
"""
Set the X-Frame-Options HTTP header in HTTP responses.
    Do not set the header if it's already set or if the response has an
    xframe_options_exempt attribute set to True.
    By default, set the X-Frame-Options header to 'DENY', meaning the response
    cannot be displayed in a frame, regardless of the site attempting to do so.
    To allow the response to be loaded in a frame within the same site, set
X_FRAME_OPTIONS in your project's Django settings to 'SAMEORIGIN'.
"""
def process_response(self, request, response):
# Don't set it if it's already in the response
if response.get("X-Frame-Options") is not None:
return response
# Don't set it if they used @xframe_options_exempt
if getattr(response, "xframe_options_exempt", False):
return response
response.headers["X-Frame-Options"] = self.get_xframe_options_value(
request,
response,
)
return response
def get_xframe_options_value(self, request, response):
"""
        Get the value to set for the X-Frame-Options header. Use the value
        from the X_FRAME_OPTIONS setting, or 'DENY' if it's not set.
This method can be overridden if needed, allowing it to vary based on
the request or response.
"""
return getattr(settings, "X_FRAME_OPTIONS", "DENY").upper()
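# Illustrative configuration sketch (settings values are examples):
#
#     # settings.py
#     MIDDLEWARE = [
#         # ...
#         "django.middleware.clickjacking.XFrameOptionsMiddleware",
#     ]
#     X_FRAME_OPTIONS = "SAMEORIGIN"  # defaults to "DENY" when unset
#
# Individual views can opt out with the @xframe_options_exempt decorator from
# django.views.decorators.clickjacking.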
e91b629d63225f803152ad1c4e24cb0f2ccfb9864230f3a54549a37aa9968b8d
from django.core.exceptions import ValidationError
from django.forms import Form
from django.forms.fields import BooleanField, IntegerField
from django.forms.renderers import get_default_renderer
from django.forms.utils import ErrorList, RenderableFormMixin
from django.forms.widgets import CheckboxInput, HiddenInput, NumberInput
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext
__all__ = ("BaseFormSet", "formset_factory", "all_valid")
# special field names
TOTAL_FORM_COUNT = "TOTAL_FORMS"
INITIAL_FORM_COUNT = "INITIAL_FORMS"
MIN_NUM_FORM_COUNT = "MIN_NUM_FORMS"
MAX_NUM_FORM_COUNT = "MAX_NUM_FORMS"
ORDERING_FIELD_NAME = "ORDER"
DELETION_FIELD_NAME = "DELETE"
# default minimum number of forms in a formset
DEFAULT_MIN_NUM = 0
# default maximum number of forms in a formset, to prevent memory exhaustion
DEFAULT_MAX_NUM = 1000
class ManagementForm(Form):
"""
Keep track of how many form instances are displayed on the page. If adding
new forms via JavaScript, you should increment the count field of this form
as well.
"""
TOTAL_FORMS = IntegerField(widget=HiddenInput)
INITIAL_FORMS = IntegerField(widget=HiddenInput)
# MIN_NUM_FORM_COUNT and MAX_NUM_FORM_COUNT are output with the rest of the
# management form, but only for the convenience of client-side code. The
# POST value of them returned from the client is not checked.
MIN_NUM_FORMS = IntegerField(required=False, widget=HiddenInput)
MAX_NUM_FORMS = IntegerField(required=False, widget=HiddenInput)
def clean(self):
cleaned_data = super().clean()
# When the management form is invalid, we don't know how many forms
# were submitted.
cleaned_data.setdefault(TOTAL_FORM_COUNT, 0)
cleaned_data.setdefault(INITIAL_FORM_COUNT, 0)
return cleaned_data
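# Illustrative sketch of the hidden management data a bound formset expects
# in a POST (prefix "form" is the default; values are examples):
#
#     data = {
#         "form-TOTAL_FORMS": "2",
#         "form-INITIAL_FORMS": "0",
#         "form-0-field": "a",
#         "form-1-field": "b",
#     }
#
# Missing or tampered TOTAL_FORMS/INITIAL_FORMS values surface as the
# "missing_management_form" non-form error during full_clean().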
class BaseFormSet(RenderableFormMixin):
"""
A collection of instances of the same Form class.
"""
deletion_widget = CheckboxInput
ordering_widget = NumberInput
default_error_messages = {
"missing_management_form": _(
"ManagementForm data is missing or has been tampered with. Missing fields: "
"%(field_names)s. You may need to file a bug report if the issue persists."
),
}
template_name_p = "django/forms/formsets/p.html"
template_name_table = "django/forms/formsets/table.html"
template_name_ul = "django/forms/formsets/ul.html"
def __init__(
self,
data=None,
files=None,
auto_id="id_%s",
prefix=None,
initial=None,
error_class=ErrorList,
form_kwargs=None,
error_messages=None,
):
self.is_bound = data is not None or files is not None
self.prefix = prefix or self.get_default_prefix()
self.auto_id = auto_id
self.data = data or {}
self.files = files or {}
self.initial = initial
self.form_kwargs = form_kwargs or {}
self.error_class = error_class
self._errors = None
self._non_form_errors = None
messages = {}
for cls in reversed(type(self).__mro__):
messages.update(getattr(cls, "default_error_messages", {}))
if error_messages is not None:
messages.update(error_messages)
self.error_messages = messages
def __iter__(self):
"""Yield the forms in the order they should be rendered."""
return iter(self.forms)
def __getitem__(self, index):
"""Return the form at the given index, based on the rendering order."""
return self.forms[index]
def __len__(self):
return len(self.forms)
def __bool__(self):
"""
Return True since all formsets have a management form which is not
included in the length.
"""
return True
def __repr__(self):
if self._errors is None:
is_valid = "Unknown"
else:
is_valid = (
self.is_bound
and not self._non_form_errors
and not any(form_errors for form_errors in self._errors)
)
return "<%s: bound=%s valid=%s total_forms=%s>" % (
self.__class__.__qualname__,
self.is_bound,
is_valid,
self.total_form_count(),
)
@cached_property
def management_form(self):
"""Return the ManagementForm instance for this FormSet."""
if self.is_bound:
form = ManagementForm(
self.data,
auto_id=self.auto_id,
prefix=self.prefix,
renderer=self.renderer,
)
form.full_clean()
else:
form = ManagementForm(
auto_id=self.auto_id,
prefix=self.prefix,
initial={
TOTAL_FORM_COUNT: self.total_form_count(),
INITIAL_FORM_COUNT: self.initial_form_count(),
MIN_NUM_FORM_COUNT: self.min_num,
MAX_NUM_FORM_COUNT: self.max_num,
},
renderer=self.renderer,
)
return form
def total_form_count(self):
"""Return the total number of forms in this FormSet."""
if self.is_bound:
# return absolute_max if it is lower than the actual total form
# count in the data; this is DoS protection to prevent clients
# from forcing the server to instantiate arbitrary numbers of
# forms
return min(
self.management_form.cleaned_data[TOTAL_FORM_COUNT], self.absolute_max
)
else:
initial_forms = self.initial_form_count()
total_forms = max(initial_forms, self.min_num) + self.extra
# Allow all existing related objects/inlines to be displayed,
# but don't allow extra beyond max_num.
if initial_forms > self.max_num >= 0:
total_forms = initial_forms
elif total_forms > self.max_num >= 0:
total_forms = self.max_num
return total_forms
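    # Illustrative arithmetic for the unbound branch above (numbers are
    # examples): with initial_form_count() == 3, min_num == 0, extra == 2,
    # and max_num == 4, total_forms starts at max(3, 0) + 2 == 5 and is then
    # capped to max_num, giving 4. With max_num == 2 all 3 initial forms
    # still render, because initial_forms > max_num keeps them visible.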
def initial_form_count(self):
"""Return the number of forms that are required in this FormSet."""
if self.is_bound:
return self.management_form.cleaned_data[INITIAL_FORM_COUNT]
else:
# Use the length of the initial data if it's there, 0 otherwise.
initial_forms = len(self.initial) if self.initial else 0
return initial_forms
@cached_property
def forms(self):
"""Instantiate forms at first property access."""
# DoS protection is included in total_form_count()
return [
self._construct_form(i, **self.get_form_kwargs(i))
for i in range(self.total_form_count())
]
def get_form_kwargs(self, index):
"""
Return additional keyword arguments for each individual formset form.
index will be None if the form being constructed is a new empty
form.
"""
return self.form_kwargs.copy()
def _construct_form(self, i, **kwargs):
"""Instantiate and return the i-th form instance in a formset."""
defaults = {
"auto_id": self.auto_id,
"prefix": self.add_prefix(i),
"error_class": self.error_class,
# Don't render the HTML 'required' attribute as it may cause
# incorrect validation for extra, optional, and deleted
# forms in the formset.
"use_required_attribute": False,
"renderer": self.renderer,
}
if self.is_bound:
defaults["data"] = self.data
defaults["files"] = self.files
if self.initial and "initial" not in kwargs:
try:
defaults["initial"] = self.initial[i]
except IndexError:
pass
# Allow extra forms to be empty, unless they're part of
# the minimum forms.
if i >= self.initial_form_count() and i >= self.min_num:
defaults["empty_permitted"] = True
defaults.update(kwargs)
form = self.form(**defaults)
self.add_fields(form, i)
return form
@property
def initial_forms(self):
"""Return a list of all the initial forms in this formset."""
return self.forms[: self.initial_form_count()]
@property
def extra_forms(self):
"""Return a list of all the extra forms in this formset."""
return self.forms[self.initial_form_count() :]
@property
def empty_form(self):
form = self.form(
auto_id=self.auto_id,
prefix=self.add_prefix("__prefix__"),
empty_permitted=True,
use_required_attribute=False,
**self.get_form_kwargs(None),
renderer=self.renderer,
)
self.add_fields(form, None)
return form
@property
def cleaned_data(self):
"""
Return a list of form.cleaned_data dicts for every form in self.forms.
"""
if not self.is_valid():
raise AttributeError(
"'%s' object has no attribute 'cleaned_data'" % self.__class__.__name__
)
return [form.cleaned_data for form in self.forms]
@property
def deleted_forms(self):
"""Return a list of forms that have been marked for deletion."""
if not self.is_valid() or not self.can_delete:
return []
# construct _deleted_form_indexes which is just a list of form indexes
# that have had their deletion widget set to True
if not hasattr(self, "_deleted_form_indexes"):
self._deleted_form_indexes = []
for i, form in enumerate(self.forms):
# if this is an extra form and hasn't changed, don't consider it
if i >= self.initial_form_count() and not form.has_changed():
continue
if self._should_delete_form(form):
self._deleted_form_indexes.append(i)
return [self.forms[i] for i in self._deleted_form_indexes]
@property
def ordered_forms(self):
"""
        Return a list of forms in the order specified by the incoming data.
Raise an AttributeError if ordering is not allowed.
"""
if not self.is_valid() or not self.can_order:
raise AttributeError(
"'%s' object has no attribute 'ordered_forms'" % self.__class__.__name__
)
# Construct _ordering, which is a list of (form_index, order_field_value)
# tuples. After constructing this list, we'll sort it by order_field_value
# so we have a way to get to the form indexes in the order specified
# by the form data.
if not hasattr(self, "_ordering"):
self._ordering = []
for i, form in enumerate(self.forms):
# if this is an extra form and hasn't changed, don't consider it
if i >= self.initial_form_count() and not form.has_changed():
continue
# don't add data marked for deletion to self.ordered_data
if self.can_delete and self._should_delete_form(form):
continue
self._ordering.append((i, form.cleaned_data[ORDERING_FIELD_NAME]))
# After we're done populating self._ordering, sort it.
# A sort function to order things numerically ascending, but
# None should be sorted below anything else. Allowing None as
# a comparison value makes it so we can leave ordering fields
# blank.
def compare_ordering_key(k):
if k[1] is None:
return (1, 0) # +infinity, larger than any number
return (0, k[1])
self._ordering.sort(key=compare_ordering_key)
# Return a list of form.cleaned_data dicts in the order specified by
# the form data.
return [self.forms[i[0]] for i in self._ordering]
@classmethod
def get_default_prefix(cls):
return "form"
@classmethod
def get_deletion_widget(cls):
return cls.deletion_widget
@classmethod
def get_ordering_widget(cls):
return cls.ordering_widget
def non_form_errors(self):
"""
Return an ErrorList of errors that aren't associated with a particular
form -- i.e., from formset.clean(). Return an empty ErrorList if there
are none.
"""
if self._non_form_errors is None:
self.full_clean()
return self._non_form_errors
@property
def errors(self):
"""Return a list of form.errors for every form in self.forms."""
if self._errors is None:
self.full_clean()
return self._errors
def total_error_count(self):
"""Return the number of errors across all forms in the formset."""
return len(self.non_form_errors()) + sum(
len(form_errors) for form_errors in self.errors
)
def _should_delete_form(self, form):
"""Return whether or not the form was marked for deletion."""
return form.cleaned_data.get(DELETION_FIELD_NAME, False)
def is_valid(self):
"""Return True if every form in self.forms is valid."""
if not self.is_bound:
return False
# Accessing errors triggers a full clean the first time only.
self.errors
# List comprehension ensures is_valid() is called for all forms.
# Forms due to be deleted shouldn't cause the formset to be invalid.
forms_valid = all(
[
form.is_valid()
for form in self.forms
if not (self.can_delete and self._should_delete_form(form))
]
)
return forms_valid and not self.non_form_errors()
def full_clean(self):
"""
Clean all of self.data and populate self._errors and
self._non_form_errors.
"""
self._errors = []
self._non_form_errors = self.error_class(
error_class="nonform", renderer=self.renderer
)
empty_forms_count = 0
if not self.is_bound: # Stop further processing.
return
if not self.management_form.is_valid():
error = ValidationError(
self.error_messages["missing_management_form"],
params={
"field_names": ", ".join(
self.management_form.add_prefix(field_name)
for field_name in self.management_form.errors
),
},
code="missing_management_form",
)
self._non_form_errors.append(error)
for i, form in enumerate(self.forms):
# Empty forms are unchanged forms beyond those with initial data.
if not form.has_changed() and i >= self.initial_form_count():
empty_forms_count += 1
# Accessing errors calls full_clean() if necessary.
# _should_delete_form() requires cleaned_data.
form_errors = form.errors
if self.can_delete and self._should_delete_form(form):
continue
self._errors.append(form_errors)
try:
if (
self.validate_max
and self.total_form_count() - len(self.deleted_forms) > self.max_num
) or self.management_form.cleaned_data[
TOTAL_FORM_COUNT
] > self.absolute_max:
raise ValidationError(
ngettext(
"Please submit at most %d form.",
"Please submit at most %d forms.",
self.max_num,
)
% self.max_num,
code="too_many_forms",
)
if (
self.validate_min
and self.total_form_count()
- len(self.deleted_forms)
- empty_forms_count
< self.min_num
):
raise ValidationError(
ngettext(
"Please submit at least %d form.",
"Please submit at least %d forms.",
self.min_num,
)
% self.min_num,
code="too_few_forms",
)
# Give self.clean() a chance to do cross-form validation.
self.clean()
except ValidationError as e:
self._non_form_errors = self.error_class(
e.error_list,
error_class="nonform",
renderer=self.renderer,
)
def clean(self):
"""
Hook for doing any extra formset-wide cleaning after Form.clean() has
been called on every form. Any ValidationError raised by this method
will not be associated with a particular form; it will be accessible
via formset.non_form_errors()
"""
pass
def has_changed(self):
"""Return True if data in any form differs from initial."""
return any(form.has_changed() for form in self)
def add_fields(self, form, index):
"""A hook for adding extra fields on to each form instance."""
initial_form_count = self.initial_form_count()
if self.can_order:
# Only pre-fill the ordering field for initial forms.
if index is not None and index < initial_form_count:
form.fields[ORDERING_FIELD_NAME] = IntegerField(
label=_("Order"),
initial=index + 1,
required=False,
widget=self.get_ordering_widget(),
)
else:
form.fields[ORDERING_FIELD_NAME] = IntegerField(
label=_("Order"),
required=False,
widget=self.get_ordering_widget(),
)
if self.can_delete and (self.can_delete_extra or index < initial_form_count):
form.fields[DELETION_FIELD_NAME] = BooleanField(
label=_("Delete"),
required=False,
widget=self.get_deletion_widget(),
)
def add_prefix(self, index):
return "%s-%s" % (self.prefix, index)
def is_multipart(self):
"""
        Return True if the formset needs to be multipart, i.e. if it
        contains a FileInput; otherwise, return False.
"""
if self.forms:
return self.forms[0].is_multipart()
else:
return self.empty_form.is_multipart()
@property
def media(self):
# All the forms on a FormSet are the same, so you only need to
# interrogate the first form for media.
if self.forms:
return self.forms[0].media
else:
return self.empty_form.media
@property
def template_name(self):
return self.renderer.formset_template_name
def get_context(self):
return {"formset": self}
def formset_factory(
form,
formset=BaseFormSet,
extra=1,
can_order=False,
can_delete=False,
max_num=None,
validate_max=False,
min_num=None,
validate_min=False,
absolute_max=None,
can_delete_extra=True,
renderer=None,
):
"""Return a FormSet for the given form class."""
if min_num is None:
min_num = DEFAULT_MIN_NUM
if max_num is None:
max_num = DEFAULT_MAX_NUM
# absolute_max is a hard limit on forms instantiated, to prevent
# memory-exhaustion attacks. Default to max_num + DEFAULT_MAX_NUM
# (which is 2 * DEFAULT_MAX_NUM if max_num is None in the first place).
if absolute_max is None:
absolute_max = max_num + DEFAULT_MAX_NUM
if max_num > absolute_max:
raise ValueError("'absolute_max' must be greater or equal to 'max_num'.")
attrs = {
"form": form,
"extra": extra,
"can_order": can_order,
"can_delete": can_delete,
"can_delete_extra": can_delete_extra,
"min_num": min_num,
"max_num": max_num,
"absolute_max": absolute_max,
"validate_min": validate_min,
"validate_max": validate_max,
"renderer": renderer or get_default_renderer(),
}
return type(form.__name__ + "FormSet", (formset,), attrs)
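# Illustrative usage sketch (ArticleForm is a hypothetical form class):
#
#     ArticleFormSet = formset_factory(ArticleForm, extra=2, can_delete=True)
#     formset = ArticleFormSet(initial=[{"title": "First"}])
#     formset.total_form_count()  # 1 initial + 2 extra == 3
#
# The generated class is a subclass of BaseFormSet (or of the given formset
# argument), with the factory options stored as class attributes.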
def all_valid(formsets):
"""Validate every formset and return True if all are valid."""
# List comprehension ensures is_valid() is called for all formsets.
return all([formset.is_valid() for formset in formsets])
0a21af382804f13323314e88a61a4019e269e272e28ba785a043f1375a0c5348
"""
HTML Widget classes
"""
import copy
import datetime
import warnings
from collections import defaultdict
from itertools import chain
from django.forms.utils import to_current_timezone
from django.templatetags.static import static
from django.utils import formats
from django.utils.datastructures import OrderedSet
from django.utils.dates import MONTHS
from django.utils.formats import get_format
from django.utils.html import format_html, html_safe
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import mark_safe
from django.utils.topological_sort import (
    CyclicDependencyError,
    stable_topological_sort,
)
from django.utils.translation import gettext_lazy as _
from .renderers import get_default_renderer
__all__ = (
"Media",
"MediaDefiningClass",
"Widget",
"TextInput",
"NumberInput",
"EmailInput",
"URLInput",
"PasswordInput",
"HiddenInput",
"MultipleHiddenInput",
"FileInput",
"ClearableFileInput",
"Textarea",
"DateInput",
"DateTimeInput",
"TimeInput",
"CheckboxInput",
"Select",
"NullBooleanSelect",
"SelectMultiple",
"RadioSelect",
"CheckboxSelectMultiple",
"MultiWidget",
"SplitDateTimeWidget",
"SplitHiddenDateTimeWidget",
"SelectDateWidget",
)
MEDIA_TYPES = ("css", "js")
class MediaOrderConflictWarning(RuntimeWarning):
pass
@html_safe
class Media:
def __init__(self, media=None, css=None, js=None):
if media is not None:
css = getattr(media, "css", {})
js = getattr(media, "js", [])
else:
if css is None:
css = {}
if js is None:
js = []
self._css_lists = [css]
self._js_lists = [js]
def __repr__(self):
return "Media(css=%r, js=%r)" % (self._css, self._js)
def __str__(self):
return self.render()
@property
def _css(self):
css = defaultdict(list)
for css_list in self._css_lists:
for medium, sublist in css_list.items():
css[medium].append(sublist)
return {medium: self.merge(*lists) for medium, lists in css.items()}
@property
def _js(self):
return self.merge(*self._js_lists)
def render(self):
return mark_safe(
"\n".join(
chain.from_iterable(
getattr(self, "render_" + name)() for name in MEDIA_TYPES
)
)
)
def render_js(self):
return [
path.__html__()
if hasattr(path, "__html__")
else format_html('<script src="{}"></script>', self.absolute_path(path))
for path in self._js
]
def render_css(self):
# To keep rendering order consistent, we can't just iterate over items().
# We need to sort the keys, and iterate over the sorted list.
media = sorted(self._css)
return chain.from_iterable(
[
path.__html__()
if hasattr(path, "__html__")
else format_html(
'<link href="{}" media="{}" rel="stylesheet">',
self.absolute_path(path),
medium,
)
for path in self._css[medium]
]
for medium in media
)
def absolute_path(self, path):
"""
Given a relative or absolute path to a static asset, return an absolute
path. An absolute path will be returned unchanged while a relative path
will be passed to django.templatetags.static.static().
"""
if path.startswith(("http://", "https://", "/")):
return path
return static(path)
def __getitem__(self, name):
"""Return a Media object that only contains media of the given type."""
if name in MEDIA_TYPES:
return Media(**{str(name): getattr(self, "_" + name)})
raise KeyError('Unknown media type "%s"' % name)
@staticmethod
def merge(*lists):
"""
Merge lists while trying to keep the relative order of the elements.
Warn if the lists have the same elements in a different relative order.
For static assets it can be important to have them included in the DOM
in a certain order. In JavaScript you may not be able to reference a
global or in CSS you might want to override a style.
"""
dependency_graph = defaultdict(set)
all_items = OrderedSet()
for list_ in filter(None, lists):
head = list_[0]
# The first items depend on nothing but have to be part of the
# dependency graph to be included in the result.
dependency_graph.setdefault(head, set())
for item in list_:
all_items.add(item)
# No self dependencies
if head != item:
dependency_graph[item].add(head)
head = item
try:
return stable_topological_sort(all_items, dependency_graph)
except CyclicDependencyError:
warnings.warn(
"Detected duplicate Media files in an opposite order: {}".format(
", ".join(repr(list_) for list_ in lists)
),
MediaOrderConflictWarning,
)
return list(all_items)
def __add__(self, other):
combined = Media()
combined._css_lists = self._css_lists[:]
combined._js_lists = self._js_lists[:]
for item in other._css_lists:
if item and item not in self._css_lists:
combined._css_lists.append(item)
for item in other._js_lists:
if item and item not in self._js_lists:
combined._js_lists.append(item)
return combined
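# Illustrative sketch of Media merging (asset paths are examples):
#
#     m1 = Media(js=["a.js", "b.js"])
#     m2 = Media(js=["b.js", "c.js"])
#     (m1 + m2)._js  # ['a.js', 'b.js', 'c.js'] -- relative order preserved
#
# Combining lists that disagree on relative order instead emits a
# MediaOrderConflictWarning and falls back to first-seen order.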
def media_property(cls):
def _media(self):
# Get the media property of the superclass, if it exists
sup_cls = super(cls, self)
try:
base = sup_cls.media
except AttributeError:
base = Media()
# Get the media definition for this class
definition = getattr(cls, "Media", None)
if definition:
extend = getattr(definition, "extend", True)
if extend:
if extend is True:
m = base
else:
m = Media()
for medium in extend:
m = m + base[medium]
return m + Media(definition)
return Media(definition)
return base
return property(_media)
class MediaDefiningClass(type):
"""
Metaclass for classes that can have media definitions.
"""
def __new__(mcs, name, bases, attrs):
new_class = super().__new__(mcs, name, bases, attrs)
if "media" not in attrs:
new_class.media = media_property(new_class)
return new_class
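# Illustrative sketch of a media definition on a widget (asset paths are
# examples):
#
#     class CalendarWidget(Widget):
#         class Media:
#             css = {"all": ["calendar.css"]}
#             js = ["calendar.js"]
#
# MediaDefiningClass turns the inner Media class into a `media` property via
# media_property(); set `extend = False` on Media to skip inherited assets.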
class Widget(metaclass=MediaDefiningClass):
    needs_multipart_form = False  # Whether this widget needs a multipart form.
is_localized = False
is_required = False
supports_microseconds = True
use_fieldset = False
def __init__(self, attrs=None):
self.attrs = {} if attrs is None else attrs.copy()
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
memo[id(self)] = obj
return obj
@property
def is_hidden(self):
return self.input_type == "hidden" if hasattr(self, "input_type") else False
def subwidgets(self, name, value, attrs=None):
context = self.get_context(name, value, attrs)
yield context["widget"]
def format_value(self, value):
"""
Return a value as it should appear when rendered in a template.
"""
if value == "" or value is None:
return None
if self.is_localized:
return formats.localize_input(value)
return str(value)
def get_context(self, name, value, attrs):
return {
"widget": {
"name": name,
"is_hidden": self.is_hidden,
"required": self.is_required,
"value": self.format_value(value),
"attrs": self.build_attrs(self.attrs, attrs),
"template_name": self.template_name,
},
}
def render(self, name, value, attrs=None, renderer=None):
"""Render the widget as an HTML string."""
context = self.get_context(name, value, attrs)
return self._render(self.template_name, context, renderer)
def _render(self, template_name, context, renderer=None):
if renderer is None:
renderer = get_default_renderer()
return mark_safe(renderer.render(template_name, context))
def build_attrs(self, base_attrs, extra_attrs=None):
"""Build an attribute dictionary."""
return {**base_attrs, **(extra_attrs or {})}
def value_from_datadict(self, data, files, name):
"""
Given a dictionary of data and this widget's name, return the value
of this widget or None if it's not provided.
"""
return data.get(name)
def value_omitted_from_data(self, data, files, name):
return name not in data
def id_for_label(self, id_):
"""
Return the HTML ID attribute of this Widget for use by a <label>,
given the ID of the field. Return None if no ID is available.
This hook is necessary because some widgets have multiple HTML
elements and, thus, multiple IDs. In that case, this method should
return an ID value that corresponds to the first ID in the widget's
tags.
"""
return id_
def use_required_attribute(self, initial):
return not self.is_hidden
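# Illustrative sketch of a minimal Widget subclass (the template path is an
# example and must exist in the project for rendering to work):
#
#     class ColorWidget(Widget):
#         template_name = "myapp/widgets/color.html"
#
#     ColorWidget().get_context("tone", "red", {"id": "id_tone"})
#     # -> {"widget": {"name": "tone", "value": "red", ...}}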
class Input(Widget):
"""
Base class for all <input> widgets.
"""
input_type = None # Subclasses must define this.
template_name = "django/forms/widgets/input.html"
def __init__(self, attrs=None):
if attrs is not None:
attrs = attrs.copy()
self.input_type = attrs.pop("type", self.input_type)
super().__init__(attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context["widget"]["type"] = self.input_type
return context
class TextInput(Input):
input_type = "text"
template_name = "django/forms/widgets/text.html"
class NumberInput(Input):
input_type = "number"
template_name = "django/forms/widgets/number.html"
class EmailInput(Input):
input_type = "email"
template_name = "django/forms/widgets/email.html"
class URLInput(Input):
input_type = "url"
template_name = "django/forms/widgets/url.html"
class PasswordInput(Input):
input_type = "password"
template_name = "django/forms/widgets/password.html"
def __init__(self, attrs=None, render_value=False):
super().__init__(attrs)
self.render_value = render_value
def get_context(self, name, value, attrs):
if not self.render_value:
value = None
return super().get_context(name, value, attrs)
class HiddenInput(Input):
input_type = "hidden"
template_name = "django/forms/widgets/hidden.html"
class MultipleHiddenInput(HiddenInput):
"""
Handle <input type="hidden"> for fields that have a list
of values.
"""
template_name = "django/forms/widgets/multiple_hidden.html"
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
final_attrs = context["widget"]["attrs"]
id_ = context["widget"]["attrs"].get("id")
subwidgets = []
for index, value_ in enumerate(context["widget"]["value"]):
widget_attrs = final_attrs.copy()
if id_:
# An ID attribute was given. Add a numeric index as a suffix
# so that the inputs don't all have the same ID attribute.
widget_attrs["id"] = "%s_%s" % (id_, index)
widget = HiddenInput()
widget.is_required = self.is_required
subwidgets.append(widget.get_context(name, value_, widget_attrs)["widget"])
context["widget"]["subwidgets"] = subwidgets
return context
def value_from_datadict(self, data, files, name):
try:
getter = data.getlist
except AttributeError:
getter = data.get
return getter(name)
def format_value(self, value):
return [] if value is None else value
class FileInput(Input):
input_type = "file"
needs_multipart_form = True
template_name = "django/forms/widgets/file.html"
def format_value(self, value):
"""File input never renders a value."""
return
def value_from_datadict(self, data, files, name):
"File widgets take data from FILES, not POST"
return files.get(name)
def value_omitted_from_data(self, data, files, name):
return name not in files
def use_required_attribute(self, initial):
return super().use_required_attribute(initial) and not initial
FILE_INPUT_CONTRADICTION = object()
class ClearableFileInput(FileInput):
clear_checkbox_label = _("Clear")
initial_text = _("Currently")
input_text = _("Change")
template_name = "django/forms/widgets/clearable_file_input.html"
def clear_checkbox_name(self, name):
"""
Given the name of the file input, return the name of the clear checkbox
input.
"""
return name + "-clear"
def clear_checkbox_id(self, name):
"""
Given the name of the clear checkbox input, return the HTML id for it.
"""
return name + "_id"
def is_initial(self, value):
"""
        Return whether value is considered to be an initial value.
"""
return bool(value and getattr(value, "url", False))
def format_value(self, value):
"""
Return the file object if it has a defined url attribute.
"""
if self.is_initial(value):
return value
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
checkbox_name = self.clear_checkbox_name(name)
checkbox_id = self.clear_checkbox_id(checkbox_name)
context["widget"].update(
{
"checkbox_name": checkbox_name,
"checkbox_id": checkbox_id,
"is_initial": self.is_initial(value),
"input_text": self.input_text,
"initial_text": self.initial_text,
"clear_checkbox_label": self.clear_checkbox_label,
}
)
return context
def value_from_datadict(self, data, files, name):
upload = super().value_from_datadict(data, files, name)
if not self.is_required and CheckboxInput().value_from_datadict(
data, files, self.clear_checkbox_name(name)
):
if upload:
# If the user contradicts themselves (uploads a new file AND
# checks the "clear" checkbox), we return a unique marker
# object that FileField will turn into a ValidationError.
return FILE_INPUT_CONTRADICTION
# False signals to clear any existing value, as opposed to just None
return False
return upload
def value_omitted_from_data(self, data, files, name):
return (
super().value_omitted_from_data(data, files, name)
and self.clear_checkbox_name(name) not in data
)
class Textarea(Widget):
template_name = "django/forms/widgets/textarea.html"
def __init__(self, attrs=None):
# Use slightly better defaults than HTML's 20x2 box
default_attrs = {"cols": "40", "rows": "10"}
if attrs:
default_attrs.update(attrs)
super().__init__(default_attrs)
class DateTimeBaseInput(TextInput):
format_key = ""
supports_microseconds = False
def __init__(self, attrs=None, format=None):
super().__init__(attrs)
self.format = format or None
def format_value(self, value):
return formats.localize_input(
value, self.format or formats.get_format(self.format_key)[0]
)
class DateInput(DateTimeBaseInput):
format_key = "DATE_INPUT_FORMATS"
template_name = "django/forms/widgets/date.html"
class DateTimeInput(DateTimeBaseInput):
format_key = "DATETIME_INPUT_FORMATS"
template_name = "django/forms/widgets/datetime.html"
class TimeInput(DateTimeBaseInput):
format_key = "TIME_INPUT_FORMATS"
template_name = "django/forms/widgets/time.html"
# Defined at module level so that CheckboxInput is picklable (#17976)
def boolean_check(v):
return not (v is False or v is None or v == "")
class CheckboxInput(Input):
input_type = "checkbox"
template_name = "django/forms/widgets/checkbox.html"
def __init__(self, attrs=None, check_test=None):
super().__init__(attrs)
# check_test is a callable that takes a value and returns True
# if the checkbox should be checked for that value.
self.check_test = boolean_check if check_test is None else check_test
def format_value(self, value):
"""Only return the 'value' attribute if value isn't empty."""
if value is True or value is False or value is None or value == "":
return
return str(value)
def get_context(self, name, value, attrs):
if self.check_test(value):
attrs = {**(attrs or {}), "checked": True}
return super().get_context(name, value, attrs)
def value_from_datadict(self, data, files, name):
if name not in data:
# A missing value means False because HTML form submission does not
# send results for unselected checkboxes.
return False
value = data.get(name)
# Translate true and false strings to boolean values.
values = {"true": True, "false": False}
if isinstance(value, str):
value = values.get(value.lower(), value)
return bool(value)
def value_omitted_from_data(self, data, files, name):
# HTML checkboxes don't appear in POST data if not checked, so it's
# never known if the value is actually omitted.
return False
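# Illustrative sketch, not part of Django's source: a missing key means the
# checkbox was unchecked, and "true"/"false" strings are coerced to booleans.
#
#   >>> w = CheckboxInput()
#   >>> w.value_from_datadict({}, {}, "subscribe")
#   False
#   >>> w.value_from_datadict({"subscribe": "false"}, {}, "subscribe")
#   False
#   >>> w.value_from_datadict({"subscribe": "on"}, {}, "subscribe")
#   True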
class ChoiceWidget(Widget):
allow_multiple_selected = False
input_type = None
template_name = None
option_template_name = None
add_id_index = True
checked_attribute = {"checked": True}
option_inherits_attrs = True
def __init__(self, attrs=None, choices=()):
super().__init__(attrs)
# choices can be any iterable, but we may need to render this widget
# multiple times. Thus, collapse it into a list so it can be consumed
# more than once.
self.choices = list(choices)
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.attrs = self.attrs.copy()
obj.choices = copy.copy(self.choices)
memo[id(self)] = obj
return obj
def subwidgets(self, name, value, attrs=None):
"""
Yield all "subwidgets" of this widget. Used to enable iterating
options from a BoundField for choice widgets.
"""
value = self.format_value(value)
yield from self.options(name, value, attrs)
def options(self, name, value, attrs=None):
"""Yield a flat list of options for this widget."""
for group in self.optgroups(name, value, attrs):
yield from group[1]
def optgroups(self, name, value, attrs=None):
"""Return a list of optgroups for this widget."""
groups = []
has_selected = False
for index, (option_value, option_label) in enumerate(self.choices):
if option_value is None:
option_value = ""
subgroup = []
if isinstance(option_label, (list, tuple)):
group_name = option_value
subindex = 0
choices = option_label
else:
group_name = None
subindex = None
choices = [(option_value, option_label)]
groups.append((group_name, subgroup, index))
for subvalue, sublabel in choices:
selected = (not has_selected or self.allow_multiple_selected) and str(
subvalue
) in value
has_selected |= selected
subgroup.append(
self.create_option(
name,
subvalue,
sublabel,
selected,
index,
subindex=subindex,
attrs=attrs,
)
)
if subindex is not None:
subindex += 1
return groups
def create_option(
self, name, value, label, selected, index, subindex=None, attrs=None
):
index = str(index) if subindex is None else "%s_%s" % (index, subindex)
option_attrs = (
self.build_attrs(self.attrs, attrs) if self.option_inherits_attrs else {}
)
if selected:
option_attrs.update(self.checked_attribute)
if "id" in option_attrs:
option_attrs["id"] = self.id_for_label(option_attrs["id"], index)
return {
"name": name,
"value": value,
"label": label,
"selected": selected,
"index": index,
"attrs": option_attrs,
"type": self.input_type,
"template_name": self.option_template_name,
"wrap_label": True,
}
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context["widget"]["optgroups"] = self.optgroups(
name, context["widget"]["value"], attrs
)
return context
def id_for_label(self, id_, index="0"):
"""
Use an incremented id for each option where the main widget
references the zero index.
"""
if id_ and self.add_id_index:
id_ = "%s_%s" % (id_, index)
return id_
def value_from_datadict(self, data, files, name):
getter = data.get
if self.allow_multiple_selected:
try:
getter = data.getlist
except AttributeError:
pass
return getter(name)
def format_value(self, value):
"""Return selected values as a list."""
if value is None and self.allow_multiple_selected:
return []
if not isinstance(value, (tuple, list)):
value = [value]
return [str(v) if v is not None else "" for v in value]
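# Illustrative sketch, not part of Django's source: format_value() normalizes
# whatever the field hands it into a list of strings, which is what lets
# optgroups() test membership with `str(subvalue) in value`.
#
#   >>> ChoiceWidget(choices=[(1, "One")]).format_value(1)
#   ['1']
#   >>> ChoiceWidget(choices=[]).format_value(None)  # single-select widget
#   ['']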
class Select(ChoiceWidget):
input_type = "select"
template_name = "django/forms/widgets/select.html"
option_template_name = "django/forms/widgets/select_option.html"
add_id_index = False
checked_attribute = {"selected": True}
option_inherits_attrs = False
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
if self.allow_multiple_selected:
context["widget"]["attrs"]["multiple"] = True
return context
@staticmethod
def _choice_has_empty_value(choice):
"""Return True if the choice's value is empty string or None."""
value, _ = choice
return value is None or value == ""
def use_required_attribute(self, initial):
"""
Don't render 'required' if the first <option> has a value, as that's
invalid HTML.
"""
use_required_attribute = super().use_required_attribute(initial)
# 'required' is always okay for <select multiple>.
if self.allow_multiple_selected:
return use_required_attribute
first_choice = next(iter(self.choices), None)
return (
use_required_attribute
and first_choice is not None
and self._choice_has_empty_value(first_choice)
)
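# Illustrative sketch, not part of Django's source: 'required' is rendered
# only when the first option is a placeholder, because a <select required>
# whose first <option> has a value is invalid HTML.
#
#   >>> Select(choices=[("", "---"), ("a", "A")]).use_required_attribute(None)
#   True
#   >>> Select(choices=[("a", "A")]).use_required_attribute(None)
#   False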
class NullBooleanSelect(Select):
"""
A Select Widget intended to be used with NullBooleanField.
"""
def __init__(self, attrs=None):
choices = (
("unknown", _("Unknown")),
("true", _("Yes")),
("false", _("No")),
)
super().__init__(attrs, choices)
def format_value(self, value):
try:
return {
True: "true",
False: "false",
"true": "true",
"false": "false",
# For backwards compatibility with Django < 2.2.
"2": "true",
"3": "false",
}[value]
except KeyError:
return "unknown"
def value_from_datadict(self, data, files, name):
value = data.get(name)
return {
True: True,
"True": True,
"False": False,
False: False,
"true": True,
"false": False,
# For backwards compatibility with Django < 2.2.
"2": True,
"3": False,
}.get(value)
class SelectMultiple(Select):
allow_multiple_selected = True
def value_from_datadict(self, data, files, name):
try:
getter = data.getlist
except AttributeError:
getter = data.get
return getter(name)
def value_omitted_from_data(self, data, files, name):
# An unselected <select multiple> doesn't appear in POST data, so it's
# never known if the value is actually omitted.
return False
class RadioSelect(ChoiceWidget):
input_type = "radio"
template_name = "django/forms/widgets/radio.html"
option_template_name = "django/forms/widgets/radio_option.html"
use_fieldset = True
def id_for_label(self, id_, index=None):
"""
Don't include for="field_0" in <label> to improve accessibility when
using a screen reader, in addition clicking such a label would toggle
the first input.
"""
if index is None:
return ""
return super().id_for_label(id_, index)
class CheckboxSelectMultiple(RadioSelect):
allow_multiple_selected = True
input_type = "checkbox"
template_name = "django/forms/widgets/checkbox_select.html"
option_template_name = "django/forms/widgets/checkbox_option.html"
def use_required_attribute(self, initial):
# Don't use the 'required' attribute because browser validation would
# require all checkboxes to be checked instead of at least one.
return False
def value_omitted_from_data(self, data, files, name):
# HTML checkboxes don't appear in POST data if not checked, so it's
# never known if the value is actually omitted.
return False
class MultiWidget(Widget):
"""
A widget that is composed of multiple widgets.
In addition to the values added by Widget.get_context(), this widget
adds a list of subwidgets to the context as widget['subwidgets'].
These can be looped over and rendered like normal widgets.
You'll probably want to use this class with MultiValueField.
"""
template_name = "django/forms/widgets/multiwidget.html"
use_fieldset = True
def __init__(self, widgets, attrs=None):
if isinstance(widgets, dict):
self.widgets_names = [("_%s" % name) if name else "" for name in widgets]
widgets = widgets.values()
else:
self.widgets_names = ["_%s" % i for i in range(len(widgets))]
self.widgets = [w() if isinstance(w, type) else w for w in widgets]
super().__init__(attrs)
@property
def is_hidden(self):
return all(w.is_hidden for w in self.widgets)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
if self.is_localized:
for widget in self.widgets:
widget.is_localized = self.is_localized
# value is a list/tuple of values, each corresponding to a widget
# in self.widgets.
if not isinstance(value, (list, tuple)):
value = self.decompress(value)
final_attrs = context["widget"]["attrs"]
input_type = final_attrs.pop("type", None)
id_ = final_attrs.get("id")
subwidgets = []
for i, (widget_name, widget) in enumerate(
zip(self.widgets_names, self.widgets)
):
if input_type is not None:
widget.input_type = input_type
widget_name = name + widget_name
try:
widget_value = value[i]
except IndexError:
widget_value = None
if id_:
widget_attrs = final_attrs.copy()
widget_attrs["id"] = "%s_%s" % (id_, i)
else:
widget_attrs = final_attrs
subwidgets.append(
widget.get_context(widget_name, widget_value, widget_attrs)["widget"]
)
context["widget"]["subwidgets"] = subwidgets
return context
def id_for_label(self, id_):
return ""
def value_from_datadict(self, data, files, name):
return [
widget.value_from_datadict(data, files, name + widget_name)
for widget_name, widget in zip(self.widgets_names, self.widgets)
]
def value_omitted_from_data(self, data, files, name):
return all(
widget.value_omitted_from_data(data, files, name + widget_name)
for widget_name, widget in zip(self.widgets_names, self.widgets)
)
def decompress(self, value):
"""
Return a list of decompressed values for the given compressed value.
The given value can be assumed to be valid, but not necessarily
non-empty.
"""
raise NotImplementedError("Subclasses must implement this method.")
def _get_media(self):
"""
Media for a multiwidget is the combination of all media of the
subwidgets.
"""
media = Media()
for w in self.widgets:
media = media + w.media
return media
media = property(_get_media)
def __deepcopy__(self, memo):
obj = super().__deepcopy__(memo)
obj.widgets = copy.deepcopy(self.widgets)
return obj
@property
def needs_multipart_form(self):
return any(w.needs_multipart_form for w in self.widgets)
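# Illustrative sketch, not part of Django's source: a minimal MultiWidget
# subclass. Subwidget HTML names become "<name>_0"/"<name>_1" (or
# "<name>_<key>" when a dict of widgets is passed), and decompress() maps
# the single compressed value back onto the subwidgets.
#
#   class PhoneWidget(MultiWidget):
#       def __init__(self, attrs=None):
#           super().__init__([TextInput(), TextInput()], attrs)
#
#       def decompress(self, value):
#           if value:
#               return value.split("-", 1)  # "555-0100" -> ["555", "0100"]
#           return [None, None]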
class SplitDateTimeWidget(MultiWidget):
"""
A widget that splits datetime input into two <input type="text"> boxes.
"""
supports_microseconds = False
template_name = "django/forms/widgets/splitdatetime.html"
def __init__(
self,
attrs=None,
date_format=None,
time_format=None,
date_attrs=None,
time_attrs=None,
):
widgets = (
DateInput(
attrs=attrs if date_attrs is None else date_attrs,
format=date_format,
),
TimeInput(
attrs=attrs if time_attrs is None else time_attrs,
format=time_format,
),
)
super().__init__(widgets)
def decompress(self, value):
if value:
value = to_current_timezone(value)
return [value.date(), value.time()]
return [None, None]
class SplitHiddenDateTimeWidget(SplitDateTimeWidget):
"""
A widget that splits datetime input into two <input type="hidden"> inputs.
"""
template_name = "django/forms/widgets/splithiddendatetime.html"
def __init__(
self,
attrs=None,
date_format=None,
time_format=None,
date_attrs=None,
time_attrs=None,
):
super().__init__(attrs, date_format, time_format, date_attrs, time_attrs)
for widget in self.widgets:
widget.input_type = "hidden"
class SelectDateWidget(Widget):
"""
A widget that splits date input into three <select> boxes.
This also serves as an example of a Widget that has more than one HTML
element and hence implements value_from_datadict.
"""
none_value = ("", "---")
month_field = "%s_month"
day_field = "%s_day"
year_field = "%s_year"
template_name = "django/forms/widgets/select_date.html"
input_type = "select"
select_widget = Select
date_re = _lazy_re_compile(r"(\d{4}|0)-(\d\d?)-(\d\d?)$")
use_fieldset = True
def __init__(self, attrs=None, years=None, months=None, empty_label=None):
self.attrs = attrs or {}
# Optional list or tuple of years to use in the "year" select box.
if years:
self.years = years
else:
this_year = datetime.date.today().year
self.years = range(this_year, this_year + 10)
# Optional dict of months to use in the "month" select box.
if months:
self.months = months
else:
self.months = MONTHS
# Optional string, list, or tuple to use as empty_label.
if isinstance(empty_label, (list, tuple)):
if not len(empty_label) == 3:
raise ValueError("empty_label list/tuple must have 3 elements.")
self.year_none_value = ("", empty_label[0])
self.month_none_value = ("", empty_label[1])
self.day_none_value = ("", empty_label[2])
else:
if empty_label is not None:
self.none_value = ("", empty_label)
self.year_none_value = self.none_value
self.month_none_value = self.none_value
self.day_none_value = self.none_value
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
date_context = {}
year_choices = [(i, str(i)) for i in self.years]
if not self.is_required:
year_choices.insert(0, self.year_none_value)
year_name = self.year_field % name
date_context["year"] = self.select_widget(
attrs, choices=year_choices
).get_context(
name=year_name,
value=context["widget"]["value"]["year"],
attrs={**context["widget"]["attrs"], "id": "id_%s" % year_name},
)
month_choices = list(self.months.items())
if not self.is_required:
month_choices.insert(0, self.month_none_value)
month_name = self.month_field % name
date_context["month"] = self.select_widget(
attrs, choices=month_choices
).get_context(
name=month_name,
value=context["widget"]["value"]["month"],
attrs={**context["widget"]["attrs"], "id": "id_%s" % month_name},
)
day_choices = [(i, i) for i in range(1, 32)]
if not self.is_required:
day_choices.insert(0, self.day_none_value)
day_name = self.day_field % name
date_context["day"] = self.select_widget(
attrs,
choices=day_choices,
).get_context(
name=day_name,
value=context["widget"]["value"]["day"],
attrs={**context["widget"]["attrs"], "id": "id_%s" % day_name},
)
subwidgets = []
for field in self._parse_date_fmt():
subwidgets.append(date_context[field]["widget"])
context["widget"]["subwidgets"] = subwidgets
return context
def format_value(self, value):
"""
Return a dict containing the year, month, and day of the current value.
Use dict instead of a datetime to allow invalid dates such as February
31 to display correctly.
"""
year, month, day = None, None, None
if isinstance(value, (datetime.date, datetime.datetime)):
year, month, day = value.year, value.month, value.day
elif isinstance(value, str):
match = self.date_re.match(value)
if match:
# Convert any zeros in the date to empty strings to match the
# empty option value.
year, month, day = [int(val) or "" for val in match.groups()]
else:
input_format = get_format("DATE_INPUT_FORMATS")[0]
try:
d = datetime.datetime.strptime(value, input_format)
except ValueError:
pass
else:
year, month, day = d.year, d.month, d.day
return {"year": year, "month": month, "day": day}
@staticmethod
def _parse_date_fmt():
fmt = get_format("DATE_FORMAT")
escaped = False
for char in fmt:
if escaped:
escaped = False
elif char == "\\":
escaped = True
elif char in "Yy":
yield "year"
elif char in "bEFMmNn":
yield "month"
elif char in "dj":
yield "day"
def id_for_label(self, id_):
for first_select in self._parse_date_fmt():
return "%s_%s" % (id_, first_select)
return "%s_month" % id_
def value_from_datadict(self, data, files, name):
y = data.get(self.year_field % name)
m = data.get(self.month_field % name)
d = data.get(self.day_field % name)
if y == m == d == "":
return None
if y is not None and m is not None and d is not None:
input_format = get_format("DATE_INPUT_FORMATS")[0]
input_format = formats.sanitize_strftime_format(input_format)
try:
date_value = datetime.date(int(y), int(m), int(d))
except ValueError:
# Return pseudo-ISO dates with zeros for any unselected values,
# e.g. '2017-0-23'.
return "%s-%s-%s" % (y or 0, m or 0, d or 0)
return date_value.strftime(input_format)
return data.get(name)
def value_omitted_from_data(self, data, files, name):
return not any(
("{}_{}".format(name, interval) in data)
for interval in ("year", "month", "day")
)
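# Illustrative sketch, not part of Django's source: with only some of the
# three selects filled in, value_from_datadict() returns a pseudo-ISO string
# with zeros so the field can report a clean validation error.
#
#   >>> w = SelectDateWidget()
#   >>> data = {"birth_year": "2017", "birth_month": "", "birth_day": "23"}
#   >>> w.value_from_datadict(data, {}, "birth")
#   '2017-0-23'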
|
850bf394ae0105f6e6604d60ae8da11a19d76bc11bf3f3fe6733c1fc796d3f6b | import functools
from pathlib import Path
from django.conf import settings
from django.template.backends.django import DjangoTemplates
from django.template.loader import get_template
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
@functools.lru_cache
def get_default_renderer():
renderer_class = import_string(settings.FORM_RENDERER)
return renderer_class()
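# Illustrative note, not part of Django's source: the stock value of
# settings.FORM_RENDERER is "django.forms.renderers.DjangoTemplates", so by
# default this resolves to an instance of the DjangoTemplates renderer below.
# Pointing FORM_RENDERER at "django.forms.renderers.TemplatesSetting" instead
# makes widget templates load through the normal TEMPLATES configuration.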
class BaseRenderer:
form_template_name = "django/forms/default.html"
formset_template_name = "django/forms/formsets/default.html"
def get_template(self, template_name):
raise NotImplementedError("subclasses must implement get_template()")
def render(self, template_name, context, request=None):
template = self.get_template(template_name)
return template.render(context, request=request).strip()
class EngineMixin:
def get_template(self, template_name):
return self.engine.get_template(template_name)
@cached_property
def engine(self):
return self.backend(
{
"APP_DIRS": True,
"DIRS": [Path(__file__).parent / self.backend.app_dirname],
"NAME": "djangoforms",
"OPTIONS": {},
}
)
class DjangoTemplates(EngineMixin, BaseRenderer):
"""
Load Django templates from the built-in widget templates in
django/forms/templates and from apps' 'templates' directory.
"""
backend = DjangoTemplates
class Jinja2(EngineMixin, BaseRenderer):
"""
Load Jinja2 templates from the built-in widget templates in
django/forms/jinja2 and from apps' 'jinja2' directory.
"""
@cached_property
def backend(self):
from django.template.backends.jinja2 import Jinja2
return Jinja2
class TemplatesSetting(BaseRenderer):
"""
Load templates using template.loader.get_template() which is configured
based on settings.TEMPLATES.
"""
def get_template(self, template_name):
return get_template(template_name)
|
8d07167ed71497f1ea27451a62ff6030f2acdc25fc49e0fddf3bbc294a41704f | """
Form classes
"""
import copy
import datetime
import warnings
from django.core.exceptions import NON_FIELD_ERRORS, ValidationError
from django.forms.fields import Field, FileField
from django.forms.utils import ErrorDict, ErrorList, RenderableFormMixin
from django.forms.widgets import Media, MediaDefiningClass
from django.utils.datastructures import MultiValueDict
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.functional import cached_property
from django.utils.html import conditional_escape
from django.utils.safestring import SafeString, mark_safe
from django.utils.translation import gettext as _
from .renderers import get_default_renderer
__all__ = ("BaseForm", "Form")
class DeclarativeFieldsMetaclass(MediaDefiningClass):
"""Collect Fields declared on the base classes."""
def __new__(mcs, name, bases, attrs):
# Collect fields from current class and remove them from attrs.
attrs["declared_fields"] = {
key: attrs.pop(key)
for key, value in list(attrs.items())
if isinstance(value, Field)
}
new_class = super().__new__(mcs, name, bases, attrs)
# Walk through the MRO.
declared_fields = {}
for base in reversed(new_class.__mro__):
# Collect fields from base class.
if hasattr(base, "declared_fields"):
declared_fields.update(base.declared_fields)
# Field shadowing.
for attr, value in base.__dict__.items():
if value is None and attr in declared_fields:
declared_fields.pop(attr)
new_class.base_fields = declared_fields
new_class.declared_fields = declared_fields
return new_class
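# Illustrative sketch, not part of Django's source: the "field shadowing"
# pass above lets a subclass drop an inherited field by assigning None.
#
#   class PersonForm(Form):
#       name = CharField()
#       age = IntegerField()
#
#   class NameOnlyForm(PersonForm):
#       age = None  # removed from declared_fields/base_fields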
class BaseForm(RenderableFormMixin):
"""
The main implementation of all the Form logic. Note that this class is
    different from Form. See the comments by the Form class for more info. Any
improvements to the form API should be made to this class, not to the Form
class.
"""
default_renderer = None
field_order = None
prefix = None
use_required_attribute = True
template_name_p = "django/forms/p.html"
template_name_table = "django/forms/table.html"
template_name_ul = "django/forms/ul.html"
template_name_label = "django/forms/label.html"
def __init__(
self,
data=None,
files=None,
auto_id="id_%s",
prefix=None,
initial=None,
error_class=ErrorList,
label_suffix=None,
empty_permitted=False,
field_order=None,
use_required_attribute=None,
renderer=None,
):
self.is_bound = data is not None or files is not None
self.data = MultiValueDict() if data is None else data
self.files = MultiValueDict() if files is None else files
self.auto_id = auto_id
if prefix is not None:
self.prefix = prefix
self.initial = initial or {}
self.error_class = error_class
# Translators: This is the default suffix added to form field labels
self.label_suffix = label_suffix if label_suffix is not None else _(":")
self.empty_permitted = empty_permitted
self._errors = None # Stores the errors after clean() has been called.
# The base_fields class attribute is the *class-wide* definition of
# fields. Because a particular *instance* of the class might want to
# alter self.fields, we create self.fields here by copying base_fields.
# Instances should always modify self.fields; they should not modify
# self.base_fields.
self.fields = copy.deepcopy(self.base_fields)
self._bound_fields_cache = {}
self.order_fields(self.field_order if field_order is None else field_order)
if use_required_attribute is not None:
self.use_required_attribute = use_required_attribute
if self.empty_permitted and self.use_required_attribute:
raise ValueError(
"The empty_permitted and use_required_attribute arguments may "
"not both be True."
)
# Initialize form renderer. Use a global default if not specified
# either as an argument or as self.default_renderer.
if renderer is None:
if self.default_renderer is None:
renderer = get_default_renderer()
else:
renderer = self.default_renderer
if isinstance(self.default_renderer, type):
renderer = renderer()
self.renderer = renderer
def order_fields(self, field_order):
"""
Rearrange the fields according to field_order.
field_order is a list of field names specifying the order. Append fields
not included in the list in the default order for backward compatibility
with subclasses not overriding field_order. If field_order is None,
keep all fields in the order defined in the class. Ignore unknown
fields in field_order to allow disabling fields in form subclasses
without redefining ordering.
"""
if field_order is None:
return
fields = {}
for key in field_order:
try:
fields[key] = self.fields.pop(key)
except KeyError: # ignore unknown fields
pass
fields.update(self.fields) # add remaining fields in original order
self.fields = fields
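    # Illustrative sketch, not part of Django's source: unknown names are
    # ignored and unlisted fields keep their declared order.
    #
    #   fields declared as: a, b, c
    #   order_fields(["c", "missing", "a"])  ->  c, a, b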
def __repr__(self):
if self._errors is None:
is_valid = "Unknown"
else:
is_valid = self.is_bound and not self._errors
return "<%(cls)s bound=%(bound)s, valid=%(valid)s, fields=(%(fields)s)>" % {
"cls": self.__class__.__name__,
"bound": self.is_bound,
"valid": is_valid,
"fields": ";".join(self.fields),
}
def _bound_items(self):
"""Yield (name, bf) pairs, where bf is a BoundField object."""
for name in self.fields:
yield name, self[name]
def __iter__(self):
"""Yield the form's fields as BoundField objects."""
for name in self.fields:
yield self[name]
def __getitem__(self, name):
"""Return a BoundField with the given name."""
try:
return self._bound_fields_cache[name]
except KeyError:
pass
try:
field = self.fields[name]
except KeyError:
raise KeyError(
"Key '%s' not found in '%s'. Choices are: %s."
% (
name,
self.__class__.__name__,
", ".join(sorted(self.fields)),
)
)
bound_field = field.get_bound_field(self, name)
self._bound_fields_cache[name] = bound_field
return bound_field
@property
def errors(self):
"""Return an ErrorDict for the data provided for the form."""
if self._errors is None:
self.full_clean()
return self._errors
def is_valid(self):
"""Return True if the form has no errors, or False otherwise."""
return self.is_bound and not self.errors
def add_prefix(self, field_name):
"""
Return the field name with a prefix appended, if this Form has a
prefix set.
Subclasses may wish to override.
"""
return "%s-%s" % (self.prefix, field_name) if self.prefix else field_name
def add_initial_prefix(self, field_name):
"""Add an 'initial' prefix for checking dynamic initial values."""
return "initial-%s" % self.add_prefix(field_name)
def _widget_data_value(self, widget, html_name):
# value_from_datadict() gets the data from the data dictionaries.
# Each widget type knows how to retrieve its own data, because some
# widgets split data over several HTML fields.
return widget.value_from_datadict(self.data, self.files, html_name)
def _html_output(
self, normal_row, error_row, row_ender, help_text_html, errors_on_separate_row
):
"Output HTML. Used by as_table(), as_ul(), as_p()."
warnings.warn(
"django.forms.BaseForm._html_output() is deprecated. "
"Please use .render() and .get_context() instead.",
RemovedInDjango50Warning,
stacklevel=2,
)
# Errors that should be displayed above all fields.
top_errors = self.non_field_errors().copy()
output, hidden_fields = [], []
for name, bf in self._bound_items():
field = bf.field
html_class_attr = ""
bf_errors = self.error_class(bf.errors)
if bf.is_hidden:
if bf_errors:
top_errors.extend(
[
_("(Hidden field %(name)s) %(error)s")
% {"name": name, "error": str(e)}
for e in bf_errors
]
)
hidden_fields.append(str(bf))
else:
# Create a 'class="..."' attribute if the row should have any
# CSS classes applied.
css_classes = bf.css_classes()
if css_classes:
html_class_attr = ' class="%s"' % css_classes
if errors_on_separate_row and bf_errors:
output.append(error_row % str(bf_errors))
if bf.label:
label = conditional_escape(bf.label)
label = bf.label_tag(label) or ""
else:
label = ""
if field.help_text:
help_text = help_text_html % field.help_text
else:
help_text = ""
output.append(
normal_row
% {
"errors": bf_errors,
"label": label,
"field": bf,
"help_text": help_text,
"html_class_attr": html_class_attr,
"css_classes": css_classes,
"field_name": bf.html_name,
}
)
if top_errors:
output.insert(0, error_row % top_errors)
if hidden_fields: # Insert any hidden fields in the last row.
str_hidden = "".join(hidden_fields)
if output:
last_row = output[-1]
# Chop off the trailing row_ender (e.g. '</td></tr>') and
# insert the hidden fields.
if not last_row.endswith(row_ender):
# This can happen in the as_p() case (and possibly others
# that users write): if there are only top errors, we may
# not be able to conscript the last row for our purposes,
# so insert a new, empty row.
last_row = normal_row % {
"errors": "",
"label": "",
"field": "",
"help_text": "",
"html_class_attr": html_class_attr,
"css_classes": "",
"field_name": "",
}
output.append(last_row)
output[-1] = last_row[: -len(row_ender)] + str_hidden + row_ender
else:
# If there aren't any rows in the output, just append the
# hidden fields.
output.append(str_hidden)
return mark_safe("\n".join(output))
@property
def template_name(self):
return self.renderer.form_template_name
def get_context(self):
fields = []
hidden_fields = []
top_errors = self.non_field_errors().copy()
for name, bf in self._bound_items():
bf_errors = self.error_class(bf.errors, renderer=self.renderer)
if bf.is_hidden:
if bf_errors:
top_errors += [
_("(Hidden field %(name)s) %(error)s")
% {"name": name, "error": str(e)}
for e in bf_errors
]
hidden_fields.append(bf)
else:
errors_str = str(bf_errors)
# RemovedInDjango50Warning.
if not isinstance(errors_str, SafeString):
warnings.warn(
f"Returning a plain string from "
f"{self.error_class.__name__} is deprecated. Please "
f"customize via the template system instead.",
RemovedInDjango50Warning,
)
errors_str = mark_safe(errors_str)
fields.append((bf, errors_str))
return {
"form": self,
"fields": fields,
"hidden_fields": hidden_fields,
"errors": top_errors,
}
def non_field_errors(self):
"""
Return an ErrorList of errors that aren't associated with a particular
field -- i.e., from Form.clean(). Return an empty ErrorList if there
are none.
"""
return self.errors.get(
NON_FIELD_ERRORS,
self.error_class(error_class="nonfield", renderer=self.renderer),
)
def add_error(self, field, error):
"""
Update the content of `self._errors`.
The `field` argument is the name of the field to which the errors
should be added. If it's None, treat the errors as NON_FIELD_ERRORS.
The `error` argument can be a single error, a list of errors, or a
dictionary that maps field names to lists of errors. An "error" can be
either a simple string or an instance of ValidationError with its
message attribute set and a "list or dictionary" can be an actual
`list` or `dict` or an instance of ValidationError with its
`error_list` or `error_dict` attribute set.
If `error` is a dictionary, the `field` argument *must* be None and
errors will be added to the fields that correspond to the keys of the
dictionary.
"""
if not isinstance(error, ValidationError):
# Normalize to ValidationError and let its constructor
# do the hard work of making sense of the input.
error = ValidationError(error)
if hasattr(error, "error_dict"):
if field is not None:
raise TypeError(
"The argument `field` must be `None` when the `error` "
"argument contains errors for multiple fields."
)
else:
error = error.error_dict
else:
error = {field or NON_FIELD_ERRORS: error.error_list}
for field, error_list in error.items():
if field not in self.errors:
if field != NON_FIELD_ERRORS and field not in self.fields:
raise ValueError(
"'%s' has no field named '%s'."
% (self.__class__.__name__, field)
)
if field == NON_FIELD_ERRORS:
self._errors[field] = self.error_class(
error_class="nonfield", renderer=self.renderer
)
else:
self._errors[field] = self.error_class(renderer=self.renderer)
self._errors[field].extend(error_list)
if field in self.cleaned_data:
del self.cleaned_data[field]
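    # Illustrative sketch, not part of Django's source: the common pattern of
    # attaching a cross-field error to a specific field from clean().
    #
    #   def clean(self):
    #       cleaned = super().clean()
    #       start, end = cleaned.get("start"), cleaned.get("end")
    #       if start and end and start > end:
    #           self.add_error("end", "End must come after start.")
    #       return cleaned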
def has_error(self, field, code=None):
return field in self.errors and (
code is None
or any(error.code == code for error in self.errors.as_data()[field])
)
def full_clean(self):
"""
Clean all of self.data and populate self._errors and self.cleaned_data.
"""
self._errors = ErrorDict()
if not self.is_bound: # Stop further processing.
return
self.cleaned_data = {}
# If the form is permitted to be empty, and none of the form data has
# changed from the initial data, short circuit any validation.
if self.empty_permitted and not self.has_changed():
return
self._clean_fields()
self._clean_form()
self._post_clean()
def _clean_fields(self):
for name, bf in self._bound_items():
field = bf.field
value = bf.initial if field.disabled else bf.data
try:
if isinstance(field, FileField):
value = field.clean(value, bf.initial)
else:
value = field.clean(value)
self.cleaned_data[name] = value
if hasattr(self, "clean_%s" % name):
value = getattr(self, "clean_%s" % name)()
self.cleaned_data[name] = value
except ValidationError as e:
self.add_error(name, e)
def _clean_form(self):
try:
cleaned_data = self.clean()
except ValidationError as e:
self.add_error(None, e)
else:
if cleaned_data is not None:
self.cleaned_data = cleaned_data
def _post_clean(self):
"""
An internal hook for performing additional cleaning after form cleaning
is complete. Used for model validation in model forms.
"""
pass
def clean(self):
"""
Hook for doing any extra form-wide cleaning after Field.clean() has been
called on every field. Any ValidationError raised by this method will
not be associated with a particular field; it will have a special-case
association with the field named '__all__'.
"""
return self.cleaned_data
def has_changed(self):
"""Return True if data differs from initial."""
return bool(self.changed_data)
@cached_property
def changed_data(self):
return [name for name, bf in self._bound_items() if bf._has_changed()]
@property
def media(self):
"""Return all media required to render the widgets on this form."""
media = Media()
for field in self.fields.values():
media = media + field.widget.media
return media
def is_multipart(self):
"""
Return True if the form needs to be multipart-encoded, i.e. it has
FileInput, or False otherwise.
"""
return any(field.widget.needs_multipart_form for field in self.fields.values())
def hidden_fields(self):
"""
Return a list of all the BoundField objects that are hidden fields.
Useful for manual form layout in templates.
"""
return [field for field in self if field.is_hidden]
def visible_fields(self):
"""
Return a list of BoundField objects that aren't hidden fields.
The opposite of the hidden_fields() method.
"""
return [field for field in self if not field.is_hidden]
def get_initial_for_field(self, field, field_name):
"""
Return initial data for field on form. Use initial data from the form
or the field, in that order. Evaluate callable values.
"""
value = self.initial.get(field_name, field.initial)
if callable(value):
value = value()
# If this is an auto-generated default date, nix the microseconds
# for standardized handling. See #22502.
if (
isinstance(value, (datetime.datetime, datetime.time))
and not field.widget.supports_microseconds
):
value = value.replace(microsecond=0)
return value
class Form(BaseForm, metaclass=DeclarativeFieldsMetaclass):
"A collection of Fields, plus their associated data."
# This is a separate class from BaseForm in order to abstract the way
# self.fields is specified. This class (Form) is the one that does the
# fancy metaclass stuff purely for the semantic sugar -- it allows one
# to define a form using declarative syntax.
# BaseForm itself has no way of designating self.fields.
|
b3b37a36cdf222aaff28385ab6752f7e36cf8546eb86f7a60791ed09ea1d199a | # This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r"j E \d\e Y"
TIME_FORMAT = "G:i"
DATETIME_FORMAT = r"j E \d\e Y \a \l\e\s G:i"
YEAR_MONTH_FORMAT = r"F \d\e\l Y"
MONTH_DAY_FORMAT = r"j E"
SHORT_DATE_FORMAT = "d/m/Y"
SHORT_DATETIME_FORMAT = "d/m/Y G:i"
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see https://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
"%d/%m/%Y", # '31/12/2009'
"%d/%m/%y", # '31/12/09'
]
DATETIME_INPUT_FORMATS = [
"%d/%m/%Y %H:%M:%S",
"%d/%m/%Y %H:%M:%S.%f",
"%d/%m/%Y %H:%M",
"%d/%m/%y %H:%M:%S",
"%d/%m/%y %H:%M:%S.%f",
"%d/%m/%y %H:%M",
]
DECIMAL_SEPARATOR = ","
THOUSAND_SEPARATOR = "."
NUMBER_GROUPING = 3
|
0e626b17f51d0652ceef0039d053205aebef23d8cc95806113e539bfbc965536 | from django.db.migrations.utils import get_migration_name_timestamp
from django.db.transaction import atomic
from .exceptions import IrreversibleError
class Migration:
"""
The base class for all migrations.
Migration files will import this from django.db.migrations.Migration
and subclass it as a class called Migration. It will have one or more
of the following attributes:
- operations: A list of Operation instances, probably from
django.db.migrations.operations
- dependencies: A list of tuples of (app_path, migration_name)
- run_before: A list of tuples of (app_path, migration_name)
- replaces: A list of migration_names
Note that all migrations come out of migrations and into the Loader or
Graph as instances, having been initialized with their app label and name.
"""
# Operations to apply during this migration, in order.
operations = []
# Other migrations that should be run before this migration.
# Should be a list of (app, migration_name).
dependencies = []
# Other migrations that should be run after this one (i.e. have
# this migration added to their dependencies). Useful to make third-party
# apps' migrations run after your AUTH_USER replacement, for example.
run_before = []
# Migration names in this app that this migration replaces. If this is
# non-empty, this migration will only be applied if all these migrations
# are not applied.
replaces = []
# Is this an initial migration? Initial migrations are skipped on
# --fake-initial if the table or fields already exist. If None, check if
    # the migration has any dependencies to determine whether db
    # introspection needs to be done. If True, always perform introspection.
    # If False, never perform introspection.
initial = None
# Whether to wrap the whole migration in a transaction. Only has an effect
# on database backends which support transactional DDL.
atomic = True
def __init__(self, name, app_label):
self.name = name
self.app_label = app_label
# Copy dependencies & other attrs as we might mutate them at runtime
self.operations = list(self.__class__.operations)
self.dependencies = list(self.__class__.dependencies)
self.run_before = list(self.__class__.run_before)
self.replaces = list(self.__class__.replaces)
def __eq__(self, other):
return (
isinstance(other, Migration)
and self.name == other.name
and self.app_label == other.app_label
)
def __repr__(self):
return "<Migration %s.%s>" % (self.app_label, self.name)
def __str__(self):
return "%s.%s" % (self.app_label, self.name)
def __hash__(self):
return hash("%s.%s" % (self.app_label, self.name))
def mutate_state(self, project_state, preserve=True):
"""
Take a ProjectState and return a new one with the migration's
operations applied to it. Preserve the original object state by
default and return a mutated state from a copy.
"""
new_state = project_state
if preserve:
new_state = project_state.clone()
for operation in self.operations:
operation.state_forwards(self.app_label, new_state)
return new_state
def apply(self, project_state, schema_editor, collect_sql=False):
"""
Take a project_state representing all migrations prior to this one
and a schema_editor for a live database and apply the migration
in a forwards order.
Return the resulting project state for efficient reuse by following
Migrations.
"""
for operation in self.operations:
# If this operation cannot be represented as SQL, place a comment
# there instead
if collect_sql:
schema_editor.collected_sql.append("--")
schema_editor.collected_sql.append("-- %s" % operation.describe())
schema_editor.collected_sql.append("--")
if not operation.reduces_to_sql:
schema_editor.collected_sql.append(
"-- THIS OPERATION CANNOT BE WRITTEN AS SQL"
)
continue
collected_sql_before = len(schema_editor.collected_sql)
# Save the state before the operation has run
old_state = project_state.clone()
operation.state_forwards(self.app_label, project_state)
# Run the operation
atomic_operation = operation.atomic or (
self.atomic and operation.atomic is not False
)
if not schema_editor.atomic_migration and atomic_operation:
# Force a transaction on a non-transactional-DDL backend or an
# atomic operation inside a non-atomic migration.
with atomic(schema_editor.connection.alias):
operation.database_forwards(
self.app_label, schema_editor, old_state, project_state
)
else:
# Normal behaviour
operation.database_forwards(
self.app_label, schema_editor, old_state, project_state
)
if collect_sql and collected_sql_before == len(schema_editor.collected_sql):
schema_editor.collected_sql.append("-- (no-op)")
return project_state
def unapply(self, project_state, schema_editor, collect_sql=False):
"""
Take a project_state representing all migrations prior to this one
and a schema_editor for a live database and apply the migration
in a reverse order.
The backwards migration process consists of two phases:
1. The intermediate states from right before the first until right
after the last operation inside this migration are preserved.
2. The operations are applied in reverse order using the states
recorded in step 1.
"""
# Construct all the intermediate states we need for a reverse migration
to_run = []
new_state = project_state
# Phase 1
for operation in self.operations:
# If it's irreversible, error out
if not operation.reversible:
raise IrreversibleError(
"Operation %s in %s is not reversible" % (operation, self)
)
            # Preserve the new state from the previous run so the same state
            # isn't mutated across all operations.
new_state = new_state.clone()
old_state = new_state.clone()
operation.state_forwards(self.app_label, new_state)
to_run.insert(0, (operation, old_state, new_state))
# Phase 2
for operation, to_state, from_state in to_run:
if collect_sql:
schema_editor.collected_sql.append("--")
schema_editor.collected_sql.append("-- %s" % operation.describe())
schema_editor.collected_sql.append("--")
if not operation.reduces_to_sql:
schema_editor.collected_sql.append(
"-- THIS OPERATION CANNOT BE WRITTEN AS SQL"
)
continue
collected_sql_before = len(schema_editor.collected_sql)
atomic_operation = operation.atomic or (
self.atomic and operation.atomic is not False
)
if not schema_editor.atomic_migration and atomic_operation:
# Force a transaction on a non-transactional-DDL backend or an
# atomic operation inside a non-atomic migration.
with atomic(schema_editor.connection.alias):
operation.database_backwards(
self.app_label, schema_editor, from_state, to_state
)
else:
# Normal behaviour
operation.database_backwards(
self.app_label, schema_editor, from_state, to_state
)
if collect_sql and collected_sql_before == len(schema_editor.collected_sql):
schema_editor.collected_sql.append("-- (no-op)")
return project_state
def suggest_name(self):
"""
Suggest a name for the operations this migration might represent. Names
are not guaranteed to be unique, but put some effort into the fallback
name to avoid VCS conflicts if possible.
"""
if self.initial:
return "initial"
raw_fragments = [op.migration_name_fragment for op in self.operations]
fragments = [name for name in raw_fragments if name]
if not fragments or len(fragments) != len(self.operations):
return "auto_%s" % get_migration_name_timestamp()
name = fragments[0]
for fragment in fragments[1:]:
new_name = f"{name}_{fragment}"
if len(new_name) > 52:
name = f"{name}_and_more"
break
name = new_name
return name
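    # Illustrative sketch, not part of Django's source: fragment merging in
    # suggest_name() stops once the joined name would exceed 52 characters.
    #
    #   two short fragments      -> "book_title_book_subtitle"
    #   overly long combination  -> "<first_fragment>_and_more"
    #   no usable fragments      -> "auto_<timestamp>"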
class SwappableTuple(tuple):
"""
Subclass of tuple so Django can tell this was originally a swappable
dependency when it reads the migration file.
"""
def __new__(cls, value, setting):
self = tuple.__new__(cls, value)
self.setting = setting
return self
def swappable_dependency(value):
"""Turn a setting value into a dependency."""
return SwappableTuple((value.split(".", 1)[0], "__first__"), value)
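# Illustrative sketch, not part of Django's source:
#
#   >>> dep = swappable_dependency("auth.User")
#   >>> tuple(dep), dep.setting
#   (('auth', '__first__'), 'auth.User')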
|
84431be61f608d52d04ca852f32619de2fe8d7ba6912b3b1f239587856179f8e | from django.db.backends.utils import names_digest, split_identifier
from django.db.models.expressions import Col, ExpressionList, F, Func, OrderBy
from django.db.models.functions import Collate
from django.db.models.query_utils import Q
from django.db.models.sql import Query
from django.utils.functional import partition
__all__ = ["Index"]
class Index:
suffix = "idx"
# The max length of the name of the index (restricted to 30 for
# cross-database compatibility with Oracle)
max_name_length = 30
def __init__(
self,
*expressions,
fields=(),
name=None,
db_tablespace=None,
opclasses=(),
condition=None,
include=None,
):
if opclasses and not name:
raise ValueError("An index must be named to use opclasses.")
if not isinstance(condition, (type(None), Q)):
raise ValueError("Index.condition must be a Q instance.")
if condition and not name:
raise ValueError("An index must be named to use condition.")
if not isinstance(fields, (list, tuple)):
raise ValueError("Index.fields must be a list or tuple.")
if not isinstance(opclasses, (list, tuple)):
raise ValueError("Index.opclasses must be a list or tuple.")
if not expressions and not fields:
raise ValueError(
"At least one field or expression is required to define an index."
)
if expressions and fields:
raise ValueError(
"Index.fields and expressions are mutually exclusive.",
)
if expressions and not name:
raise ValueError("An index must be named to use expressions.")
if expressions and opclasses:
raise ValueError(
"Index.opclasses cannot be used with expressions. Use "
"django.contrib.postgres.indexes.OpClass() instead."
)
if opclasses and len(fields) != len(opclasses):
raise ValueError(
"Index.fields and Index.opclasses must have the same number of "
"elements."
)
if fields and not all(isinstance(field, str) for field in fields):
raise ValueError("Index.fields must contain only strings with field names.")
if include and not name:
raise ValueError("A covering index must be named.")
if not isinstance(include, (type(None), list, tuple)):
raise ValueError("Index.include must be a list or tuple.")
self.fields = list(fields)
        # A list of 2-tuples with the field name and ordering ('' or 'DESC').
self.fields_orders = [
(field_name[1:], "DESC") if field_name.startswith("-") else (field_name, "")
for field_name in self.fields
]
self.name = name or ""
self.db_tablespace = db_tablespace
self.opclasses = opclasses
self.condition = condition
self.include = tuple(include) if include else ()
self.expressions = tuple(
F(expression) if isinstance(expression, str) else expression
for expression in expressions
)
@property
def contains_expressions(self):
return bool(self.expressions)
def _get_condition_sql(self, model, schema_editor):
if self.condition is None:
return None
query = Query(model=model, alias_cols=False)
where = query.build_where(self.condition)
compiler = query.get_compiler(connection=schema_editor.connection)
sql, params = where.as_sql(compiler, schema_editor.connection)
return sql % tuple(schema_editor.quote_value(p) for p in params)
def create_sql(self, model, schema_editor, using="", **kwargs):
include = [
model._meta.get_field(field_name).column for field_name in self.include
]
condition = self._get_condition_sql(model, schema_editor)
if self.expressions:
index_expressions = []
for expression in self.expressions:
index_expression = IndexExpression(expression)
index_expression.set_wrapper_classes(schema_editor.connection)
index_expressions.append(index_expression)
expressions = ExpressionList(*index_expressions).resolve_expression(
Query(model, alias_cols=False),
)
fields = None
col_suffixes = None
else:
fields = [
model._meta.get_field(field_name)
for field_name, _ in self.fields_orders
]
if schema_editor.connection.features.supports_index_column_ordering:
col_suffixes = [order[1] for order in self.fields_orders]
else:
col_suffixes = [""] * len(self.fields_orders)
expressions = None
return schema_editor._create_index_sql(
model,
fields=fields,
name=self.name,
using=using,
db_tablespace=self.db_tablespace,
col_suffixes=col_suffixes,
opclasses=self.opclasses,
condition=condition,
include=include,
expressions=expressions,
**kwargs,
)
def remove_sql(self, model, schema_editor, **kwargs):
return schema_editor._delete_index_sql(model, self.name, **kwargs)
def deconstruct(self):
path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
path = path.replace("django.db.models.indexes", "django.db.models")
kwargs = {"name": self.name}
if self.fields:
kwargs["fields"] = self.fields
if self.db_tablespace is not None:
kwargs["db_tablespace"] = self.db_tablespace
if self.opclasses:
kwargs["opclasses"] = self.opclasses
if self.condition:
kwargs["condition"] = self.condition
if self.include:
kwargs["include"] = self.include
return (path, self.expressions, kwargs)
def clone(self):
"""Create a copy of this Index."""
_, args, kwargs = self.deconstruct()
return self.__class__(*args, **kwargs)
def set_name_with_model(self, model):
"""
Generate a unique name for the index.
The name is divided into 3 parts - table name (12 chars), field name
(8 chars) and unique hash + suffix (10 chars). Each part is made to
fit its size by truncating the excess length.
"""
_, table_name = split_identifier(model._meta.db_table)
column_names = [
model._meta.get_field(field_name).column
for field_name, order in self.fields_orders
]
column_names_with_order = [
(("-%s" if order else "%s") % column_name)
for column_name, (field_name, order) in zip(
column_names, self.fields_orders
)
]
# The length of the parts of the name is based on the default max
# length of 30 characters.
hash_data = [table_name] + column_names_with_order + [self.suffix]
self.name = "%s_%s_%s" % (
table_name[:11],
column_names[0][:7],
"%s_%s" % (names_digest(*hash_data, length=6), self.suffix),
)
if len(self.name) > self.max_name_length:
raise ValueError(
"Index too long for multiple database support. Is self.suffix "
"longer than 3 characters?"
)
if self.name[0] == "_" or self.name[0].isdigit():
self.name = "D%s" % self.name[1:]
def __repr__(self):
return "<%s:%s%s%s%s%s%s%s>" % (
self.__class__.__qualname__,
"" if not self.fields else " fields=%s" % repr(self.fields),
"" if not self.expressions else " expressions=%s" % repr(self.expressions),
"" if not self.name else " name=%s" % repr(self.name),
""
if self.db_tablespace is None
else " db_tablespace=%s" % repr(self.db_tablespace),
"" if self.condition is None else " condition=%s" % self.condition,
"" if not self.include else " include=%s" % repr(self.include),
"" if not self.opclasses else " opclasses=%s" % repr(self.opclasses),
)
def __eq__(self, other):
if self.__class__ == other.__class__:
return self.deconstruct() == other.deconstruct()
return NotImplemented
class IndexExpression(Func):
"""Order and wrap expressions for CREATE INDEX statements."""
template = "%(expressions)s"
wrapper_classes = (OrderBy, Collate)
def set_wrapper_classes(self, connection=None):
        # Some databases (e.g. MySQL) treat COLLATE as an indexed expression.
if connection and connection.features.collate_as_index_expression:
self.wrapper_classes = tuple(
[
wrapper_cls
for wrapper_cls in self.wrapper_classes
if wrapper_cls is not Collate
]
)
@classmethod
def register_wrappers(cls, *wrapper_classes):
cls.wrapper_classes = wrapper_classes
def resolve_expression(
self,
query=None,
allow_joins=True,
reuse=None,
summarize=False,
for_save=False,
):
expressions = list(self.flatten())
# Split expressions and wrappers.
index_expressions, wrappers = partition(
lambda e: isinstance(e, self.wrapper_classes),
expressions,
)
wrapper_types = [type(wrapper) for wrapper in wrappers]
if len(wrapper_types) != len(set(wrapper_types)):
raise ValueError(
"Multiple references to %s can't be used in an indexed "
"expression."
% ", ".join(
[wrapper_cls.__qualname__ for wrapper_cls in self.wrapper_classes]
)
)
if expressions[1 : len(wrappers) + 1] != wrappers:
raise ValueError(
"%s must be topmost expressions in an indexed expression."
% ", ".join(
[wrapper_cls.__qualname__ for wrapper_cls in self.wrapper_classes]
)
)
# Wrap expressions in parentheses if they are not column references.
root_expression = index_expressions[1]
resolve_root_expression = root_expression.resolve_expression(
query,
allow_joins,
reuse,
summarize,
for_save,
)
if not isinstance(resolve_root_expression, Col):
root_expression = Func(root_expression, template="(%(expressions)s)")
if wrappers:
# Order wrappers and set their expressions.
wrappers = sorted(
wrappers,
key=lambda w: self.wrapper_classes.index(type(w)),
)
wrappers = [wrapper.copy() for wrapper in wrappers]
for i, wrapper in enumerate(wrappers[:-1]):
wrapper.set_source_expressions([wrappers[i + 1]])
# Set the root expression on the deepest wrapper.
wrappers[-1].set_source_expressions([root_expression])
self.set_source_expressions([wrappers[0]])
else:
# Use the root expression, if there are no wrappers.
self.set_source_expressions([root_expression])
return super().resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
def as_sqlite(self, compiler, connection, **extra_context):
# Casting to numeric is unnecessary.
return self.as_sql(compiler, connection, **extra_context)
|
d6d51f27c4624cea6c4a2980dfc90fbdc132eb0d0d5b3ea63476bf786b591c9e | """
The main QuerySet implementation. This provides the public API for the ORM.
"""
import copy
import operator
import warnings
from itertools import chain, islice
from asgiref.sync import sync_to_async
import django
from django.conf import settings
from django.core import exceptions
from django.db import (
DJANGO_VERSION_PICKLE_KEY,
IntegrityError,
NotSupportedError,
connections,
router,
transaction,
)
from django.db.models import AutoField, DateField, DateTimeField, sql
from django.db.models.constants import LOOKUP_SEP, OnConflict
from django.db.models.deletion import Collector
from django.db.models.expressions import Case, F, Ref, Value, When
from django.db.models.functions import Cast, Trunc
from django.db.models.query_utils import FilteredRelation, Q
from django.db.models.sql.constants import CURSOR, GET_ITERATOR_CHUNK_SIZE
from django.db.models.utils import create_namedtuple_class, resolve_callables
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.functional import cached_property, partition
# The maximum number of results to fetch in a get() query.
MAX_GET_RESULTS = 21
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
class BaseIterable:
def __init__(
self, queryset, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
):
self.queryset = queryset
self.chunked_fetch = chunked_fetch
self.chunk_size = chunk_size
async def _async_generator(self):
# Generators don't actually start running until the first time you call
# next() on them, so make the generator object in the async thread and
# then repeatedly dispatch to it in a sync thread.
sync_generator = self.__iter__()
def next_slice(gen):
return list(islice(gen, self.chunk_size))
while True:
chunk = await sync_to_async(next_slice)(sync_generator)
for item in chunk:
yield item
if len(chunk) < self.chunk_size:
break
# __aiter__() is a *synchronous* method that has to then return an
# *asynchronous* iterator/generator. Thus, nest an async generator inside
# it.
# This is a generic iterable converter for now, and is going to suffer a
# performance penalty on large sets of items due to the cost of crossing
# over the sync barrier for each chunk. Custom __aiter__() methods should
# be added to each Iterable subclass, but that needs some work in the
# Compiler first.
def __aiter__(self):
return self._async_generator()
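    # Illustrative sketch, not part of Django's source ("Author" is a
    # hypothetical model): the generator above is what makes `async for`
    # work on querysets, crossing the sync/async boundary once per chunk.
    #
    #   async def author_names():
    #       return [a.name async for a in Author.objects.all()]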
class ModelIterable(BaseIterable):
"""Iterable that yields a model instance for each row."""
def __iter__(self):
queryset = self.queryset
db = queryset.db
compiler = queryset.query.get_compiler(using=db)
# Execute the query. This will also fill compiler.select, klass_info,
# and annotations.
results = compiler.execute_sql(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
)
select, klass_info, annotation_col_map = (
compiler.select,
compiler.klass_info,
compiler.annotation_col_map,
)
model_cls = klass_info["model"]
select_fields = klass_info["select_fields"]
model_fields_start, model_fields_end = select_fields[0], select_fields[-1] + 1
init_list = [
f[0].target.attname for f in select[model_fields_start:model_fields_end]
]
related_populators = get_related_populators(klass_info, select, db)
known_related_objects = [
(
field,
related_objs,
operator.attrgetter(
*[
field.attname
if from_field == "self"
else queryset.model._meta.get_field(from_field).attname
for from_field in field.from_fields
]
),
)
for field, related_objs in queryset._known_related_objects.items()
]
for row in compiler.results_iter(results):
obj = model_cls.from_db(
db, init_list, row[model_fields_start:model_fields_end]
)
for rel_populator in related_populators:
rel_populator.populate(row, obj)
if annotation_col_map:
for attr_name, col_pos in annotation_col_map.items():
setattr(obj, attr_name, row[col_pos])
# Add the known related objects to the model.
for field, rel_objs, rel_getter in known_related_objects:
# Avoid overwriting objects loaded by, e.g., select_related().
if field.is_cached(obj):
continue
rel_obj_id = rel_getter(obj)
try:
rel_obj = rel_objs[rel_obj_id]
except KeyError:
pass # May happen in qs1 | qs2 scenarios.
else:
setattr(obj, field.name, rel_obj)
yield obj
class ValuesIterable(BaseIterable):
"""
Iterable returned by QuerySet.values() that yields a dict for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
# extra(select=...) cols are always at the start of the row.
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
indexes = range(len(names))
for row in compiler.results_iter(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
):
yield {names[i]: row[i] for i in indexes}
class ValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=False) that yields a tuple
for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
if queryset._fields:
# extra(select=...) cols are always at the start of the row.
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
fields = [
*queryset._fields,
*(f for f in query.annotation_select if f not in queryset._fields),
]
if fields != names:
# Reorder according to fields.
index_map = {name: idx for idx, name in enumerate(names)}
rowfactory = operator.itemgetter(*[index_map[f] for f in fields])
return map(
rowfactory,
compiler.results_iter(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
),
)
return compiler.results_iter(
tuple_expected=True,
chunked_fetch=self.chunked_fetch,
chunk_size=self.chunk_size,
)
class NamedValuesListIterable(ValuesListIterable):
"""
Iterable returned by QuerySet.values_list(named=True) that yields a
namedtuple for each row.
"""
def __iter__(self):
queryset = self.queryset
if queryset._fields:
names = queryset._fields
else:
query = queryset.query
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
tuple_class = create_namedtuple_class(*names)
new = tuple.__new__
for row in super().__iter__():
yield new(tuple_class, row)
class FlatValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=True) that yields single
values.
"""
def __iter__(self):
queryset = self.queryset
compiler = queryset.query.get_compiler(queryset.db)
for row in compiler.results_iter(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
):
yield row[0]
class QuerySet:
"""Represent a lazy database lookup for a set of objects."""
def __init__(self, model=None, query=None, using=None, hints=None):
self.model = model
self._db = using
self._hints = hints or {}
self._query = query or sql.Query(self.model)
self._result_cache = None
self._sticky_filter = False
self._for_write = False
self._prefetch_related_lookups = ()
self._prefetch_done = False
self._known_related_objects = {} # {rel_field: {pk: rel_obj}}
self._iterable_class = ModelIterable
self._fields = None
self._defer_next_filter = False
self._deferred_filter = None
@property
def query(self):
if self._deferred_filter:
negate, args, kwargs = self._deferred_filter
self._filter_or_exclude_inplace(negate, args, kwargs)
self._deferred_filter = None
return self._query
@query.setter
def query(self, value):
if value.values_select:
self._iterable_class = ValuesIterable
self._query = value
def as_manager(cls):
# Address the circular dependency between `QuerySet` and `Manager`.
from django.db.models.manager import Manager
manager = Manager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
########################
# PYTHON MAGIC METHODS #
########################
def __deepcopy__(self, memo):
"""Don't populate the QuerySet's cache."""
obj = self.__class__()
for k, v in self.__dict__.items():
if k == "_result_cache":
obj.__dict__[k] = None
else:
obj.__dict__[k] = copy.deepcopy(v, memo)
return obj
def __getstate__(self):
# Force the cache to be fully populated.
self._fetch_all()
return {**self.__dict__, DJANGO_VERSION_PICKLE_KEY: django.__version__}
def __setstate__(self, state):
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
if pickled_version != django.__version__:
warnings.warn(
"Pickled queryset instance's Django version %s does not "
"match the current version %s."
% (pickled_version, django.__version__),
RuntimeWarning,
stacklevel=2,
)
else:
warnings.warn(
"Pickled queryset instance's Django version is not specified.",
RuntimeWarning,
stacklevel=2,
)
self.__dict__.update(state)
def __repr__(self):
data = list(self[: REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return "<%s %r>" % (self.__class__.__name__, data)
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __iter__(self):
"""
The queryset iterator protocol uses three nested iterators in the
default case:
1. sql.compiler.execute_sql()
- Returns 100 rows at a time (constants.GET_ITERATOR_CHUNK_SIZE)
using cursor.fetchmany(). This part is responsible for
doing some column masking, and returning the rows in chunks.
2. sql.compiler.results_iter()
- Returns one row at a time. At this point the rows are still just
tuples. In some cases the return values are converted to
Python values at this location.
3. self.iterator()
- Responsible for turning the rows into model objects.
"""
self._fetch_all()
return iter(self._result_cache)
def __aiter__(self):
# Remember, __aiter__ itself is synchronous, it's the thing it returns
# that is async!
async def generator():
await self._async_fetch_all()
for item in self._result_cache:
yield item
return generator()
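# Async consumption sketch (``Entry`` is a hypothetical model, not part of
# this module): the async generator above simply drains the fully fetched
# result cache.
#
#   async for entry in Entry.objects.all():
#       print(entry)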
def __bool__(self):
self._fetch_all()
return bool(self._result_cache)
def __getitem__(self, k):
"""Retrieve an item or slice from the set of results."""
if not isinstance(k, (int, slice)):
raise TypeError(
"QuerySet indices must be integers or slices, not %s."
% type(k).__name__
)
if (isinstance(k, int) and k < 0) or (
isinstance(k, slice)
and (
(k.start is not None and k.start < 0)
or (k.stop is not None and k.stop < 0)
)
):
raise ValueError("Negative indexing is not supported.")
if self._result_cache is not None:
return self._result_cache[k]
if isinstance(k, slice):
qs = self._chain()
if k.start is not None:
start = int(k.start)
else:
start = None
if k.stop is not None:
stop = int(k.stop)
else:
stop = None
qs.query.set_limits(start, stop)
return list(qs)[:: k.step] if k.step else qs
qs = self._chain()
qs.query.set_limits(k, k + 1)
qs._fetch_all()
return qs._result_cache[0]
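# Slicing sketch (``Entry`` is a hypothetical model): a step-less slice
# stays lazy because the limits are pushed into the SQL, while a step
# forces evaluation via list().
#
#   Entry.objects.all()[:5]    # lazy: LIMIT 5
#   Entry.objects.all()[5:10]  # lazy: LIMIT 5 OFFSET 5
#   Entry.objects.all()[::2]   # evaluated immediately, returns a list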
def __class_getitem__(cls, *args, **kwargs):
return cls
def __and__(self, other):
self._check_operator_queryset(other, "&")
self._merge_sanity_check(other)
if isinstance(other, EmptyQuerySet):
return other
if isinstance(self, EmptyQuerySet):
return self
combined = self._chain()
combined._merge_known_related_objects(other)
combined.query.combine(other.query, sql.AND)
return combined
def __or__(self, other):
self._check_operator_queryset(other, "|")
self._merge_sanity_check(other)
if isinstance(self, EmptyQuerySet):
return other
if isinstance(other, EmptyQuerySet):
return self
query = (
self
if self.query.can_filter()
else self.model._base_manager.filter(pk__in=self.values("pk"))
)
combined = query._chain()
combined._merge_known_related_objects(other)
if not other.query.can_filter():
other = other.model._base_manager.filter(pk__in=other.values("pk"))
combined.query.combine(other.query, sql.OR)
return combined
def __xor__(self, other):
self._check_operator_queryset(other, "^")
self._merge_sanity_check(other)
if isinstance(self, EmptyQuerySet):
return other
if isinstance(other, EmptyQuerySet):
return self
query = (
self
if self.query.can_filter()
else self.model._base_manager.filter(pk__in=self.values("pk"))
)
combined = query._chain()
combined._merge_known_related_objects(other)
if not other.query.can_filter():
other = other.model._base_manager.filter(pk__in=other.values("pk"))
combined.query.combine(other.query, sql.XOR)
return combined
####################################
# METHODS THAT DO DATABASE QUERIES #
####################################
def _iterator(self, use_chunked_fetch, chunk_size):
iterable = self._iterable_class(
self,
chunked_fetch=use_chunked_fetch,
chunk_size=chunk_size or 2000,
)
if not self._prefetch_related_lookups or chunk_size is None:
yield from iterable
return
iterator = iter(iterable)
while results := list(islice(iterator, chunk_size)):
prefetch_related_objects(results, *self._prefetch_related_lookups)
yield from results
def iterator(self, chunk_size=None):
"""
An iterator over the results from applying this QuerySet to the
database. chunk_size must be provided for QuerySets that prefetch
related objects. Otherwise, a default chunk_size of 2000 is supplied.
"""
if chunk_size is None:
if self._prefetch_related_lookups:
# When the deprecation ends, replace with:
# raise ValueError(
# 'chunk_size must be provided when using '
# 'QuerySet.iterator() after prefetch_related().'
# )
warnings.warn(
"Using QuerySet.iterator() after prefetch_related() "
"without specifying chunk_size is deprecated.",
category=RemovedInDjango50Warning,
stacklevel=2,
)
elif chunk_size <= 0:
raise ValueError("Chunk size must be strictly positive.")
use_chunked_fetch = not connections[self.db].settings_dict.get(
"DISABLE_SERVER_SIDE_CURSORS"
)
return self._iterator(use_chunked_fetch, chunk_size)
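# iterator() usage sketch (hypothetical ``Entry`` model): rows are streamed
# without populating _result_cache, using server-side cursors when the
# backend allows them.
#
#   for entry in Entry.objects.iterator(chunk_size=500):
#       handle(entry)  # hypothetical per-row callback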
async def aiterator(self, chunk_size=2000):
"""
An asynchronous iterator over the results from applying this QuerySet
to the database.
"""
if self._prefetch_related_lookups:
raise NotSupportedError(
"Using QuerySet.aiterator() after prefetch_related() is not supported."
)
if chunk_size <= 0:
raise ValueError("Chunk size must be strictly positive.")
use_chunked_fetch = not connections[self.db].settings_dict.get(
"DISABLE_SERVER_SIDE_CURSORS"
)
async for item in self._iterable_class(
self, chunked_fetch=use_chunked_fetch, chunk_size=chunk_size
):
yield item
def aggregate(self, *args, **kwargs):
"""
Return a dictionary containing the calculations (aggregation)
over the current queryset.
If args is present the expression is passed as a kwarg using
the Aggregate object's default alias.
"""
if self.query.distinct_fields:
raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
self._validate_values_are_expressions(
(*args, *kwargs.values()), method_name="aggregate"
)
for arg in args:
# The default_alias property raises TypeError if default_alias
# can't be set automatically or AttributeError if it isn't an
# attribute.
try:
arg.default_alias
except (AttributeError, TypeError):
raise TypeError("Complex aggregates require an alias")
kwargs[arg.default_alias] = arg
query = self.query.chain()
for alias, aggregate_expr in kwargs.items():
query.add_annotation(aggregate_expr, alias, is_summary=True)
annotation = query.annotations[alias]
if not annotation.contains_aggregate:
raise TypeError("%s is not an aggregate expression" % alias)
for expr in annotation.get_source_expressions():
if (
expr.contains_aggregate
and isinstance(expr, Ref)
and expr.refs in kwargs
):
name = expr.refs
raise exceptions.FieldError(
"Cannot compute %s('%s'): '%s' is an aggregate"
% (annotation.name, name, name)
)
return query.get_aggregation(self.db, kwargs)
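# aggregate() sketch (hypothetical ``Book`` model with a ``price`` field);
# a positional expression is keyed by its default_alias, as handled above:
#
#   from django.db.models import Avg, Max
#   Book.objects.aggregate(Avg("price"), max_price=Max("price"))
#   # -> {"price__avg": ..., "max_price": ...}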
async def aaggregate(self, *args, **kwargs):
return await sync_to_async(self.aggregate)(*args, **kwargs)
def count(self):
"""
Perform a SELECT COUNT() and return the number of records as an
integer.
If the QuerySet is already fully cached, return the length of the
cached results set to avoid multiple SELECT COUNT(*) calls.
"""
if self._result_cache is not None:
return len(self._result_cache)
return self.query.get_count(using=self.db)
async def acount(self):
return await sync_to_async(self.count)()
def get(self, *args, **kwargs):
"""
Perform the query and return a single object matching the given
keyword arguments.
"""
if self.query.combinator and (args or kwargs):
raise NotSupportedError(
"Calling QuerySet.get(...) with filters after %s() is not "
"supported." % self.query.combinator
)
clone = self._chain() if self.query.combinator else self.filter(*args, **kwargs)
if self.query.can_filter() and not self.query.distinct_fields:
clone = clone.order_by()
limit = None
if (
not clone.query.select_for_update
or connections[clone.db].features.supports_select_for_update_with_limit
):
limit = MAX_GET_RESULTS
clone.query.set_limits(high=limit)
num = len(clone)
if num == 1:
return clone._result_cache[0]
if not num:
raise self.model.DoesNotExist(
"%s matching query does not exist." % self.model._meta.object_name
)
raise self.model.MultipleObjectsReturned(
"get() returned more than one %s -- it returned %s!"
% (
self.model._meta.object_name,
num if not limit or num < limit else "more than %s" % (limit - 1),
)
)
async def aget(self, *args, **kwargs):
return await sync_to_async(self.get)(*args, **kwargs)
def create(self, **kwargs):
"""
Create a new object with the given kwargs, saving it to the database
and returning the created object.
"""
obj = self.model(**kwargs)
self._for_write = True
obj.save(force_insert=True, using=self.db)
return obj
async def acreate(self, **kwargs):
return await sync_to_async(self.create)(**kwargs)
def _prepare_for_bulk_create(self, objs):
for obj in objs:
if obj.pk is None:
# Populate new PK values.
obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
obj._prepare_related_fields_for_save(operation_name="bulk_create")
def _check_bulk_create_options(
self, ignore_conflicts, update_conflicts, update_fields, unique_fields
):
if ignore_conflicts and update_conflicts:
raise ValueError(
"ignore_conflicts and update_conflicts are mutually exclusive."
)
db_features = connections[self.db].features
if ignore_conflicts:
if not db_features.supports_ignore_conflicts:
raise NotSupportedError(
"This database backend does not support ignoring conflicts."
)
return OnConflict.IGNORE
elif update_conflicts:
if not db_features.supports_update_conflicts:
raise NotSupportedError(
"This database backend does not support updating conflicts."
)
if not update_fields:
raise ValueError(
"Fields that will be updated when a row insertion fails "
"on conflicts must be provided."
)
if unique_fields and not db_features.supports_update_conflicts_with_target:
raise NotSupportedError(
"This database backend does not support updating "
"conflicts with specifying unique fields that can trigger "
"the upsert."
)
if not unique_fields and db_features.supports_update_conflicts_with_target:
raise ValueError(
"Unique fields that can trigger the upsert must be provided."
)
# Updating primary keys and non-concrete fields is forbidden.
update_fields = [self.model._meta.get_field(name) for name in update_fields]
if any(not f.concrete or f.many_to_many for f in update_fields):
raise ValueError(
"bulk_create() can only be used with concrete fields in "
"update_fields."
)
if any(f.primary_key for f in update_fields):
raise ValueError(
"bulk_create() cannot be used with primary keys in "
"update_fields."
)
if unique_fields:
# Primary key is allowed in unique_fields.
unique_fields = [
self.model._meta.get_field(name)
for name in unique_fields
if name != "pk"
]
if any(not f.concrete or f.many_to_many for f in unique_fields):
raise ValueError(
"bulk_create() can only be used with concrete fields "
"in unique_fields."
)
return OnConflict.UPDATE
return None
def bulk_create(
self,
objs,
batch_size=None,
ignore_conflicts=False,
update_conflicts=False,
update_fields=None,
unique_fields=None,
):
"""
Insert each of the instances into the database. Do *not* call
save() on each of the instances, do not send any pre/post_save
signals, and do not set the primary key attribute if it is an
autoincrement field (except if features.can_return_rows_from_bulk_insert=True).
Multi-table models are not supported.
"""
# When you bulk insert you don't get the primary keys back (if it's an
# autoincrement, except if can_return_rows_from_bulk_insert=True), so
# you can't insert into the child tables that reference it. There
# are two workarounds:
# 1) This could be implemented if you didn't have an autoincrement pk
# 2) You could do it by doing O(n) normal inserts into the parent
# tables to get the primary keys back and then doing a single bulk
# insert into the childmost table.
# We currently set the primary keys on the objects when using
# PostgreSQL via the RETURNING ID clause. It should be possible for
# Oracle as well, but the semantics for extracting the primary keys are
# trickier, so it's not done yet.
if batch_size is not None and batch_size <= 0:
raise ValueError("Batch size must be a positive integer.")
# Check that the parents share the same concrete model with our
# model to detect the inheritance pattern ConcreteGrandParent ->
# MultiTableParent -> ProxyChild. Simply checking self.model._meta.proxy
# would not identify that case as involving multiple tables.
for parent in self.model._meta.get_parent_list():
if parent._meta.concrete_model is not self.model._meta.concrete_model:
raise ValueError("Can't bulk create a multi-table inherited model")
if not objs:
return objs
on_conflict = self._check_bulk_create_options(
ignore_conflicts,
update_conflicts,
update_fields,
unique_fields,
)
self._for_write = True
opts = self.model._meta
fields = opts.concrete_fields
objs = list(objs)
self._prepare_for_bulk_create(objs)
with transaction.atomic(using=self.db, savepoint=False):
objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
if objs_with_pk:
returned_columns = self._batched_insert(
objs_with_pk,
fields,
batch_size,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
for obj_with_pk, results in zip(objs_with_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
if field != opts.pk:
setattr(obj_with_pk, field.attname, result)
for obj_with_pk in objs_with_pk:
obj_with_pk._state.adding = False
obj_with_pk._state.db = self.db
if objs_without_pk:
fields = [f for f in fields if not isinstance(f, AutoField)]
returned_columns = self._batched_insert(
objs_without_pk,
fields,
batch_size,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
connection = connections[self.db]
if (
connection.features.can_return_rows_from_bulk_insert
and on_conflict is None
):
assert len(returned_columns) == len(objs_without_pk)
for obj_without_pk, results in zip(objs_without_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
setattr(obj_without_pk, field.attname, result)
obj_without_pk._state.adding = False
obj_without_pk._state.db = self.db
return objs
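# bulk_create() upsert sketch (hypothetical ``Entry`` model with a unique
# ``slug`` field), matching the validation in _check_bulk_create_options():
#
#   Entry.objects.bulk_create(
#       [Entry(slug="a", rank=1), Entry(slug="b", rank=2)],
#       update_conflicts=True,
#       update_fields=["rank"],
#       unique_fields=["slug"],
#   )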
async def abulk_create(
self,
objs,
batch_size=None,
ignore_conflicts=False,
update_conflicts=False,
update_fields=None,
unique_fields=None,
):
return await sync_to_async(self.bulk_create)(
objs=objs,
batch_size=batch_size,
ignore_conflicts=ignore_conflicts,
update_conflicts=update_conflicts,
update_fields=update_fields,
unique_fields=unique_fields,
)
def bulk_update(self, objs, fields, batch_size=None):
"""
Update the given fields in each of the given objects in the database.
"""
if batch_size is not None and batch_size <= 0:
raise ValueError("Batch size must be a positive integer.")
if not fields:
raise ValueError("Field names must be given to bulk_update().")
objs = tuple(objs)
if any(obj.pk is None for obj in objs):
raise ValueError("All bulk_update() objects must have a primary key set.")
fields = [self.model._meta.get_field(name) for name in fields]
if any(not f.concrete or f.many_to_many for f in fields):
raise ValueError("bulk_update() can only be used with concrete fields.")
if any(f.primary_key for f in fields):
raise ValueError("bulk_update() cannot be used with primary key fields.")
if not objs:
return 0
for obj in objs:
obj._prepare_related_fields_for_save(
operation_name="bulk_update", fields=fields
)
# PK is used twice in the resulting update query, once in the filter
# and once in the WHEN. Each field will also have one CAST.
self._for_write = True
connection = connections[self.db]
max_batch_size = connection.ops.bulk_batch_size(["pk", "pk"] + fields, objs)
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
requires_casting = connection.features.requires_casted_case_in_updates
batches = (objs[i : i + batch_size] for i in range(0, len(objs), batch_size))
updates = []
for batch_objs in batches:
update_kwargs = {}
for field in fields:
when_statements = []
for obj in batch_objs:
attr = getattr(obj, field.attname)
if not hasattr(attr, "resolve_expression"):
attr = Value(attr, output_field=field)
when_statements.append(When(pk=obj.pk, then=attr))
case_statement = Case(*when_statements, output_field=field)
if requires_casting:
case_statement = Cast(case_statement, output_field=field)
update_kwargs[field.attname] = case_statement
updates.append(([obj.pk for obj in batch_objs], update_kwargs))
rows_updated = 0
queryset = self.using(self.db)
with transaction.atomic(using=self.db, savepoint=False):
for pks, update_kwargs in updates:
rows_updated += queryset.filter(pk__in=pks).update(**update_kwargs)
return rows_updated
bulk_update.alters_data = True
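# bulk_update() sketch (hypothetical ``Entry`` model with a ``rank``
# field): each batch becomes a single UPDATE with one CASE WHEN per field,
# keyed on the primary key.
#
#   entries = list(Entry.objects.all())
#   for entry in entries:
#       entry.rank += 1
#   Entry.objects.bulk_update(entries, ["rank"], batch_size=100)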
async def abulk_update(self, objs, fields, batch_size=None):
return await sync_to_async(self.bulk_update)(
objs=objs,
fields=fields,
batch_size=batch_size,
)
abulk_update.alters_data = True
def get_or_create(self, defaults=None, **kwargs):
"""
Look up an object with the given kwargs, creating one if necessary.
Return a tuple of (object, created), where created is a boolean
specifying whether an object was created.
"""
# The get() needs to be targeted at the write database in order
# to avoid potential transaction consistency problems.
self._for_write = True
try:
return self.get(**kwargs), False
except self.model.DoesNotExist:
params = self._extract_model_params(defaults, **kwargs)
# Try to create an object using passed params.
try:
with transaction.atomic(using=self.db):
params = dict(resolve_callables(params))
return self.create(**params), True
except IntegrityError:
try:
return self.get(**kwargs), False
except self.model.DoesNotExist:
pass
raise
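# get_or_create() sketch (hypothetical ``Person`` model); ``defaults`` are
# applied only when a new row is created:
#
#   from datetime import date
#   person, created = Person.objects.get_or_create(
#       first_name="John",
#       last_name="Lennon",
#       defaults={"birthday": date(1940, 10, 9)},
#   )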
async def aget_or_create(self, defaults=None, **kwargs):
return await sync_to_async(self.get_or_create)(
defaults=defaults,
**kwargs,
)
def update_or_create(self, defaults=None, **kwargs):
"""
Look up an object with the given kwargs, updating one with defaults
if it exists, otherwise create a new one.
Return a tuple (object, created), where created is a boolean
specifying whether an object was created.
"""
defaults = defaults or {}
self._for_write = True
with transaction.atomic(using=self.db):
# Lock the row so that a concurrent update is blocked until
# update_or_create() has performed its save.
obj, created = self.select_for_update().get_or_create(defaults, **kwargs)
if created:
return obj, created
for k, v in resolve_callables(defaults):
setattr(obj, k, v)
obj.save(using=self.db)
return obj, False
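# update_or_create() sketch, continuing the hypothetical ``Person`` model;
# the matched row is locked with select_for_update() while the defaults
# are applied:
#
#   person, created = Person.objects.update_or_create(
#       first_name="John",
#       last_name="Lennon",
#       defaults={"first_name": "Bob"},
#   )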
async def aupdate_or_create(self, defaults=None, **kwargs):
return await sync_to_async(self.update_or_create)(
defaults=defaults,
**kwargs,
)
def _extract_model_params(self, defaults, **kwargs):
"""
Prepare `params` for creating a model instance based on the given
kwargs; for use by get_or_create().
"""
defaults = defaults or {}
params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k}
params.update(defaults)
property_names = self.model._meta._property_names
invalid_params = []
for param in params:
try:
self.model._meta.get_field(param)
except exceptions.FieldDoesNotExist:
# It's okay to use a model's property if it has a setter.
if not (param in property_names and getattr(self.model, param).fset):
invalid_params.append(param)
if invalid_params:
raise exceptions.FieldError(
"Invalid field name(s) for model %s: '%s'."
% (
self.model._meta.object_name,
"', '".join(sorted(invalid_params)),
)
)
return params
def _earliest(self, *fields):
"""
Return the earliest object according to fields (if given) or by the
model's Meta.get_latest_by.
"""
if fields:
order_by = fields
else:
order_by = self.model._meta.get_latest_by
if order_by and not isinstance(order_by, (tuple, list)):
order_by = (order_by,)
if order_by is None:
raise ValueError(
"earliest() and latest() require either fields as positional "
"arguments or 'get_latest_by' in the model's Meta."
)
obj = self._chain()
obj.query.set_limits(high=1)
obj.query.clear_ordering(force=True)
obj.query.add_ordering(*order_by)
return obj.get()
def earliest(self, *fields):
if self.query.is_sliced:
raise TypeError("Cannot change a query once a slice has been taken.")
return self._earliest(*fields)
async def aearliest(self, *fields):
return await sync_to_async(self.earliest)(*fields)
def latest(self, *fields):
"""
Return the latest object according to fields (if given) or by the
model's Meta.get_latest_by.
"""
if self.query.is_sliced:
raise TypeError("Cannot change a query once a slice has been taken.")
return self.reverse()._earliest(*fields)
async def alatest(self, *fields):
return await sync_to_async(self.latest)(*fields)
def first(self):
"""Return the first object of a query or None if no match is found."""
for obj in (self if self.ordered else self.order_by("pk"))[:1]:
return obj
async def afirst(self):
return await sync_to_async(self.first)()
def last(self):
"""Return the last object of a query or None if no match is found."""
for obj in (self.reverse() if self.ordered else self.order_by("-pk"))[:1]:
return obj
async def alast(self):
return await sync_to_async(self.last)()
def in_bulk(self, id_list=None, *, field_name="pk"):
"""
Return a dictionary mapping each of the given IDs to the object with
that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
"""
if self.query.is_sliced:
raise TypeError("Cannot use 'limit' or 'offset' with in_bulk().")
opts = self.model._meta
unique_fields = [
constraint.fields[0]
for constraint in opts.total_unique_constraints
if len(constraint.fields) == 1
]
if (
field_name != "pk"
and not opts.get_field(field_name).unique
and field_name not in unique_fields
and self.query.distinct_fields != (field_name,)
):
raise ValueError(
"in_bulk()'s field_name must be a unique field but %r isn't."
% field_name
)
if id_list is not None:
if not id_list:
return {}
filter_key = "{}__in".format(field_name)
batch_size = connections[self.db].features.max_query_params
id_list = tuple(id_list)
# If the database has a limit on the number of query parameters
# (e.g. SQLite), retrieve objects in batches if necessary.
if batch_size and batch_size < len(id_list):
qs = ()
for offset in range(0, len(id_list), batch_size):
batch = id_list[offset : offset + batch_size]
qs += tuple(self.filter(**{filter_key: batch}).order_by())
else:
qs = self.filter(**{filter_key: id_list}).order_by()
else:
qs = self._chain()
return {getattr(obj, field_name): obj for obj in qs}
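# in_bulk() sketch (hypothetical ``Blog`` model with a unique ``slug``):
#
#   Blog.objects.in_bulk([1, 2])             # -> {1: <Blog>, 2: <Blog>}
#   Blog.objects.in_bulk(field_name="slug")  # keyed by slug instead of pk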
async def ain_bulk(self, id_list=None, *, field_name="pk"):
return await sync_to_async(self.in_bulk)(
id_list=id_list,
field_name=field_name,
)
def delete(self):
"""Delete the records in the current QuerySet."""
self._not_support_combined_queries("delete")
if self.query.is_sliced:
raise TypeError("Cannot use 'limit' or 'offset' with delete().")
if self.query.distinct or self.query.distinct_fields:
raise TypeError("Cannot call delete() after .distinct().")
if self._fields is not None:
raise TypeError("Cannot call delete() after .values() or .values_list()")
del_query = self._chain()
# The delete is actually 2 queries - one to find related objects,
# and one to delete. Make sure that the discovery of related
# objects is performed on the same database as the deletion.
del_query._for_write = True
# Disable non-supported fields.
del_query.query.select_for_update = False
del_query.query.select_related = False
del_query.query.clear_ordering(force=True)
collector = Collector(using=del_query.db, origin=self)
collector.collect(del_query)
deleted, _rows_count = collector.delete()
# Clear the result cache, in case this QuerySet gets reused.
self._result_cache = None
return deleted, _rows_count
delete.alters_data = True
delete.queryset_only = True
async def adelete(self):
return await sync_to_async(self.delete)()
adelete.alters_data = True
adelete.queryset_only = True
def _raw_delete(self, using):
"""
Delete objects found from the given queryset in a single direct SQL
query. No signals are sent, and there is no protection for cascades.
"""
query = self.query.clone()
query.__class__ = sql.DeleteQuery
cursor = query.get_compiler(using).execute_sql(CURSOR)
if cursor:
with cursor:
return cursor.rowcount
return 0
_raw_delete.alters_data = True
def update(self, **kwargs):
"""
Update all elements in the current QuerySet, setting all the given
fields to the appropriate values.
"""
self._not_support_combined_queries("update")
if self.query.is_sliced:
raise TypeError("Cannot update a query once a slice has been taken.")
self._for_write = True
query = self.query.chain(sql.UpdateQuery)
query.add_update_values(kwargs)
# Clear any annotations so that they won't be present in subqueries.
query.annotations = {}
with transaction.mark_for_rollback_on_error(using=self.db):
rows = query.get_compiler(self.db).execute_sql(CURSOR)
self._result_cache = None
return rows
update.alters_data = True
async def aupdate(self, **kwargs):
return await sync_to_async(self.update)(**kwargs)
aupdate.alters_data = True
def _update(self, values):
"""
A version of update() that accepts field objects instead of field names.
Used primarily for model saving and not intended for use by general
code (it requires too much poking around at model internals to be
useful at that level).
"""
if self.query.is_sliced:
raise TypeError("Cannot update a query once a slice has been taken.")
query = self.query.chain(sql.UpdateQuery)
query.add_update_fields(values)
# Clear any annotations so that they won't be present in subqueries.
query.annotations = {}
self._result_cache = None
return query.get_compiler(self.db).execute_sql(CURSOR)
_update.alters_data = True
_update.queryset_only = False
def exists(self):
"""
Return True if the QuerySet would have any results, False otherwise.
"""
if self._result_cache is None:
return self.query.has_results(using=self.db)
return bool(self._result_cache)
async def aexists(self):
return await sync_to_async(self.exists)()
def contains(self, obj):
"""
Return True if the QuerySet contains the provided obj,
False otherwise.
"""
self._not_support_combined_queries("contains")
if self._fields is not None:
raise TypeError(
"Cannot call QuerySet.contains() after .values() or .values_list()."
)
try:
if obj._meta.concrete_model != self.model._meta.concrete_model:
return False
except AttributeError:
raise TypeError("'obj' must be a model instance.")
if obj.pk is None:
raise ValueError("QuerySet.contains() cannot be used on unsaved objects.")
if self._result_cache is not None:
return obj in self._result_cache
return self.filter(pk=obj.pk).exists()
async def acontains(self, obj):
return await sync_to_async(self.contains)(obj=obj)
def _prefetch_related_objects(self):
# This method can only be called once the result cache has been filled.
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
self._prefetch_done = True
def explain(self, *, format=None, **options):
"""
Run an EXPLAIN on the SQL query this QuerySet would perform, and
return the results.
"""
return self.query.explain(using=self.db, format=format, **options)
async def aexplain(self, *, format=None, **options):
return await sync_to_async(self.explain)(format=format, **options)
##################################################
# PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
##################################################
def raw(self, raw_query, params=(), translations=None, using=None):
if using is None:
using = self.db
qs = RawQuerySet(
raw_query,
model=self.model,
params=params,
translations=translations,
using=using,
)
qs._prefetch_related_lookups = self._prefetch_related_lookups[:]
return qs
def _values(self, *fields, **expressions):
clone = self._chain()
if expressions:
clone = clone.annotate(**expressions)
clone._fields = fields
clone.query.set_values(fields)
return clone
def values(self, *fields, **expressions):
fields += tuple(expressions)
clone = self._values(*fields, **expressions)
clone._iterable_class = ValuesIterable
return clone
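# values() sketch (hypothetical ``Blog`` model): rows come back as dicts,
# and keyword expressions are annotated before being selected.
#
#   from django.db.models.functions import Lower
#   Blog.objects.values("id", "name")
#   # -> <QuerySet [{"id": 1, "name": "Beatles Blog"}, ...]>
#   Blog.objects.values(lower_name=Lower("name"))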
def values_list(self, *fields, flat=False, named=False):
if flat and named:
raise TypeError("'flat' and 'named' can't be used together.")
if flat and len(fields) > 1:
raise TypeError(
"'flat' is not valid when values_list is called with more than one "
"field."
)
field_names = {f for f in fields if not hasattr(f, "resolve_expression")}
_fields = []
expressions = {}
counter = 1
for field in fields:
if hasattr(field, "resolve_expression"):
field_id_prefix = getattr(
field, "default_alias", field.__class__.__name__.lower()
)
while True:
field_id = field_id_prefix + str(counter)
counter += 1
if field_id not in field_names:
break
expressions[field_id] = field
_fields.append(field_id)
else:
_fields.append(field)
clone = self._values(*_fields, **expressions)
clone._iterable_class = (
NamedValuesListIterable
if named
else FlatValuesListIterable
if flat
else ValuesListIterable
)
return clone
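# values_list() sketch (hypothetical ``Entry`` model), one call per
# iterable class chosen above:
#
#   Entry.objects.values_list("id", "headline")   # ValuesListIterable
#   Entry.objects.values_list("id", flat=True)    # FlatValuesListIterable
#   Entry.objects.values_list("id", named=True)   # NamedValuesListIterable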
def dates(self, field_name, kind, order="ASC"):
"""
Return a list of date objects representing all available dates for
the given field_name, scoped to 'kind'.
"""
if kind not in ("year", "month", "week", "day"):
raise ValueError("'kind' must be one of 'year', 'month', 'week', or 'day'.")
if order not in ("ASC", "DESC"):
raise ValueError("'order' must be either 'ASC' or 'DESC'.")
return (
self.annotate(
datefield=Trunc(field_name, kind, output_field=DateField()),
plain_field=F(field_name),
)
.values_list("datefield", flat=True)
.distinct()
.filter(plain_field__isnull=False)
.order_by(("-" if order == "DESC" else "") + "datefield")
)
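# dates() sketch (hypothetical ``Entry`` model with a ``pub_date`` field):
#
#   Entry.objects.dates("pub_date", "month", order="DESC")
#   # -> <QuerySet [datetime.date(2023, 5, 1), datetime.date(2023, 3, 1)]>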
# RemovedInDjango50Warning: when the deprecation ends, remove is_dst
# argument.
def datetimes(
self, field_name, kind, order="ASC", tzinfo=None, is_dst=timezone.NOT_PASSED
):
"""
Return a list of datetime objects representing all available
datetimes for the given field_name, scoped to 'kind'.
"""
if kind not in ("year", "month", "week", "day", "hour", "minute", "second"):
raise ValueError(
"'kind' must be one of 'year', 'month', 'week', 'day', "
"'hour', 'minute', or 'second'."
)
if order not in ("ASC", "DESC"):
raise ValueError("'order' must be either 'ASC' or 'DESC'.")
if settings.USE_TZ:
if tzinfo is None:
tzinfo = timezone.get_current_timezone()
else:
tzinfo = None
return (
self.annotate(
datetimefield=Trunc(
field_name,
kind,
output_field=DateTimeField(),
tzinfo=tzinfo,
is_dst=is_dst,
),
plain_field=F(field_name),
)
.values_list("datetimefield", flat=True)
.distinct()
.filter(plain_field__isnull=False)
.order_by(("-" if order == "DESC" else "") + "datetimefield")
)
def none(self):
"""Return an empty QuerySet."""
clone = self._chain()
clone.query.set_empty()
return clone
##################################################################
# PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
##################################################################
def all(self):
"""
Return a new QuerySet that is a copy of the current one. This allows a
QuerySet to proxy for a model manager in some cases.
"""
return self._chain()
def filter(self, *args, **kwargs):
"""
Return a new QuerySet instance with the args ANDed to the existing
set.
"""
self._not_support_combined_queries("filter")
return self._filter_or_exclude(False, args, kwargs)
def exclude(self, *args, **kwargs):
"""
Return a new QuerySet instance with NOT (args) ANDed to the existing
set.
"""
self._not_support_combined_queries("exclude")
return self._filter_or_exclude(True, args, kwargs)
def _filter_or_exclude(self, negate, args, kwargs):
if (args or kwargs) and self.query.is_sliced:
raise TypeError("Cannot filter a query once a slice has been taken.")
clone = self._chain()
if self._defer_next_filter:
self._defer_next_filter = False
clone._deferred_filter = negate, args, kwargs
else:
clone._filter_or_exclude_inplace(negate, args, kwargs)
return clone
def _filter_or_exclude_inplace(self, negate, args, kwargs):
if negate:
self._query.add_q(~Q(*args, **kwargs))
else:
self._query.add_q(Q(*args, **kwargs))
def complex_filter(self, filter_obj):
"""
Return a new QuerySet instance with filter_obj added to the filters.
filter_obj can be a Q object or a dictionary of keyword lookup
arguments.
This exists to support framework features such as 'limit_choices_to',
and usually it will be more natural to use other methods.
"""
if isinstance(filter_obj, Q):
clone = self._chain()
clone.query.add_q(filter_obj)
return clone
else:
return self._filter_or_exclude(False, args=(), kwargs=filter_obj)
def _combinator_query(self, combinator, *other_qs, all=False):
# Clone the query to inherit the select list and everything
clone = self._chain()
# Clear limits and ordering so they can be reapplied
clone.query.clear_ordering(force=True)
clone.query.clear_limits()
clone.query.combined_queries = (self.query,) + tuple(
qs.query for qs in other_qs
)
clone.query.combinator = combinator
clone.query.combinator_all = all
return clone
def union(self, *other_qs, all=False):
# If the query is an EmptyQuerySet, combine all nonempty querysets.
if isinstance(self, EmptyQuerySet):
qs = [q for q in other_qs if not isinstance(q, EmptyQuerySet)]
if not qs:
return self
if len(qs) == 1:
return qs[0]
return qs[0]._combinator_query("union", *qs[1:], all=all)
return self._combinator_query("union", *other_qs, all=all)
def intersection(self, *other_qs):
# If any query is an EmptyQuerySet, return it.
if isinstance(self, EmptyQuerySet):
return self
for other in other_qs:
if isinstance(other, EmptyQuerySet):
return other
return self._combinator_query("intersection", *other_qs)
def difference(self, *other_qs):
# If the query is an EmptyQuerySet, return it.
if isinstance(self, EmptyQuerySet):
return self
return self._combinator_query("difference", *other_qs)
def select_for_update(self, nowait=False, skip_locked=False, of=(), no_key=False):
"""
Return a new QuerySet instance that will select objects with a
FOR UPDATE lock.
"""
if nowait and skip_locked:
raise ValueError("The nowait option cannot be used with skip_locked.")
obj = self._chain()
obj._for_write = True
obj.query.select_for_update = True
obj.query.select_for_update_nowait = nowait
obj.query.select_for_update_skip_locked = skip_locked
obj.query.select_for_update_of = of
obj.query.select_for_no_key_update = no_key
return obj
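# select_for_update() sketch (hypothetical ``Entry`` model): most backends
# require the lock to be taken inside a transaction.
#
#   from django.db import transaction
#   with transaction.atomic():
#       pending = Entry.objects.select_for_update(skip_locked=True).filter(
#           status="pending"  # hypothetical field
#       )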
def select_related(self, *fields):
"""
Return a new QuerySet instance that will select related objects.
If fields are specified, they must be ForeignKey fields and only those
related objects are included in the selection.
If select_related(None) is called, clear the list.
"""
self._not_support_combined_queries("select_related")
if self._fields is not None:
raise TypeError(
"Cannot call select_related() after .values() or .values_list()"
)
obj = self._chain()
if fields == (None,):
obj.query.select_related = False
elif fields:
obj.query.add_select_related(fields)
else:
obj.query.select_related = True
return obj
def prefetch_related(self, *lookups):
"""
Return a new QuerySet instance that will prefetch the specified
Many-To-One and Many-To-Many related objects when the QuerySet is
evaluated.
When prefetch_related() is called more than once, append to the list of
prefetch lookups. If prefetch_related(None) is called, clear the list.
"""
self._not_support_combined_queries("prefetch_related")
clone = self._chain()
if lookups == (None,):
clone._prefetch_related_lookups = ()
else:
for lookup in lookups:
if isinstance(lookup, Prefetch):
lookup = lookup.prefetch_to
lookup = lookup.split(LOOKUP_SEP, 1)[0]
if lookup in self.query._filtered_relations:
raise ValueError(
"prefetch_related() is not supported with FilteredRelation."
)
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
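# prefetch_related() sketch with a customized Prefetch (hypothetical
# ``Question``/``Choice`` models):
#
#   Question.objects.prefetch_related(
#       Prefetch("choice_set", queryset=Choice.objects.filter(votes__gt=0))
#   )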
def annotate(self, *args, **kwargs):
"""
Return a query set in which the returned objects have been annotated
with extra data or aggregations.
"""
self._not_support_combined_queries("annotate")
return self._annotate(args, kwargs, select=True)
def alias(self, *args, **kwargs):
"""
Return a query set with added aliases for extra data or aggregations.
"""
self._not_support_combined_queries("alias")
return self._annotate(args, kwargs, select=False)
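# annotate()/alias() sketch (hypothetical ``Blog`` model): alias() computes
# the expression without selecting it, so it can be filtered on cheaply.
#
#   from django.db.models import Count
#   Blog.objects.annotate(n_entries=Count("entry")).filter(n_entries__gt=5)
#   Blog.objects.alias(n_entries=Count("entry")).filter(n_entries__gt=5)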
def _annotate(self, args, kwargs, select=True):
self._validate_values_are_expressions(
args + tuple(kwargs.values()), method_name="annotate"
)
annotations = {}
for arg in args:
# The default_alias property may raise a TypeError.
try:
if arg.default_alias in kwargs:
raise ValueError(
"The named annotation '%s' conflicts with the "
"default name for another annotation." % arg.default_alias
)
except TypeError:
raise TypeError("Complex annotations require an alias")
annotations[arg.default_alias] = arg
annotations.update(kwargs)
clone = self._chain()
names = self._fields
if names is None:
names = set(
chain.from_iterable(
(field.name, field.attname)
if hasattr(field, "attname")
else (field.name,)
for field in self.model._meta.get_fields()
)
)
for alias, annotation in annotations.items():
if alias in names:
raise ValueError(
"The annotation '%s' conflicts with a field on "
"the model." % alias
)
if isinstance(annotation, FilteredRelation):
clone.query.add_filtered_relation(annotation, alias)
else:
clone.query.add_annotation(
annotation,
alias,
is_summary=False,
select=select,
)
for alias, annotation in clone.query.annotations.items():
if alias in annotations and annotation.contains_aggregate:
if clone._fields is None:
clone.query.group_by = True
else:
clone.query.set_group_by()
break
return clone
def order_by(self, *field_names):
"""Return a new QuerySet instance with the ordering changed."""
if self.query.is_sliced:
raise TypeError("Cannot reorder a query once a slice has been taken.")
obj = self._chain()
obj.query.clear_ordering(force=True, clear_default=False)
obj.query.add_ordering(*field_names)
return obj
def distinct(self, *field_names):
"""
Return a new QuerySet instance that will select only distinct results.
"""
self._not_support_combined_queries("distinct")
if self.query.is_sliced:
raise TypeError(
"Cannot create distinct fields once a slice has been taken."
)
obj = self._chain()
obj.query.add_distinct_fields(*field_names)
return obj
def extra(
self,
select=None,
where=None,
params=None,
tables=None,
order_by=None,
select_params=None,
):
"""Add extra SQL fragments to the query."""
self._not_support_combined_queries("extra")
if self.query.is_sliced:
raise TypeError("Cannot change a query once a slice has been taken.")
clone = self._chain()
clone.query.add_extra(select, select_params, where, params, tables, order_by)
return clone
def reverse(self):
"""Reverse the ordering of the QuerySet."""
if self.query.is_sliced:
raise TypeError("Cannot reverse a query once a slice has been taken.")
clone = self._chain()
clone.query.standard_ordering = not clone.query.standard_ordering
return clone
def defer(self, *fields):
"""
Defer the loading of data for certain fields until they are accessed.
Add the set of deferred fields to any existing set of deferred fields.
The only exception to this is if None is passed in as the only
parameter, in which case all deferrals are removed.
"""
self._not_support_combined_queries("defer")
if self._fields is not None:
raise TypeError("Cannot call defer() after .values() or .values_list()")
clone = self._chain()
if fields == (None,):
clone.query.clear_deferred_loading()
else:
clone.query.add_deferred_loading(fields)
return clone
def only(self, *fields):
"""
Essentially, the opposite of defer(). Only the fields passed into this
method and that are not already specified as deferred are loaded
immediately when the queryset is evaluated.
"""
self._not_support_combined_queries("only")
if self._fields is not None:
raise TypeError("Cannot call only() after .values() or .values_list()")
if fields == (None,):
# Can only pass None to defer(), not only(), as the reset option.
# That won't stop people trying to do this, so let's be explicit.
raise TypeError("Cannot pass None as an argument to only().")
for field in fields:
field = field.split(LOOKUP_SEP, 1)[0]
if field in self.query._filtered_relations:
raise ValueError("only() is not supported with FilteredRelation.")
clone = self._chain()
clone.query.add_immediate_loading(fields)
return clone
def using(self, alias):
"""Select which database this QuerySet should execute against."""
clone = self._chain()
clone._db = alias
return clone
###################################
# PUBLIC INTROSPECTION ATTRIBUTES #
###################################
@property
def ordered(self):
"""
Return True if the QuerySet is ordered -- i.e. has an order_by()
clause or a default ordering on the model (or is empty).
"""
if isinstance(self, EmptyQuerySet):
return True
if self.query.extra_order_by or self.query.order_by:
return True
elif (
self.query.default_ordering
and self.query.get_meta().ordering
and
# A default ordering doesn't affect GROUP BY queries.
not self.query.group_by
):
return True
else:
return False
@property
def db(self):
"""Return the database used if this query is executed now."""
if self._for_write:
return self._db or router.db_for_write(self.model, **self._hints)
return self._db or router.db_for_read(self.model, **self._hints)
###################
# PRIVATE METHODS #
###################
def _insert(
self,
objs,
fields,
returning_fields=None,
raw=False,
using=None,
on_conflict=None,
update_fields=None,
unique_fields=None,
):
"""
Insert a new record for the given model. This provides an interface to
the InsertQuery class and is how Model.save() is implemented.
"""
self._for_write = True
if using is None:
using = self.db
query = sql.InsertQuery(
self.model,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
query.insert_values(fields, objs, raw=raw)
return query.get_compiler(using=using).execute_sql(returning_fields)
_insert.alters_data = True
_insert.queryset_only = False
def _batched_insert(
self,
objs,
fields,
batch_size,
on_conflict=None,
update_fields=None,
unique_fields=None,
):
"""
Helper method for bulk_create() to insert objs one batch at a time.
"""
connection = connections[self.db]
ops = connection.ops
max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
inserted_rows = []
bulk_return = connection.features.can_return_rows_from_bulk_insert
for item in [objs[i : i + batch_size] for i in range(0, len(objs), batch_size)]:
if bulk_return and on_conflict is None:
inserted_rows.extend(
self._insert(
item,
fields=fields,
using=self.db,
returning_fields=self.model._meta.db_returning_fields,
)
)
else:
self._insert(
item,
fields=fields,
using=self.db,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
return inserted_rows
def _chain(self):
"""
Return a copy of the current QuerySet that's ready for another
operation.
"""
obj = self._clone()
if obj._sticky_filter:
obj.query.filter_is_sticky = True
obj._sticky_filter = False
return obj
def _clone(self):
"""
Return a copy of the current QuerySet. A lightweight alternative
to deepcopy().
"""
c = self.__class__(
model=self.model,
query=self.query.chain(),
using=self._db,
hints=self._hints,
)
c._sticky_filter = self._sticky_filter
c._for_write = self._for_write
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
c._known_related_objects = self._known_related_objects
c._iterable_class = self._iterable_class
c._fields = self._fields
return c
def _fetch_all(self):
if self._result_cache is None:
self._result_cache = list(self._iterable_class(self))
if self._prefetch_related_lookups and not self._prefetch_done:
self._prefetch_related_objects()
async def _async_fetch_all(self):
if self._result_cache is None:
self._result_cache = [result async for result in self._iterable_class(self)]
if self._prefetch_related_lookups and not self._prefetch_done:
await sync_to_async(self._prefetch_related_objects)()
def _next_is_sticky(self):
"""
Indicate that the next filter call and the one following that should
be treated as a single filter. This is only important when it comes to
determining when to reuse tables for many-to-many filters. Required so
that we can filter naturally on the results of related managers.
This doesn't return a clone of the current QuerySet (it returns
"self"). The method is only used internally and should be immediately
followed by a filter() that does create a clone.
"""
self._sticky_filter = True
return self
def _merge_sanity_check(self, other):
"""Check that two QuerySet classes may be merged."""
if self._fields is not None and (
set(self.query.values_select) != set(other.query.values_select)
or set(self.query.extra_select) != set(other.query.extra_select)
or set(self.query.annotation_select) != set(other.query.annotation_select)
):
raise TypeError(
"Merging '%s' classes must involve the same values in each case."
% self.__class__.__name__
)
def _merge_known_related_objects(self, other):
"""
Keep track of all known related objects from either QuerySet instance.
"""
for field, objects in other._known_related_objects.items():
self._known_related_objects.setdefault(field, {}).update(objects)
def resolve_expression(self, *args, **kwargs):
if self._fields and len(self._fields) > 1:
# values() querysets can only be used as nested queries
# if they are set up to select only a single field.
raise TypeError("Cannot use multi-field values as a filter value.")
query = self.query.resolve_expression(*args, **kwargs)
query._db = self._db
return query
resolve_expression.queryset_only = True
def _add_hints(self, **hints):
"""
Update hinting information for use by routers. Add new key/values or
overwrite existing key/values.
"""
self._hints.update(hints)
def _has_filters(self):
"""
Check if this QuerySet has any filtering going on. This isn't
equivalent to checking if all objects are present in the results; for
example, qs[1:]._has_filters() -> False.
"""
return self.query.has_filters()
@staticmethod
def _validate_values_are_expressions(values, method_name):
invalid_args = sorted(
str(arg) for arg in values if not hasattr(arg, "resolve_expression")
)
if invalid_args:
raise TypeError(
"QuerySet.%s() received non-expression(s): %s."
% (
method_name,
", ".join(invalid_args),
)
)
def _not_support_combined_queries(self, operation_name):
if self.query.combinator:
raise NotSupportedError(
"Calling QuerySet.%s() after %s() is not supported."
% (operation_name, self.query.combinator)
)
def _check_operator_queryset(self, other, operator_):
if self.query.combinator or other.query.combinator:
raise TypeError(f"Cannot use {operator_} operator with combined queryset.")
class InstanceCheckMeta(type):
def __instancecheck__(self, instance):
return isinstance(instance, QuerySet) and instance.query.is_empty()
class EmptyQuerySet(metaclass=InstanceCheckMeta):
"""
Marker class to check if a queryset is empty by .none():
isinstance(qs.none(), EmptyQuerySet) -> True
"""
def __init__(self, *args, **kwargs):
raise TypeError("EmptyQuerySet can't be instantiated")
class RawQuerySet:
"""
Provide an iterator which converts the results of raw SQL queries into
annotated model instances.
"""
def __init__(
self,
raw_query,
model=None,
query=None,
params=(),
translations=None,
using=None,
hints=None,
):
self.raw_query = raw_query
self.model = model
self._db = using
self._hints = hints or {}
self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params)
self.params = params
self.translations = translations or {}
self._result_cache = None
self._prefetch_related_lookups = ()
self._prefetch_done = False
def resolve_model_init_order(self):
"""Resolve the init field names and value positions."""
converter = connections[self.db].introspection.identifier_converter
model_init_fields = [
f for f in self.model._meta.fields if converter(f.column) in self.columns
]
annotation_fields = [
(column, pos)
for pos, column in enumerate(self.columns)
if column not in self.model_fields
]
model_init_order = [
self.columns.index(converter(f.column)) for f in model_init_fields
]
model_init_names = [f.attname for f in model_init_fields]
return model_init_names, model_init_order, annotation_fields
def prefetch_related(self, *lookups):
"""Same as QuerySet.prefetch_related()"""
clone = self._clone()
if lookups == (None,):
clone._prefetch_related_lookups = ()
else:
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
def _prefetch_related_objects(self):
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
self._prefetch_done = True
def _clone(self):
"""Same as QuerySet._clone()"""
c = self.__class__(
self.raw_query,
model=self.model,
query=self.query,
params=self.params,
translations=self.translations,
using=self._db,
hints=self._hints,
)
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
return c
def _fetch_all(self):
if self._result_cache is None:
self._result_cache = list(self.iterator())
if self._prefetch_related_lookups and not self._prefetch_done:
self._prefetch_related_objects()
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __bool__(self):
self._fetch_all()
return bool(self._result_cache)
def __iter__(self):
self._fetch_all()
return iter(self._result_cache)
def iterator(self):
# Cache some things for performance reasons outside the loop.
db = self.db
connection = connections[db]
compiler = connection.ops.compiler("SQLCompiler")(self.query, connection, db)
query = iter(self.query)
try:
(
model_init_names,
model_init_pos,
annotation_fields,
) = self.resolve_model_init_order()
if self.model._meta.pk.attname not in model_init_names:
raise exceptions.FieldDoesNotExist(
"Raw query must include the primary key"
)
model_cls = self.model
fields = [self.model_fields.get(c) for c in self.columns]
converters = compiler.get_converters(
[f.get_col(f.model._meta.db_table) if f else None for f in fields]
)
if converters:
query = compiler.apply_converters(query, converters)
for values in query:
# Associate fields to values
model_init_values = [values[pos] for pos in model_init_pos]
instance = model_cls.from_db(db, model_init_names, model_init_values)
if annotation_fields:
for column, pos in annotation_fields:
setattr(instance, column, values[pos])
yield instance
finally:
# Done iterating the Query. If it has its own cursor, close it.
if hasattr(self.query, "cursor") and self.query.cursor:
self.query.cursor.close()
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.query)
def __getitem__(self, k):
return list(self)[k]
@property
def db(self):
"""Return the database used if this query is executed now."""
return self._db or router.db_for_read(self.model, **self._hints)
def using(self, alias):
"""Select the database this RawQuerySet should execute against."""
return RawQuerySet(
self.raw_query,
model=self.model,
query=self.query.chain(using=alias),
params=self.params,
translations=self.translations,
using=alias,
)
@cached_property
def columns(self):
"""
A list of column names, with translations applied, in the order
they'll appear in the query results.
"""
columns = self.query.get_columns()
# Adjust any column names which don't match field names
for query_name, model_name in self.translations.items():
# Ignore translations for nonexistent column names
try:
index = columns.index(query_name)
except ValueError:
pass
else:
columns[index] = model_name
return columns
@cached_property
def model_fields(self):
"""A dict mapping column names to model field names."""
converter = connections[self.db].introspection.identifier_converter
model_fields = {}
for field in self.model._meta.fields:
name, column = field.get_attname_column()
model_fields[converter(column)] = field
return model_fields
class Prefetch:
def __init__(self, lookup, queryset=None, to_attr=None):
# `prefetch_through` is the path we traverse to perform the prefetch.
self.prefetch_through = lookup
# `prefetch_to` is the path to the attribute that stores the result.
self.prefetch_to = lookup
if queryset is not None and (
isinstance(queryset, RawQuerySet)
or (
hasattr(queryset, "_iterable_class")
and not issubclass(queryset._iterable_class, ModelIterable)
)
):
raise ValueError(
"Prefetch querysets cannot use raw(), values(), and values_list()."
)
if to_attr:
self.prefetch_to = LOOKUP_SEP.join(
lookup.split(LOOKUP_SEP)[:-1] + [to_attr]
)
self.queryset = queryset
self.to_attr = to_attr
def __getstate__(self):
obj_dict = self.__dict__.copy()
if self.queryset is not None:
queryset = self.queryset._chain()
# Prevent the QuerySet from being evaluated
queryset._result_cache = []
queryset._prefetch_done = True
obj_dict["queryset"] = queryset
return obj_dict
def add_prefix(self, prefix):
self.prefetch_through = prefix + LOOKUP_SEP + self.prefetch_through
self.prefetch_to = prefix + LOOKUP_SEP + self.prefetch_to
def get_current_prefetch_to(self, level):
return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[: level + 1])
def get_current_to_attr(self, level):
parts = self.prefetch_to.split(LOOKUP_SEP)
to_attr = parts[level]
as_attr = self.to_attr and level == len(parts) - 1
return to_attr, as_attr
def get_current_queryset(self, level):
if self.get_current_prefetch_to(level) == self.prefetch_to:
return self.queryset
return None
def __eq__(self, other):
if not isinstance(other, Prefetch):
return NotImplemented
return self.prefetch_to == other.prefetch_to
def __hash__(self):
return hash((self.__class__, self.prefetch_to))
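# Prefetch to_attr sketch (hypothetical ``Question``/``Choice`` models):
# the prefetched rows are stored on a plain list attribute instead of the
# related manager's cache, which is what the prefetch_to rewriting in
# __init__() arranges.
#
#   Question.objects.prefetch_related(
#       Prefetch(
#           "choice_set",
#           queryset=Choice.objects.order_by("-votes"),
#           to_attr="ranked_choices",
#       )
#   )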
def normalize_prefetch_lookups(lookups, prefix=None):
"""Normalize lookups into Prefetch objects."""
ret = []
for lookup in lookups:
if not isinstance(lookup, Prefetch):
lookup = Prefetch(lookup)
if prefix:
lookup.add_prefix(prefix)
ret.append(lookup)
return ret
def prefetch_related_objects(model_instances, *related_lookups):
"""
Populate prefetched object caches for a list of model instances based on
the lookups/Prefetch instances given.
"""
if not model_instances:
return # nothing to do
# We need to be able to dynamically add to the list of prefetch_related
    # lookups that we look up (see below). So we need some bookkeeping to
# ensure we don't do duplicate work.
done_queries = {} # dictionary of things like 'foo__bar': [results]
auto_lookups = set() # we add to this as we go through.
followed_descriptors = set() # recursion protection
all_lookups = normalize_prefetch_lookups(reversed(related_lookups))
while all_lookups:
lookup = all_lookups.pop()
if lookup.prefetch_to in done_queries:
if lookup.queryset is not None:
raise ValueError(
"'%s' lookup was already seen with a different queryset. "
"You may need to adjust the ordering of your lookups."
% lookup.prefetch_to
)
continue
# Top level, the list of objects to decorate is the result cache
# from the primary QuerySet. It won't be for deeper levels.
obj_list = model_instances
through_attrs = lookup.prefetch_through.split(LOOKUP_SEP)
for level, through_attr in enumerate(through_attrs):
# Prepare main instances
if not obj_list:
break
prefetch_to = lookup.get_current_prefetch_to(level)
if prefetch_to in done_queries:
# Skip any prefetching, and any object preparation
obj_list = done_queries[prefetch_to]
continue
# Prepare objects:
good_objects = True
for obj in obj_list:
# Since prefetching can re-use instances, it is possible to have
# the same instance multiple times in obj_list, so obj might
# already be prepared.
if not hasattr(obj, "_prefetched_objects_cache"):
try:
obj._prefetched_objects_cache = {}
except (AttributeError, TypeError):
# Must be an immutable object from
# values_list(flat=True), for example (TypeError) or
# a QuerySet subclass that isn't returning Model
# instances (AttributeError), either in Django or a 3rd
# party. prefetch_related() doesn't make sense, so quit.
good_objects = False
break
if not good_objects:
break
# Descend down tree
# We assume that objects retrieved are homogeneous (which is the premise
            # of prefetch_related), so what applies to the first object applies to all.
first_obj = obj_list[0]
to_attr = lookup.get_current_to_attr(level)[0]
prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(
first_obj, through_attr, to_attr
)
if not attr_found:
raise AttributeError(
"Cannot find '%s' on %s object, '%s' is an invalid "
"parameter to prefetch_related()"
% (
through_attr,
first_obj.__class__.__name__,
lookup.prefetch_through,
)
)
if level == len(through_attrs) - 1 and prefetcher is None:
# Last one, this *must* resolve to something that supports
# prefetching, otherwise there is no point adding it and the
# developer asking for it has made a mistake.
raise ValueError(
"'%s' does not resolve to an item that supports "
"prefetching - this is an invalid parameter to "
"prefetch_related()." % lookup.prefetch_through
)
obj_to_fetch = None
if prefetcher is not None:
obj_to_fetch = [obj for obj in obj_list if not is_fetched(obj)]
if obj_to_fetch:
obj_list, additional_lookups = prefetch_one_level(
obj_to_fetch,
prefetcher,
lookup,
level,
)
# We need to ensure we don't keep adding lookups from the
# same relationships to stop infinite recursion. So, if we
# are already on an automatically added lookup, don't add
# the new lookups from relationships we've seen already.
if not (
prefetch_to in done_queries
and lookup in auto_lookups
and descriptor in followed_descriptors
):
done_queries[prefetch_to] = obj_list
new_lookups = normalize_prefetch_lookups(
reversed(additional_lookups), prefetch_to
)
auto_lookups.update(new_lookups)
all_lookups.extend(new_lookups)
followed_descriptors.add(descriptor)
else:
# Either a singly related object that has already been fetched
# (e.g. via select_related), or hopefully some other property
# that doesn't support prefetching but needs to be traversed.
# We replace the current list of parent objects with the list
# of related objects, filtering out empty or missing values so
# that we can continue with nullable or reverse relations.
new_obj_list = []
for obj in obj_list:
if through_attr in getattr(obj, "_prefetched_objects_cache", ()):
# If related objects have been prefetched, use the
# cache rather than the object's through_attr.
new_obj = list(obj._prefetched_objects_cache.get(through_attr))
else:
try:
new_obj = getattr(obj, through_attr)
except exceptions.ObjectDoesNotExist:
continue
if new_obj is None:
continue
# We special-case `list` rather than something more generic
# like `Iterable` because we don't want to accidentally match
# user models that define __iter__.
if isinstance(new_obj, list):
new_obj_list.extend(new_obj)
else:
new_obj_list.append(new_obj)
obj_list = new_obj_list
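# Usage sketch (hypothetical `Book` model): prefetch_related_objects() is the
# public entry point for populating caches on instances that were fetched
# without prefetch_related(), e.g. after a queryset was already evaluated.
#
#   >>> books = list(Book.objects.all())
#   >>> prefetch_related_objects(books, "author", Prefetch("tags"))
#   >>> books[0].tags.all()  # served from _prefetched_objects_cache, no query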
def get_prefetcher(instance, through_attr, to_attr):
"""
For the attribute 'through_attr' on the given instance, find
an object that has a get_prefetch_queryset().
    Return a 4-tuple containing:
(the object with get_prefetch_queryset (or None),
the descriptor object representing this relationship (or None),
a boolean that is False if the attribute was not found at all,
a function that takes an instance and returns a boolean that is True if
the attribute has already been fetched for that instance)
"""
def has_to_attr_attribute(instance):
return hasattr(instance, to_attr)
prefetcher = None
is_fetched = has_to_attr_attribute
# For singly related objects, we have to avoid getting the attribute
# from the object, as this will trigger the query. So we first try
# on the class, in order to get the descriptor object.
rel_obj_descriptor = getattr(instance.__class__, through_attr, None)
if rel_obj_descriptor is None:
attr_found = hasattr(instance, through_attr)
else:
attr_found = True
if rel_obj_descriptor:
# singly related object, descriptor object has the
# get_prefetch_queryset() method.
if hasattr(rel_obj_descriptor, "get_prefetch_queryset"):
prefetcher = rel_obj_descriptor
is_fetched = rel_obj_descriptor.is_cached
else:
# descriptor doesn't support prefetching, so we go ahead and get
# the attribute on the instance rather than the class to
# support many related managers
rel_obj = getattr(instance, through_attr)
if hasattr(rel_obj, "get_prefetch_queryset"):
prefetcher = rel_obj
if through_attr != to_attr:
# Special case cached_property instances because hasattr
# triggers attribute computation and assignment.
if isinstance(
getattr(instance.__class__, to_attr, None), cached_property
):
def has_cached_property(instance):
return to_attr in instance.__dict__
is_fetched = has_cached_property
else:
def in_prefetched_cache(instance):
return through_attr in instance._prefetched_objects_cache
is_fetched = in_prefetched_cache
return prefetcher, rel_obj_descriptor, attr_found, is_fetched
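# Illustrative summary: for a forward ForeignKey, the descriptor found on the
# class defines get_prefetch_queryset(), so the descriptor itself is the
# prefetcher and is_fetched delegates to its is_cached(). For reverse or
# many-to-many relations the descriptor lacks that method, so the related
# manager fetched from the instance becomes the prefetcher and (absent a
# to_attr) is_fetched typically checks _prefetched_objects_cache.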
def prefetch_one_level(instances, prefetcher, lookup, level):
"""
Helper function for prefetch_related_objects().
Run prefetches on all instances using the prefetcher object,
assigning results to relevant caches in instance.
Return the prefetched objects along with any additional prefetches that
must be done due to prefetch_related lookups found from default managers.
"""
# prefetcher must have a method get_prefetch_queryset() which takes a list
# of instances, and returns a tuple:
# (queryset of instances of self.model that are related to passed in instances,
# callable that gets value to be matched for returned instances,
# callable that gets value to be matched for passed in instances,
# boolean that is True for singly related objects,
# cache or field name to assign to,
# boolean that is True when the previous argument is a cache name vs a field name).
# The 'values to be matched' must be hashable as they will be used
# in a dictionary.
(
rel_qs,
rel_obj_attr,
instance_attr,
single,
cache_name,
is_descriptor,
) = prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level))
# We have to handle the possibility that the QuerySet we just got back
# contains some prefetch_related lookups. We don't want to trigger the
# prefetch_related functionality by evaluating the query. Rather, we need
# to merge in the prefetch_related lookups.
# Copy the lookups in case it is a Prefetch object which could be reused
# later (happens in nested prefetch_related).
additional_lookups = [
copy.copy(additional_lookup)
for additional_lookup in getattr(rel_qs, "_prefetch_related_lookups", ())
]
if additional_lookups:
# Don't need to clone because the manager should have given us a fresh
# instance, so we access an internal instead of using public interface
# for performance reasons.
rel_qs._prefetch_related_lookups = ()
all_related_objects = list(rel_qs)
rel_obj_cache = {}
for rel_obj in all_related_objects:
rel_attr_val = rel_obj_attr(rel_obj)
rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)
to_attr, as_attr = lookup.get_current_to_attr(level)
# Make sure `to_attr` does not conflict with a field.
if as_attr and instances:
# We assume that objects retrieved are homogeneous (which is the premise
        # of prefetch_related), so what applies to the first object applies to all.
model = instances[0].__class__
try:
model._meta.get_field(to_attr)
except exceptions.FieldDoesNotExist:
pass
else:
msg = "to_attr={} conflicts with a field on the {} model."
raise ValueError(msg.format(to_attr, model.__name__))
# Whether or not we're prefetching the last part of the lookup.
leaf = len(lookup.prefetch_through.split(LOOKUP_SEP)) - 1 == level
for obj in instances:
instance_attr_val = instance_attr(obj)
vals = rel_obj_cache.get(instance_attr_val, [])
if single:
val = vals[0] if vals else None
if as_attr:
# A to_attr has been given for the prefetch.
setattr(obj, to_attr, val)
elif is_descriptor:
# cache_name points to a field name in obj.
# This field is a descriptor for a related object.
setattr(obj, cache_name, val)
else:
# No to_attr has been given for this prefetch operation and the
# cache_name does not point to a descriptor. Store the value of
# the field in the object's field cache.
obj._state.fields_cache[cache_name] = val
else:
if as_attr:
setattr(obj, to_attr, vals)
else:
manager = getattr(obj, to_attr)
if leaf and lookup.queryset is not None:
qs = manager._apply_rel_filters(lookup.queryset)
else:
qs = manager.get_queryset()
qs._result_cache = vals
# We don't want the individual qs doing prefetch_related now,
# since we have merged this into the current work.
qs._prefetch_done = True
obj._prefetched_objects_cache[cache_name] = qs
return all_related_objects, additional_lookups
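# Illustrative shape of the get_prefetch_queryset() contract for a reverse
# ForeignKey from a hypothetical Book to Author: rel_qs selects the Books
# whose author is among the given Authors, rel_obj_attr extracts the foreign
# key value from each Book, instance_attr extracts the matching primary key
# from each Author, single is False (many Books per Author), and cache_name
# is the accessor under which the result lands in _prefetched_objects_cache.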
class RelatedPopulator:
"""
RelatedPopulator is used for select_related() object instantiation.
The idea is that each select_related() model will be populated by a
different RelatedPopulator instance. The RelatedPopulator instances get
klass_info and select (computed in SQLCompiler) plus the used db as
input for initialization. That data is used to compute which columns
to use, how to instantiate the model, and how to populate the links
between the objects.
The actual creation of the objects is done in populate() method. This
method gets row and from_obj as input and populates the select_related()
model instance.
"""
def __init__(self, klass_info, select, db):
self.db = db
# Pre-compute needed attributes. The attributes are:
# - model_cls: the possibly deferred model class to instantiate
# - either:
# - cols_start, cols_end: usually the columns in the row are
# in the same order model_cls.__init__ expects them, so we
# can instantiate by model_cls(*row[cols_start:cols_end])
# - reorder_for_init: When select_related descends to a child
# class, then we want to reuse the already selected parent
# data. However, in this case the parent data isn't necessarily
# in the same order that Model.__init__ expects it to be, so
# we have to reorder the parent data. The reorder_for_init
# attribute contains a function used to reorder the field data
# in the order __init__ expects it.
# - pk_idx: the index of the primary key field in the reordered
# model data. Used to check if a related object exists at all.
# - init_list: the field attnames fetched from the database. For
# deferred models this isn't the same as all attnames of the
# model's fields.
# - related_populators: a list of RelatedPopulator instances if
# select_related() descends to related models from this model.
# - local_setter, remote_setter: Methods to set cached values on
# the object being populated and on the remote object. Usually
# these are Field.set_cached_value() methods.
select_fields = klass_info["select_fields"]
from_parent = klass_info["from_parent"]
if not from_parent:
self.cols_start = select_fields[0]
self.cols_end = select_fields[-1] + 1
self.init_list = [
f[0].target.attname for f in select[self.cols_start : self.cols_end]
]
self.reorder_for_init = None
else:
attname_indexes = {
select[idx][0].target.attname: idx for idx in select_fields
}
model_init_attnames = (
f.attname for f in klass_info["model"]._meta.concrete_fields
)
self.init_list = [
attname for attname in model_init_attnames if attname in attname_indexes
]
self.reorder_for_init = operator.itemgetter(
*[attname_indexes[attname] for attname in self.init_list]
)
self.model_cls = klass_info["model"]
self.pk_idx = self.init_list.index(self.model_cls._meta.pk.attname)
self.related_populators = get_related_populators(klass_info, select, self.db)
self.local_setter = klass_info["local_setter"]
self.remote_setter = klass_info["remote_setter"]
def populate(self, row, from_obj):
if self.reorder_for_init:
obj_data = self.reorder_for_init(row)
else:
obj_data = row[self.cols_start : self.cols_end]
if obj_data[self.pk_idx] is None:
obj = None
else:
obj = self.model_cls.from_db(self.db, self.init_list, obj_data)
for rel_iter in self.related_populators:
rel_iter.populate(row, obj)
self.local_setter(from_obj, obj)
if obj is not None:
self.remote_setter(obj, from_obj)
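# Illustrative flow: for Book.objects.select_related("author") (hypothetical
# models), the compiler builds one RelatedPopulator for Author. For each row,
# populate() slices out the author columns (or reorders them when descending
# to a child class), instantiates the Author unless its pk column is NULL,
# and wires both sides of the relation via local_setter/remote_setter.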
def get_related_populators(klass_info, select, db):
iterators = []
related_klass_infos = klass_info.get("related_klass_infos", [])
for rel_klass_info in related_klass_infos:
rel_cls = RelatedPopulator(rel_klass_info, select, db)
iterators.append(rel_cls)
return iterators
|
8fabf9537c1ac5713eda119ef8869e22c08ec58b2f29bf1e0f44dbe1eb3913ac | import copy
import inspect
import warnings
from functools import partialmethod
from itertools import chain
import django
from django.apps import apps
from django.conf import settings
from django.core import checks
from django.core.exceptions import (
NON_FIELD_ERRORS,
FieldDoesNotExist,
FieldError,
MultipleObjectsReturned,
ObjectDoesNotExist,
ValidationError,
)
from django.db import (
DJANGO_VERSION_PICKLE_KEY,
DatabaseError,
connection,
connections,
router,
transaction,
)
from django.db.models import NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value
from django.db.models.constants import LOOKUP_SEP
from django.db.models.constraints import CheckConstraint, UniqueConstraint
from django.db.models.deletion import CASCADE, Collector
from django.db.models.fields.related import (
ForeignObjectRel,
OneToOneField,
lazy_related_operation,
resolve_relation,
)
from django.db.models.functions import Coalesce
from django.db.models.manager import Manager
from django.db.models.options import Options
from django.db.models.query import F, Q
from django.db.models.signals import (
class_prepared,
post_init,
post_save,
pre_init,
pre_save,
)
from django.db.models.utils import make_model_tuple
from django.utils.encoding import force_str
from django.utils.hashable import make_hashable
from django.utils.text import capfirst, get_text_list
from django.utils.translation import gettext_lazy as _
class Deferred:
def __repr__(self):
return "<Deferred field>"
def __str__(self):
return "<Deferred field>"
DEFERRED = Deferred()
def subclass_exception(name, bases, module, attached_to):
"""
Create exception subclass. Used by ModelBase below.
The exception is created in a way that allows it to be pickled, assuming
that the returned exception class will be added as an attribute to the
'attached_to' class.
"""
return type(
name,
bases,
{
"__module__": module,
"__qualname__": "%s.%s" % (attached_to.__qualname__, name),
},
)
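# A minimal sketch of what subclass_exception() produces for a hypothetical
# `Article` model in a "news" app:
#
#   >>> exc = subclass_exception(
#   ...     "DoesNotExist", (ObjectDoesNotExist,), "news.models", Article
#   ... )
#   >>> exc.__qualname__
#   'Article.DoesNotExist'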
def _has_contribute_to_class(value):
# Only call contribute_to_class() if it's bound.
return not inspect.isclass(value) and hasattr(value, "contribute_to_class")
class ModelBase(type):
"""Metaclass for all models."""
def __new__(cls, name, bases, attrs, **kwargs):
super_new = super().__new__
# Also ensure initialization is only performed for subclasses of Model
# (excluding Model class itself).
parents = [b for b in bases if isinstance(b, ModelBase)]
if not parents:
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop("__module__")
new_attrs = {"__module__": module}
classcell = attrs.pop("__classcell__", None)
if classcell is not None:
new_attrs["__classcell__"] = classcell
attr_meta = attrs.pop("Meta", None)
# Pass all attrs without a (Django-specific) contribute_to_class()
# method to type.__new__() so that they're properly initialized
# (i.e. __set_name__()).
contributable_attrs = {}
for obj_name, obj in attrs.items():
if _has_contribute_to_class(obj):
contributable_attrs[obj_name] = obj
else:
new_attrs[obj_name] = obj
new_class = super_new(cls, name, bases, new_attrs, **kwargs)
abstract = getattr(attr_meta, "abstract", False)
meta = attr_meta or getattr(new_class, "Meta", None)
base_meta = getattr(new_class, "_meta", None)
app_label = None
# Look for an application configuration to attach the model to.
app_config = apps.get_containing_app_config(module)
if getattr(meta, "app_label", None) is None:
if app_config is None:
if not abstract:
raise RuntimeError(
"Model class %s.%s doesn't declare an explicit "
"app_label and isn't in an application in "
"INSTALLED_APPS." % (module, name)
)
else:
app_label = app_config.label
new_class.add_to_class("_meta", Options(meta, app_label))
if not abstract:
new_class.add_to_class(
"DoesNotExist",
subclass_exception(
"DoesNotExist",
tuple(
x.DoesNotExist
for x in parents
if hasattr(x, "_meta") and not x._meta.abstract
)
or (ObjectDoesNotExist,),
module,
attached_to=new_class,
),
)
new_class.add_to_class(
"MultipleObjectsReturned",
subclass_exception(
"MultipleObjectsReturned",
tuple(
x.MultipleObjectsReturned
for x in parents
if hasattr(x, "_meta") and not x._meta.abstract
)
or (MultipleObjectsReturned,),
module,
attached_to=new_class,
),
)
if base_meta and not base_meta.abstract:
# Non-abstract child classes inherit some attributes from their
# non-abstract parent (unless an ABC comes before it in the
# method resolution order).
if not hasattr(meta, "ordering"):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, "get_latest_by"):
new_class._meta.get_latest_by = base_meta.get_latest_by
is_proxy = new_class._meta.proxy
# If the model is a proxy, ensure that the base class
# hasn't been swapped out.
if is_proxy and base_meta and base_meta.swapped:
raise TypeError(
"%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)
)
# Add remaining attributes (those with a contribute_to_class() method)
# to the class.
for obj_name, obj in contributable_attrs.items():
new_class.add_to_class(obj_name, obj)
# All the fields of any type declared on this model
new_fields = chain(
new_class._meta.local_fields,
new_class._meta.local_many_to_many,
new_class._meta.private_fields,
)
field_names = {f.name for f in new_fields}
# Basic setup for proxy models.
if is_proxy:
base = None
for parent in [kls for kls in parents if hasattr(kls, "_meta")]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError(
"Abstract base class containing model fields not "
"permitted for proxy model '%s'." % name
)
else:
continue
if base is None:
base = parent
elif parent._meta.concrete_model is not base._meta.concrete_model:
raise TypeError(
"Proxy model '%s' has more than one non-abstract model base "
"class." % name
)
if base is None:
raise TypeError(
"Proxy model '%s' has no non-abstract model base class." % name
)
new_class._meta.setup_proxy(base)
new_class._meta.concrete_model = base._meta.concrete_model
else:
new_class._meta.concrete_model = new_class
# Collect the parent links for multi-table inheritance.
parent_links = {}
for base in reversed([new_class] + parents):
# Conceptually equivalent to `if base is Model`.
if not hasattr(base, "_meta"):
continue
# Skip concrete parent classes.
if base != new_class and not base._meta.abstract:
continue
# Locate OneToOneField instances.
for field in base._meta.local_fields:
if isinstance(field, OneToOneField) and field.remote_field.parent_link:
related = resolve_relation(new_class, field.remote_field.model)
parent_links[make_model_tuple(related)] = field
# Track fields inherited from base models.
inherited_attributes = set()
# Do the appropriate setup for any model parents.
for base in new_class.mro():
if base not in parents or not hasattr(base, "_meta"):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
inherited_attributes.update(base.__dict__)
continue
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
if not base._meta.abstract:
# Check for clashes between locally declared fields and those
# on the base classes.
for field in parent_fields:
if field.name in field_names:
raise FieldError(
"Local field %r in class %r clashes with field of "
"the same name from base class %r."
% (
field.name,
name,
base.__name__,
)
)
else:
inherited_attributes.add(field.name)
# Concrete classes...
base = base._meta.concrete_model
base_key = make_model_tuple(base)
if base_key in parent_links:
field = parent_links[base_key]
elif not is_proxy:
attr_name = "%s_ptr" % base._meta.model_name
field = OneToOneField(
base,
on_delete=CASCADE,
name=attr_name,
auto_created=True,
parent_link=True,
)
if attr_name in field_names:
raise FieldError(
"Auto-generated field '%s' in class %r for "
"parent_link to base class %r clashes with "
"declared field of the same name."
% (
attr_name,
name,
base.__name__,
)
)
# Only add the ptr field if it's not already present;
# e.g. migrations will already have it specified
if not hasattr(new_class, attr_name):
new_class.add_to_class(attr_name, field)
else:
field = None
new_class._meta.parents[base] = field
else:
base_parents = base._meta.parents.copy()
# Add fields from abstract base class if it wasn't overridden.
for field in parent_fields:
if (
field.name not in field_names
and field.name not in new_class.__dict__
and field.name not in inherited_attributes
):
new_field = copy.deepcopy(field)
new_class.add_to_class(field.name, new_field)
# Replace parent links defined on this base by the new
# field. It will be appropriately resolved if required.
if field.one_to_one:
for parent, parent_link in base_parents.items():
if field == parent_link:
base_parents[parent] = new_field
# Pass any non-abstract parent classes onto child.
new_class._meta.parents.update(base_parents)
# Inherit private fields (like GenericForeignKey) from the parent
# class
for field in base._meta.private_fields:
if field.name in field_names:
if not base._meta.abstract:
raise FieldError(
"Local field %r in class %r clashes with field of "
"the same name from base class %r."
% (
field.name,
name,
base.__name__,
)
)
else:
field = copy.deepcopy(field)
if not base._meta.abstract:
field.mti_inherited = True
new_class.add_to_class(field.name, field)
# Copy indexes so that index names are unique when models extend an
# abstract model.
new_class._meta.indexes = [
copy.deepcopy(idx) for idx in new_class._meta.indexes
]
if abstract:
# Abstract base models can't be instantiated and don't appear in
# the list of models for an app. We do the final setup for them a
# little differently from normal models.
attr_meta.abstract = False
new_class.Meta = attr_meta
return new_class
new_class._prepare()
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
return new_class
def add_to_class(cls, name, value):
if _has_contribute_to_class(value):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
def _prepare(cls):
"""Create some methods once self._meta has been populated."""
opts = cls._meta
opts._prepare(cls)
if opts.order_with_respect_to:
cls.get_next_in_order = partialmethod(
cls._get_next_or_previous_in_order, is_next=True
)
cls.get_previous_in_order = partialmethod(
cls._get_next_or_previous_in_order, is_next=False
)
# Defer creating accessors on the foreign class until it has been
# created and registered. If remote_field is None, we're ordering
# with respect to a GenericForeignKey and don't know what the
# foreign class is - we'll add those accessors later in
# contribute_to_class().
if opts.order_with_respect_to.remote_field:
wrt = opts.order_with_respect_to
remote = wrt.remote_field.model
lazy_related_operation(make_foreign_order_accessors, cls, remote)
# Give the class a docstring -- its definition.
if cls.__doc__ is None:
cls.__doc__ = "%s(%s)" % (
cls.__name__,
", ".join(f.name for f in opts.fields),
)
get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(
opts.label_lower
)
if get_absolute_url_override:
setattr(cls, "get_absolute_url", get_absolute_url_override)
if not opts.managers:
if any(f.name == "objects" for f in opts.fields):
raise ValueError(
"Model %s must specify a custom Manager, because it has a "
"field named 'objects'." % cls.__name__
)
manager = Manager()
manager.auto_created = True
cls.add_to_class("objects", manager)
# Set the name of _meta.indexes. This can't be done in
# Options.contribute_to_class() because fields haven't been added to
# the model at that point.
for index in cls._meta.indexes:
if not index.name:
index.set_name_with_model(cls)
class_prepared.send(sender=cls)
@property
def _base_manager(cls):
return cls._meta.base_manager
@property
def _default_manager(cls):
return cls._meta.default_manager
class ModelStateCacheDescriptor:
"""
Upon first access, replace itself with an empty dictionary on the instance.
"""
def __set_name__(self, owner, name):
self.attribute_name = name
def __get__(self, instance, cls=None):
if instance is None:
return self
res = instance.__dict__[self.attribute_name] = {}
return res
class ModelState:
"""Store model instance state."""
db = None
# If true, uniqueness validation checks will consider this a new, unsaved
# object. Necessary for correct validation of new instances of objects with
# explicit (non-auto) PKs. This impacts validation only; it has no effect
# on the actual save.
adding = True
fields_cache = ModelStateCacheDescriptor()
related_managers_cache = ModelStateCacheDescriptor()
def __getstate__(self):
state = self.__dict__.copy()
if "fields_cache" in state:
state["fields_cache"] = self.fields_cache.copy()
# Manager instances stored in related_managers_cache won't necessarily
# be deserializable if they were dynamically created via an inner
# scope, e.g. create_forward_many_to_many_manager() and
# create_generic_related_manager().
if "related_managers_cache" in state:
state["related_managers_cache"] = {}
return state
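# Sketch of the descriptor's lazy behavior: the first access writes an empty
# dict into the instance __dict__, so later accesses bypass the descriptor.
#
#   >>> state = ModelState()
#   >>> "fields_cache" in state.__dict__
#   False
#   >>> state.fields_cache
#   {}
#   >>> "fields_cache" in state.__dict__
#   True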
class Model(metaclass=ModelBase):
def __init__(self, *args, **kwargs):
# Alias some things as locals to avoid repeat global lookups
cls = self.__class__
opts = self._meta
_setattr = setattr
_DEFERRED = DEFERRED
if opts.abstract:
raise TypeError("Abstract models cannot be instantiated.")
pre_init.send(sender=cls, args=args, kwargs=kwargs)
# Set up the storage for instance state
self._state = ModelState()
        # There is a rather weird disparity here; if kwargs, it's set, then args
        # overrides it. It should be one or the other; don't duplicate the work.
        # The reason for the kwargs check is that the standard iterator passes
        # values in via args, and instantiation for iteration is 33% faster.
if len(args) > len(opts.concrete_fields):
# Daft, but matches old exception sans the err msg.
raise IndexError("Number of args exceeds number of fields")
if not kwargs:
fields_iter = iter(opts.concrete_fields)
            # The ordering of the zip calls matters - zip throws StopIteration
# when an iter throws it. So if the first iter throws it, the second
# is *not* consumed. We rely on this, so don't change the order
# without changing the logic.
for val, field in zip(args, fields_iter):
if val is _DEFERRED:
continue
_setattr(self, field.attname, val)
else:
# Slower, kwargs-ready version.
fields_iter = iter(opts.fields)
for val, field in zip(args, fields_iter):
if val is _DEFERRED:
continue
_setattr(self, field.attname, val)
if kwargs.pop(field.name, NOT_PROVIDED) is not NOT_PROVIDED:
raise TypeError(
f"{cls.__qualname__}() got both positional and "
f"keyword arguments for field '{field.name}'."
)
# Now we're left with the unprocessed fields that *must* come from
# keywords, or default.
for field in fields_iter:
is_related_object = False
# Virtual field
if field.attname not in kwargs and field.column is None:
continue
if kwargs:
if isinstance(field.remote_field, ForeignObjectRel):
try:
# Assume object instance was passed in.
rel_obj = kwargs.pop(field.name)
is_related_object = True
except KeyError:
try:
# Object instance wasn't passed in -- must be an ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
else:
try:
val = kwargs.pop(field.attname)
except KeyError:
# This is done with an exception rather than the
# default argument on pop because we don't want
# get_default() to be evaluated, and then not used.
# Refs #12057.
val = field.get_default()
else:
val = field.get_default()
if is_related_object:
# If we are passed a related instance, set it using the
# field.name instead of field.attname (e.g. "user" instead of
# "user_id") so that the object gets properly cached (and type
# checked) by the RelatedObjectDescriptor.
if rel_obj is not _DEFERRED:
_setattr(self, field.name, rel_obj)
else:
if val is not _DEFERRED:
_setattr(self, field.attname, val)
if kwargs:
property_names = opts._property_names
unexpected = ()
for prop, value in kwargs.items():
# Any remaining kwargs must correspond to properties or virtual
# fields.
if prop in property_names:
if value is not _DEFERRED:
_setattr(self, prop, value)
else:
try:
opts.get_field(prop)
except FieldDoesNotExist:
unexpected += (prop,)
else:
if value is not _DEFERRED:
_setattr(self, prop, value)
if unexpected:
unexpected_names = ", ".join(repr(n) for n in unexpected)
raise TypeError(
f"{cls.__name__}() got unexpected keyword arguments: "
f"{unexpected_names}"
)
super().__init__()
post_init.send(sender=cls, instance=self)
@classmethod
def from_db(cls, db, field_names, values):
if len(values) != len(cls._meta.concrete_fields):
values_iter = iter(values)
values = [
next(values_iter) if f.attname in field_names else DEFERRED
for f in cls._meta.concrete_fields
]
new = cls(*values)
new._state.adding = False
new._state.db = db
return new
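    # Illustrative note (hypothetical `Book` model): when a query loads only a
    # subset of fields, e.g. via only()/defer(), from_db() pads the missing
    # positions with DEFERRED so __init__ skips setting them.
    #
    #   >>> obj = Book.from_db("default", ["id", "title"], [1, "Django"])
    #   >>> obj._state.adding, obj._state.db
    #   (False, 'default')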
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def __str__(self):
return "%s object (%s)" % (self.__class__.__name__, self.pk)
def __eq__(self, other):
if not isinstance(other, Model):
return NotImplemented
if self._meta.concrete_model != other._meta.concrete_model:
return False
my_pk = self.pk
if my_pk is None:
return self is other
return my_pk == other.pk
def __hash__(self):
if self.pk is None:
raise TypeError("Model instances without primary key value are unhashable")
return hash(self.pk)
def __reduce__(self):
data = self.__getstate__()
data[DJANGO_VERSION_PICKLE_KEY] = django.__version__
class_id = self._meta.app_label, self._meta.object_name
return model_unpickle, (class_id,), data
def __getstate__(self):
"""Hook to allow choosing the attributes to pickle."""
state = self.__dict__.copy()
state["_state"] = copy.copy(state["_state"])
# memoryview cannot be pickled, so cast it to bytes and store
# separately.
_memoryview_attrs = []
for attr, value in state.items():
if isinstance(value, memoryview):
_memoryview_attrs.append((attr, bytes(value)))
if _memoryview_attrs:
state["_memoryview_attrs"] = _memoryview_attrs
for attr, value in _memoryview_attrs:
state.pop(attr)
return state
def __setstate__(self, state):
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
if pickled_version != django.__version__:
warnings.warn(
"Pickled model instance's Django version %s does not "
"match the current version %s."
% (pickled_version, django.__version__),
RuntimeWarning,
stacklevel=2,
)
else:
warnings.warn(
"Pickled model instance's Django version is not specified.",
RuntimeWarning,
stacklevel=2,
)
if "_memoryview_attrs" in state:
for attr, value in state.pop("_memoryview_attrs"):
state[attr] = memoryview(value)
self.__dict__.update(state)
def _get_pk_val(self, meta=None):
meta = meta or self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
for parent_link in self._meta.parents.values():
if parent_link and parent_link != self._meta.pk:
setattr(self, parent_link.target_field.attname, value)
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def get_deferred_fields(self):
"""
Return a set containing names of deferred fields for this instance.
"""
return {
f.attname
for f in self._meta.concrete_fields
if f.attname not in self.__dict__
}
def refresh_from_db(self, using=None, fields=None):
"""
Reload field values from the database.
By default, the reloading happens from the database this instance was
loaded from, or by the read router if this instance wasn't loaded from
any database. The using parameter will override the default.
Fields can be used to specify which fields to reload. The fields
should be an iterable of field attnames. If fields is None, then
all non-deferred fields are reloaded.
When accessing deferred fields of an instance, the deferred loading
of the field will call this method.
"""
if fields is None:
self._prefetched_objects_cache = {}
else:
prefetched_objects_cache = getattr(self, "_prefetched_objects_cache", ())
for field in fields:
if field in prefetched_objects_cache:
del prefetched_objects_cache[field]
fields.remove(field)
if not fields:
return
if any(LOOKUP_SEP in f for f in fields):
raise ValueError(
'Found "%s" in fields argument. Relations and transforms '
"are not allowed in fields." % LOOKUP_SEP
)
hints = {"instance": self}
db_instance_qs = self.__class__._base_manager.db_manager(
using, hints=hints
).filter(pk=self.pk)
# Use provided fields, if not set then reload all non-deferred fields.
deferred_fields = self.get_deferred_fields()
if fields is not None:
fields = list(fields)
db_instance_qs = db_instance_qs.only(*fields)
elif deferred_fields:
fields = [
f.attname
for f in self._meta.concrete_fields
if f.attname not in deferred_fields
]
db_instance_qs = db_instance_qs.only(*fields)
db_instance = db_instance_qs.get()
non_loaded_fields = db_instance.get_deferred_fields()
for field in self._meta.concrete_fields:
if field.attname in non_loaded_fields:
# This field wasn't refreshed - skip ahead.
continue
setattr(self, field.attname, getattr(db_instance, field.attname))
# Clear cached foreign keys.
if field.is_relation and field.is_cached(self):
field.delete_cached_value(self)
# Clear cached relations.
for field in self._meta.related_objects:
if field.is_cached(self):
field.delete_cached_value(self)
self._state.db = db_instance._state.db
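    # Usage sketch (hypothetical saved instance): reload a single column, or
    # reload from a specific database.
    #
    #   >>> book.refresh_from_db(fields=["title"])
    #   >>> book.refresh_from_db(using="replica")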
def serializable_value(self, field_name):
"""
Return the value of the field name for this instance. If the field is
a foreign key, return the id value instead of the object. If there's
no Field object with this name on the model, return the model
attribute's value.
Used to serialize a field's value (in the serializer, or form output,
for example). Normally, you would just access the attribute directly
and not use this method.
"""
try:
field = self._meta.get_field(field_name)
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
def save(
self, force_insert=False, force_update=False, using=None, update_fields=None
):
"""
Save the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be an SQL insert or update (or equivalent for
non-SQL backends), respectively. Normally, they should not be set.
"""
self._prepare_related_fields_for_save(operation_name="save")
using = using or router.db_for_write(self.__class__, instance=self)
if force_insert and (force_update or update_fields):
raise ValueError("Cannot force both insert and updating in model saving.")
deferred_fields = self.get_deferred_fields()
if update_fields is not None:
# If update_fields is empty, skip the save. We do also check for
# no-op saves later on for inheritance cases. This bailout is
# still needed for skipping signal sending.
if not update_fields:
return
update_fields = frozenset(update_fields)
field_names = set()
for field in self._meta.concrete_fields:
if not field.primary_key:
field_names.add(field.name)
if field.name != field.attname:
field_names.add(field.attname)
non_model_fields = update_fields.difference(field_names)
if non_model_fields:
raise ValueError(
"The following fields do not exist in this model, are m2m "
"fields, or are non-concrete fields: %s"
% ", ".join(non_model_fields)
)
# If saving to the same database, and this model is deferred, then
# automatically do an "update_fields" save on the loaded fields.
elif not force_insert and deferred_fields and using == self._state.db:
field_names = set()
for field in self._meta.concrete_fields:
if not field.primary_key and not hasattr(field, "through"):
field_names.add(field.attname)
loaded_fields = field_names.difference(deferred_fields)
if loaded_fields:
update_fields = frozenset(loaded_fields)
self.save_base(
using=using,
force_insert=force_insert,
force_update=force_update,
update_fields=update_fields,
)
save.alters_data = True
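    # Usage sketch (hypothetical `Book` model): update_fields narrows the
    # UPDATE to the named columns; force_insert/force_update pin the SQL
    # operation instead of letting save() choose.
    #
    #   >>> book.title = "New title"
    #   >>> book.save(update_fields=["title"])
    #   >>> Book(title="Fresh").save(force_insert=True)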
def save_base(
self,
raw=False,
force_insert=False,
force_update=False,
using=None,
update_fields=None,
):
"""
Handle the parts of saving which should be done only once per save,
yet need to be done in raw saves, too. This includes some sanity
checks and signal sending.
The 'raw' argument is telling save_base not to save any parent
models and not to do any changes to the values before save. This
is used by fixture loading.
"""
using = using or router.db_for_write(self.__class__, instance=self)
assert not (force_insert and (force_update or update_fields))
assert update_fields is None or update_fields
cls = origin = self.__class__
# Skip proxies, but keep the origin as the proxy model.
if cls._meta.proxy:
cls = cls._meta.concrete_model
meta = cls._meta
if not meta.auto_created:
pre_save.send(
sender=origin,
instance=self,
raw=raw,
using=using,
update_fields=update_fields,
)
# A transaction isn't needed if one query is issued.
if meta.parents:
context_manager = transaction.atomic(using=using, savepoint=False)
else:
context_manager = transaction.mark_for_rollback_on_error(using=using)
with context_manager:
parent_inserted = False
if not raw:
parent_inserted = self._save_parents(cls, using, update_fields)
updated = self._save_table(
raw,
cls,
force_insert or parent_inserted,
force_update,
using,
update_fields,
)
# Store the database on which the object was saved
self._state.db = using
# Once saved, this is no longer a to-be-added instance.
self._state.adding = False
# Signal that the save is complete
if not meta.auto_created:
post_save.send(
sender=origin,
instance=self,
created=(not updated),
update_fields=update_fields,
raw=raw,
using=using,
)
save_base.alters_data = True
def _save_parents(self, cls, using, update_fields):
"""Save all the parents of cls using values from self."""
meta = cls._meta
inserted = False
for parent, field in meta.parents.items():
# Make sure the link fields are synced between parent and self.
if (
field
and getattr(self, parent._meta.pk.attname) is None
and getattr(self, field.attname) is not None
):
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
parent_inserted = self._save_parents(
cls=parent, using=using, update_fields=update_fields
)
updated = self._save_table(
cls=parent,
using=using,
update_fields=update_fields,
force_insert=parent_inserted,
)
if not updated:
inserted = True
# Set the parent's PK value to self.
if field:
setattr(self, field.attname, self._get_pk_val(parent._meta))
                # Since we didn't have an instance of the parent handy, set
# attname directly, bypassing the descriptor. Invalidate
# the related object cache, in case it's been accidentally
# populated. A fresh instance will be re-built from the
# database if necessary.
if field.is_cached(self):
field.delete_cached_value(self)
return inserted
def _save_table(
self,
raw=False,
cls=None,
force_insert=False,
force_update=False,
using=None,
update_fields=None,
):
"""
Do the heavy-lifting involved in saving. Update or insert the data
for a single table.
"""
meta = cls._meta
non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]
if update_fields:
non_pks = [
f
for f in non_pks
if f.name in update_fields or f.attname in update_fields
]
pk_val = self._get_pk_val(meta)
if pk_val is None:
pk_val = meta.pk.get_pk_value_on_save(self)
setattr(self, meta.pk.attname, pk_val)
pk_set = pk_val is not None
if not pk_set and (force_update or update_fields):
raise ValueError("Cannot force an update in save() with no primary key.")
updated = False
# Skip an UPDATE when adding an instance and primary key has a default.
if (
not raw
and not force_insert
and self._state.adding
and meta.pk.default
and meta.pk.default is not NOT_PROVIDED
):
force_insert = True
# If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
if pk_set and not force_insert:
base_qs = cls._base_manager.using(using)
values = [
(
f,
None,
(getattr(self, f.attname) if raw else f.pre_save(self, False)),
)
for f in non_pks
]
forced_update = update_fields or force_update
updated = self._do_update(
base_qs, using, pk_val, values, update_fields, forced_update
)
if force_update and not updated:
raise DatabaseError("Forced update did not affect any rows.")
if update_fields and not updated:
raise DatabaseError("Save with update_fields did not affect any rows.")
if not updated:
if meta.order_with_respect_to:
# If this is a model with an order_with_respect_to
# autopopulate the _order field
field = meta.order_with_respect_to
filter_args = field.get_filter_kwargs_for_object(self)
self._order = (
cls._base_manager.using(using)
.filter(**filter_args)
.aggregate(
_order__max=Coalesce(
ExpressionWrapper(
Max("_order") + Value(1), output_field=IntegerField()
),
Value(0),
),
)["_order__max"]
)
fields = meta.local_concrete_fields
if not pk_set:
fields = [f for f in fields if f is not meta.auto_field]
returning_fields = meta.db_returning_fields
results = self._do_insert(
cls._base_manager, using, fields, returning_fields, raw
)
if results:
for value, field in zip(results[0], returning_fields):
setattr(self, field.attname, value)
return updated
def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
"""
Try to update the model. Return True if the model was updated (if an
update query was done and a matching row was found in the DB).
"""
filtered = base_qs.filter(pk=pk_val)
if not values:
# We can end up here when saving a model in inheritance chain where
# update_fields doesn't target any field in current model. In that
# case we just say the update succeeded. Another case ending up here
# is a model with just PK - in that case check that the PK still
# exists.
return update_fields is not None or filtered.exists()
if self._meta.select_on_save and not forced_update:
return (
filtered.exists()
and
# It may happen that the object is deleted from the DB right after
# this check, causing the subsequent UPDATE to return zero matching
# rows. The same result can occur in some rare cases when the
# database returns zero despite the UPDATE being executed
# successfully (a row is matched and updated). In order to
# distinguish these two cases, the object's existence in the
# database is again checked for if the UPDATE query returns 0.
(filtered._update(values) > 0 or filtered.exists())
)
return filtered._update(values) > 0
def _do_insert(self, manager, using, fields, returning_fields, raw):
"""
Do an INSERT. If returning_fields is defined then this method should
return the newly created data for the model.
"""
return manager._insert(
[self],
fields=fields,
returning_fields=returning_fields,
using=using,
raw=raw,
)
def _prepare_related_fields_for_save(self, operation_name, fields=None):
# Ensure that a model instance without a PK hasn't been assigned to
# a ForeignKey, GenericForeignKey or OneToOneField on this model. If
# the field is nullable, allowing the save would result in silent data
# loss.
for field in self._meta.concrete_fields:
if fields and field not in fields:
continue
# If the related field isn't cached, then an instance hasn't been
# assigned and there's no need to worry about this check.
if field.is_relation and field.is_cached(self):
obj = getattr(self, field.name, None)
if not obj:
continue
# A pk may have been assigned manually to a model instance not
# saved to the database (or auto-generated in a case like
# UUIDField), but we allow the save to proceed and rely on the
# database to raise an IntegrityError if applicable. If
# constraints aren't supported by the database, there's the
# unavoidable risk of data corruption.
if obj.pk is None:
# Remove the object from a related instance cache.
if not field.remote_field.multiple:
field.remote_field.delete_cached_value(obj)
raise ValueError(
"%s() prohibited to prevent data loss due to unsaved "
"related object '%s'." % (operation_name, field.name)
)
elif getattr(self, field.attname) in field.empty_values:
# Use pk from related object if it has been saved after
# an assignment.
setattr(self, field.attname, obj.pk)
# If the relationship's pk/to_field was changed, clear the
# cached relationship.
if getattr(obj, field.target_field.attname) != getattr(
self, field.attname
):
field.delete_cached_value(self)
# GenericForeignKeys are private.
for field in self._meta.private_fields:
if fields and field not in fields:
continue
if (
field.is_relation
and field.is_cached(self)
and hasattr(field, "fk_field")
):
obj = field.get_cached_value(self, default=None)
if obj and obj.pk is None:
raise ValueError(
f"{operation_name}() prohibited to prevent data loss due to "
f"unsaved related object '{field.name}'."
)
def delete(self, using=None, keep_parents=False):
if self.pk is None:
raise ValueError(
"%s object can't be deleted because its %s attribute is set "
"to None." % (self._meta.object_name, self._meta.pk.attname)
)
using = using or router.db_for_write(self.__class__, instance=self)
collector = Collector(using=using, origin=self)
collector.collect([self], keep_parents=keep_parents)
return collector.delete()
delete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
choices_dict = dict(make_hashable(field.flatchoices))
# force_str() to coerce lazy strings.
return force_str(
choices_dict.get(make_hashable(value), value), strings_only=True
)
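    # Sketch: for a hypothetical field
    # `status = CharField(choices=[("d", "Draft"), ("p", "Published")])`,
    # this backs the generated get_status_display() accessor:
    #
    #   >>> article.get_status_display()
    #   'Published'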
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
if not self.pk:
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
op = "gt" if is_next else "lt"
order = "" if is_next else "-"
param = getattr(self, field.attname)
q = Q((field.name, param), (f"pk__{op}", self.pk), _connector=Q.AND)
q = Q(q, (f"{field.name}__{op}", param), _connector=Q.OR)
qs = (
self.__class__._default_manager.using(self._state.db)
.filter(**kwargs)
.filter(q)
.order_by("%s%s" % (order, field.name), "%spk" % order)
)
try:
return qs[0]
except IndexError:
raise self.DoesNotExist(
"%s matching query does not exist." % self.__class__._meta.object_name
)
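    # Sketch: for a DateField such as `pub_date` on a hypothetical Article
    # model, this backs the generated get_next_by_pub_date() and
    # get_previous_by_pub_date() accessors on saved instances.
    #
    #   >>> article.get_next_by_pub_date()
    #   <Article: ...>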
def _get_next_or_previous_in_order(self, is_next):
cachename = "__%s_order_cache" % is_next
if not hasattr(self, cachename):
op = "gt" if is_next else "lt"
order = "_order" if is_next else "-_order"
order_field = self._meta.order_with_respect_to
filter_args = order_field.get_filter_kwargs_for_object(self)
obj = (
self.__class__._default_manager.filter(**filter_args)
.filter(
**{
"_order__%s"
% op: self.__class__._default_manager.values("_order").filter(
**{self._meta.pk.name: self.pk}
)
}
)
.order_by(order)[:1]
.get()
)
setattr(self, cachename, obj)
return getattr(self, cachename)
def prepare_database_save(self, field):
if self.pk is None:
raise ValueError(
"Unsaved model instance %r cannot be used in an ORM query." % self
)
return getattr(self, field.remote_field.get_related_field().attname)
def clean(self):
"""
Hook for doing any extra model-wide validation after clean() has been
called on every field by self.clean_fields. Any ValidationError raised
by this method will not be associated with a particular field; it will
have a special-case association with the field defined by NON_FIELD_ERRORS.
"""
pass
def validate_unique(self, exclude=None):
"""
Check unique constraints on the model and raise ValidationError if any
failed.
"""
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
def _get_unique_checks(self, exclude=None):
"""
Return a list of checks to perform. Since validate_unique() could be
called from a ModelForm, some fields may have been excluded; we can't
perform a unique check on a model that is missing fields involved
in that check. Fields that did not validate should also be excluded,
but they need to be passed in via the exclude argument.
"""
if exclude is None:
exclude = set()
unique_checks = []
unique_togethers = [(self.__class__, self._meta.unique_together)]
constraints = [(self.__class__, self._meta.total_unique_constraints)]
for parent_class in self._meta.get_parent_list():
if parent_class._meta.unique_together:
unique_togethers.append(
(parent_class, parent_class._meta.unique_together)
)
if parent_class._meta.total_unique_constraints:
constraints.append(
(parent_class, parent_class._meta.total_unique_constraints)
)
for model_class, unique_together in unique_togethers:
for check in unique_together:
if not any(name in exclude for name in check):
# Add the check if the field isn't excluded.
unique_checks.append((model_class, tuple(check)))
for model_class, model_constraints in constraints:
for constraint in model_constraints:
if not any(name in exclude for name in constraint.fields):
unique_checks.append((model_class, constraint.fields))
# These are checks for the unique_for_<date/year/month>.
date_checks = []
# Gather a list of checks for fields declared as unique and add them to
# the list of checks.
fields_with_class = [(self.__class__, self._meta.local_fields)]
for parent_class in self._meta.get_parent_list():
fields_with_class.append((parent_class, parent_class._meta.local_fields))
for model_class, fields in fields_with_class:
for f in fields:
name = f.name
if name in exclude:
continue
if f.unique:
unique_checks.append((model_class, (name,)))
if f.unique_for_date and f.unique_for_date not in exclude:
date_checks.append((model_class, "date", name, f.unique_for_date))
if f.unique_for_year and f.unique_for_year not in exclude:
date_checks.append((model_class, "year", name, f.unique_for_year))
if f.unique_for_month and f.unique_for_month not in exclude:
date_checks.append((model_class, "month", name, f.unique_for_month))
return unique_checks, date_checks
def _perform_unique_checks(self, unique_checks):
errors = {}
for model_class, unique_check in unique_checks:
# Try to look up an existing object with the same values as this
            # object's values for all the unique fields.
lookup_kwargs = {}
for field_name in unique_check:
f = self._meta.get_field(field_name)
lookup_value = getattr(self, f.attname)
# TODO: Handle multiple backends with different feature flags.
if lookup_value is None or (
lookup_value == ""
and connection.features.interprets_empty_strings_as_nulls
):
# no value, skip the lookup
continue
if f.primary_key and not self._state.adding:
# no need to check for unique primary key when editing
continue
lookup_kwargs[str(field_name)] = lookup_value
# some fields were skipped, no reason to do the check
if len(unique_check) != len(lookup_kwargs):
continue
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
# Note that we need to use the pk as defined by model_class, not
# self.pk. These can be different fields because model inheritance
# allows single model to have effectively multiple primary keys.
# Refs #17615.
model_class_pk = self._get_pk_val(model_class._meta)
if not self._state.adding and model_class_pk is not None:
qs = qs.exclude(pk=model_class_pk)
if qs.exists():
if len(unique_check) == 1:
key = unique_check[0]
else:
key = NON_FIELD_ERRORS
errors.setdefault(key, []).append(
self.unique_error_message(model_class, unique_check)
)
return errors
def _perform_date_checks(self, date_checks):
errors = {}
for model_class, lookup_type, field, unique_for in date_checks:
lookup_kwargs = {}
            # There's a ticket to add a date lookup; we can remove this special
            # case if that makes its way in.
date = getattr(self, unique_for)
if date is None:
continue
if lookup_type == "date":
lookup_kwargs["%s__day" % unique_for] = date.day
lookup_kwargs["%s__month" % unique_for] = date.month
lookup_kwargs["%s__year" % unique_for] = date.year
else:
lookup_kwargs["%s__%s" % (unique_for, lookup_type)] = getattr(
date, lookup_type
)
lookup_kwargs[field] = getattr(self, field)
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
if not self._state.adding and self.pk is not None:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.setdefault(field, []).append(
self.date_error_message(lookup_type, field, unique_for)
)
return errors
def date_error_message(self, lookup_type, field_name, unique_for):
opts = self._meta
field = opts.get_field(field_name)
return ValidationError(
message=field.error_messages["unique_for_date"],
code="unique_for_date",
params={
"model": self,
"model_name": capfirst(opts.verbose_name),
"lookup_type": lookup_type,
"field": field_name,
"field_label": capfirst(field.verbose_name),
"date_field": unique_for,
"date_field_label": capfirst(opts.get_field(unique_for).verbose_name),
},
)
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
params = {
"model": self,
"model_class": model_class,
"model_name": capfirst(opts.verbose_name),
"unique_check": unique_check,
}
# A unique field
if len(unique_check) == 1:
field = opts.get_field(unique_check[0])
params["field_label"] = capfirst(field.verbose_name)
return ValidationError(
message=field.error_messages["unique"],
code="unique",
params=params,
)
# unique_together
else:
field_labels = [
capfirst(opts.get_field(f).verbose_name) for f in unique_check
]
params["field_labels"] = get_text_list(field_labels, _("and"))
return ValidationError(
message=_("%(model_name)s with this %(field_labels)s already exists."),
code="unique_together",
params=params,
)
def full_clean(self, exclude=None, validate_unique=True):
"""
Call clean_fields(), clean(), and validate_unique() on the model.
Raise a ValidationError for any errors that occur.
"""
errors = {}
if exclude is None:
exclude = set()
else:
exclude = set(exclude)
try:
self.clean_fields(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Form.clean() is run even if other validation fails, so do the
# same with Model.clean() for consistency.
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run unique checks, but only for fields that passed validation.
if validate_unique:
for name in errors:
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.add(name)
try:
self.validate_unique(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
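    # Usage sketch: full_clean() is what ModelForm validation calls; invoking
    # it manually surfaces field, model, and uniqueness errors in one pass
    # (the `slug` exclusion is illustrative).
    #
    #   >>> try:
    #   ...     article.full_clean(exclude={"slug"})
    #   ... except ValidationError as e:
    #   ...     print(e.message_dict)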
def clean_fields(self, exclude=None):
"""
Clean all fields and raise a ValidationError containing a dict
of all validation errors if any occur.
"""
if exclude is None:
exclude = set()
errors = {}
for f in self._meta.fields:
if f.name in exclude:
continue
# Skip validation for empty fields with blank=True. The developer
# is responsible for making sure they have a valid value.
raw_value = getattr(self, f.attname)
if f.blank and raw_value in f.empty_values:
continue
try:
setattr(self, f.attname, f.clean(raw_value, self))
except ValidationError as e:
errors[f.name] = e.error_list
if errors:
raise ValidationError(errors)
@classmethod
def check(cls, **kwargs):
errors = [
*cls._check_swappable(),
*cls._check_model(),
*cls._check_managers(**kwargs),
]
if not cls._meta.swapped:
databases = kwargs.get("databases") or []
errors += [
*cls._check_fields(**kwargs),
*cls._check_m2m_through_same_relationship(),
*cls._check_long_column_names(databases),
]
clash_errors = (
*cls._check_id_field(),
*cls._check_field_name_clashes(),
*cls._check_model_name_db_lookup_clashes(),
*cls._check_property_name_related_field_accessor_clashes(),
*cls._check_single_primary_key(),
)
errors.extend(clash_errors)
# If there are field name clashes, hide consequent column name
# clashes.
if not clash_errors:
errors.extend(cls._check_column_name_clashes())
errors += [
*cls._check_index_together(),
*cls._check_unique_together(),
*cls._check_indexes(databases),
*cls._check_ordering(),
*cls._check_constraints(databases),
*cls._check_default_pk(),
]
return errors
@classmethod
def _check_default_pk(cls):
if (
not cls._meta.abstract
and cls._meta.pk.auto_created
and
            # Inherited PKs are checked in parent models.
not (
isinstance(cls._meta.pk, OneToOneField)
and cls._meta.pk.remote_field.parent_link
)
and not settings.is_overridden("DEFAULT_AUTO_FIELD")
and cls._meta.app_config
and not cls._meta.app_config._is_default_auto_field_overridden
):
return [
checks.Warning(
f"Auto-created primary key used when not defining a "
f"primary key type, by default "
f"'{settings.DEFAULT_AUTO_FIELD}'.",
hint=(
f"Configure the DEFAULT_AUTO_FIELD setting or the "
f"{cls._meta.app_config.__class__.__qualname__}."
f"default_auto_field attribute to point to a subclass "
f"of AutoField, e.g. 'django.db.models.BigAutoField'."
),
obj=cls,
id="models.W042",
),
]
return []
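    # Silencing models.W042 (sketch; the app config name is hypothetical):
    #
    #   # settings.py
    #   DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
    #
    #   # or per app, in apps.py:
    #   from django.apps import AppConfig
    #
    #   class MyAppConfig(AppConfig):
    #       default_auto_field = "django.db.models.BigAutoField"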
@classmethod
def _check_swappable(cls):
"""Check if the swapped model exists."""
errors = []
if cls._meta.swapped:
try:
apps.get_model(cls._meta.swapped)
except ValueError:
errors.append(
checks.Error(
"'%s' is not of the form 'app_label.app_name'."
% cls._meta.swappable,
id="models.E001",
)
)
except LookupError:
app_label, model_name = cls._meta.swapped.split(".")
errors.append(
checks.Error(
"'%s' references '%s.%s', which has not been "
"installed, or is abstract."
% (cls._meta.swappable, app_label, model_name),
id="models.E002",
)
)
return errors
@classmethod
def _check_model(cls):
errors = []
if cls._meta.proxy:
if cls._meta.local_fields or cls._meta.local_many_to_many:
errors.append(
checks.Error(
"Proxy model '%s' contains model fields." % cls.__name__,
id="models.E017",
)
)
return errors
@classmethod
def _check_managers(cls, **kwargs):
"""Perform all manager checks."""
errors = []
for manager in cls._meta.managers:
errors.extend(manager.check(**kwargs))
return errors
@classmethod
def _check_fields(cls, **kwargs):
"""Perform all field checks."""
errors = []
for field in cls._meta.local_fields:
errors.extend(field.check(**kwargs))
for field in cls._meta.local_many_to_many:
errors.extend(field.check(from_model=cls, **kwargs))
return errors
@classmethod
def _check_m2m_through_same_relationship(cls):
"""Check if no relationship model is used by more than one m2m field."""
errors = []
seen_intermediary_signatures = []
fields = cls._meta.local_many_to_many
# Skip when the target model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase))
# Skip when the relationship model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase))
for f in fields:
signature = (
f.remote_field.model,
cls,
f.remote_field.through,
f.remote_field.through_fields,
)
if signature in seen_intermediary_signatures:
errors.append(
checks.Error(
"The model has two identical many-to-many relations "
"through the intermediate model '%s'."
% f.remote_field.through._meta.label,
obj=cls,
id="models.E003",
)
)
else:
seen_intermediary_signatures.append(signature)
return errors
@classmethod
def _check_id_field(cls):
"""Check if `id` field is a primary key."""
fields = [
f for f in cls._meta.local_fields if f.name == "id" and f != cls._meta.pk
]
# fields is empty or consists of the invalid "id" field
if fields and not fields[0].primary_key and cls._meta.pk.name == "id":
return [
checks.Error(
"'id' can only be used as a field name if the field also "
"sets 'primary_key=True'.",
obj=cls,
id="models.E004",
)
]
else:
return []
@classmethod
def _check_field_name_clashes(cls):
"""Forbid field shadowing in multi-table inheritance."""
errors = []
used_fields = {} # name or attname -> field
# Check that multi-inheritance doesn't cause field name shadowing.
for parent in cls._meta.get_parent_list():
for f in parent._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
if clash:
errors.append(
checks.Error(
"The field '%s' from parent model "
"'%s' clashes with the field '%s' "
"from parent model '%s'."
% (clash.name, clash.model._meta, f.name, f.model._meta),
obj=cls,
id="models.E005",
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
# Check that fields defined in the model don't clash with fields from
# parents, including auto-generated fields like multi-table inheritance
# child accessors.
for parent in cls._meta.get_parent_list():
for f in parent._meta.get_fields():
if f not in used_fields:
used_fields[f.name] = f
for f in cls._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
            # Note that we may detect a clash between a user-defined
            # non-unique field "id" and the automatically added unique field
            # "id", both defined on the same model. This special case is
            # handled in _check_id_field, so ignore it here.
id_conflict = (
f.name == "id" and clash and clash.name == "id" and clash.model == cls
)
if clash and not id_conflict:
errors.append(
checks.Error(
"The field '%s' clashes with the field '%s' "
"from model '%s'." % (f.name, clash.name, clash.model._meta),
obj=f,
id="models.E006",
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
return errors
@classmethod
def _check_column_name_clashes(cls):
# Store a list of column names which have already been used by other fields.
used_column_names = []
errors = []
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Ensure the column name is not already in use.
if column_name and column_name in used_column_names:
errors.append(
checks.Error(
"Field '%s' has column name '%s' that is used by "
"another field." % (f.name, column_name),
hint="Specify a 'db_column' for the field.",
obj=cls,
id="models.E007",
)
)
else:
used_column_names.append(column_name)
return errors
@classmethod
def _check_model_name_db_lookup_clashes(cls):
errors = []
model_name = cls.__name__
if model_name.startswith("_") or model_name.endswith("_"):
errors.append(
checks.Error(
"The model name '%s' cannot start or end with an underscore "
"as it collides with the query lookup syntax." % model_name,
obj=cls,
id="models.E023",
)
)
elif LOOKUP_SEP in model_name:
errors.append(
checks.Error(
"The model name '%s' cannot contain double underscores as "
"it collides with the query lookup syntax." % model_name,
obj=cls,
id="models.E024",
)
)
return errors
@classmethod
def _check_property_name_related_field_accessor_clashes(cls):
errors = []
property_names = cls._meta._property_names
related_field_accessors = (
f.get_attname()
for f in cls._meta._get_fields(reverse=False)
if f.is_relation and f.related_model is not None
)
for accessor in related_field_accessors:
if accessor in property_names:
errors.append(
checks.Error(
"The property '%s' clashes with a related field "
"accessor." % accessor,
obj=cls,
id="models.E025",
)
)
return errors
@classmethod
def _check_single_primary_key(cls):
errors = []
if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1:
errors.append(
checks.Error(
"The model cannot have more than one field with "
"'primary_key=True'.",
obj=cls,
id="models.E026",
)
)
return errors
@classmethod
def _check_index_together(cls):
"""Check the value of "index_together" option."""
if not isinstance(cls._meta.index_together, (tuple, list)):
return [
checks.Error(
"'index_together' must be a list or tuple.",
obj=cls,
id="models.E008",
)
]
elif any(
not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together
):
return [
checks.Error(
"All 'index_together' elements must be lists or tuples.",
obj=cls,
id="models.E009",
)
]
else:
errors = []
for fields in cls._meta.index_together:
errors.extend(cls._check_local_fields(fields, "index_together"))
return errors
@classmethod
def _check_unique_together(cls):
"""Check the value of "unique_together" option."""
if not isinstance(cls._meta.unique_together, (tuple, list)):
return [
checks.Error(
"'unique_together' must be a list or tuple.",
obj=cls,
id="models.E010",
)
]
elif any(
not isinstance(fields, (tuple, list))
for fields in cls._meta.unique_together
):
return [
checks.Error(
"All 'unique_together' elements must be lists or tuples.",
obj=cls,
id="models.E011",
)
]
else:
errors = []
for fields in cls._meta.unique_together:
errors.extend(cls._check_local_fields(fields, "unique_together"))
return errors
@classmethod
def _check_indexes(cls, databases):
"""Check fields, names, and conditions of indexes."""
errors = []
references = set()
for index in cls._meta.indexes:
# Index name can't start with an underscore or a number, restricted
# for cross-database compatibility with Oracle.
if index.name[0] == "_" or index.name[0].isdigit():
errors.append(
checks.Error(
"The index name '%s' cannot start with an underscore "
"or a number." % index.name,
obj=cls,
id="models.E033",
),
)
if len(index.name) > index.max_name_length:
errors.append(
checks.Error(
"The index name '%s' cannot be longer than %d "
"characters." % (index.name, index.max_name_length),
obj=cls,
id="models.E034",
),
)
if index.contains_expressions:
for expression in index.expressions:
references.update(
ref[0] for ref in cls._get_expr_references(expression)
)
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if not (
connection.features.supports_partial_indexes
or "supports_partial_indexes" in cls._meta.required_db_features
) and any(index.condition is not None for index in cls._meta.indexes):
errors.append(
checks.Warning(
"%s does not support indexes with conditions."
% connection.display_name,
hint=(
"Conditions will be ignored. Silence this warning "
"if you don't care about it."
),
obj=cls,
id="models.W037",
)
)
if not (
connection.features.supports_covering_indexes
or "supports_covering_indexes" in cls._meta.required_db_features
) and any(index.include for index in cls._meta.indexes):
errors.append(
checks.Warning(
"%s does not support indexes with non-key columns."
% connection.display_name,
hint=(
"Non-key columns will be ignored. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W040",
)
)
if not (
connection.features.supports_expression_indexes
or "supports_expression_indexes" in cls._meta.required_db_features
) and any(index.contains_expressions for index in cls._meta.indexes):
errors.append(
checks.Warning(
"%s does not support indexes on expressions."
% connection.display_name,
hint=(
"An index won't be created. Silence this warning "
"if you don't care about it."
),
obj=cls,
id="models.W043",
)
)
fields = [
field for index in cls._meta.indexes for field, _ in index.fields_orders
]
fields += [include for index in cls._meta.indexes for include in index.include]
fields += references
errors.extend(cls._check_local_fields(fields, "indexes"))
return errors
@classmethod
def _check_local_fields(cls, fields, option):
from django.db import models
# In order to avoid hitting the relation tree prematurely, we use our
# own fields_map instead of using get_field()
forward_fields_map = {}
for field in cls._meta._get_fields(reverse=False):
forward_fields_map[field.name] = field
if hasattr(field, "attname"):
forward_fields_map[field.attname] = field
errors = []
for field_name in fields:
try:
field = forward_fields_map[field_name]
except KeyError:
errors.append(
checks.Error(
"'%s' refers to the nonexistent field '%s'."
% (
option,
field_name,
),
obj=cls,
id="models.E012",
)
)
else:
if isinstance(field.remote_field, models.ManyToManyRel):
errors.append(
checks.Error(
"'%s' refers to a ManyToManyField '%s', but "
"ManyToManyFields are not permitted in '%s'."
% (
option,
field_name,
option,
),
obj=cls,
id="models.E013",
)
)
elif field not in cls._meta.local_fields:
errors.append(
checks.Error(
"'%s' refers to field '%s' which is not local to model "
"'%s'." % (option, field_name, cls._meta.object_name),
hint="This issue may be caused by multi-table inheritance.",
obj=cls,
id="models.E016",
)
)
return errors
@classmethod
def _check_ordering(cls):
"""
Check "ordering" option -- is it a list of strings and do all fields
exist?
"""
if cls._meta._ordering_clash:
return [
checks.Error(
"'ordering' and 'order_with_respect_to' cannot be used together.",
obj=cls,
id="models.E021",
),
]
if cls._meta.order_with_respect_to or not cls._meta.ordering:
return []
if not isinstance(cls._meta.ordering, (list, tuple)):
return [
checks.Error(
"'ordering' must be a tuple or list (even if you want to order by "
"only one field).",
obj=cls,
id="models.E014",
)
]
errors = []
fields = cls._meta.ordering
# Skip expressions and '?' fields.
fields = (f for f in fields if isinstance(f, str) and f != "?")
# Convert "-field" to "field".
fields = ((f[1:] if f.startswith("-") else f) for f in fields)
# Separate related fields and non-related fields.
_fields = []
related_fields = []
for f in fields:
if LOOKUP_SEP in f:
related_fields.append(f)
else:
_fields.append(f)
fields = _fields
# Check related fields.
for field in related_fields:
_cls = cls
fld = None
for part in field.split(LOOKUP_SEP):
try:
# pk is an alias that won't be found by opts.get_field.
if part == "pk":
fld = _cls._meta.pk
else:
fld = _cls._meta.get_field(part)
if fld.is_relation:
_cls = fld.path_infos[-1].to_opts.model
else:
_cls = None
except (FieldDoesNotExist, AttributeError):
if fld is None or (
fld.get_transform(part) is None and fld.get_lookup(part) is None
):
errors.append(
checks.Error(
"'ordering' refers to the nonexistent field, "
"related field, or lookup '%s'." % field,
obj=cls,
id="models.E015",
)
)
# Skip ordering on pk. This is always a valid order_by field
# but is an alias and therefore won't be found by opts.get_field.
fields = {f for f in fields if f != "pk"}
# Check for invalid or nonexistent fields in ordering.
invalid_fields = []
        # Any field name that is not present in valid_fields does not exist.
# Also, ordering by m2m fields is not allowed.
opts = cls._meta
valid_fields = set(
chain.from_iterable(
(f.name, f.attname)
if not (f.auto_created and not f.concrete)
else (f.field.related_query_name(),)
for f in chain(opts.fields, opts.related_objects)
)
)
invalid_fields.extend(fields - valid_fields)
for invalid_field in invalid_fields:
errors.append(
checks.Error(
"'ordering' refers to the nonexistent field, related "
"field, or lookup '%s'." % invalid_field,
obj=cls,
id="models.E015",
)
)
return errors
@classmethod
def _check_long_column_names(cls, databases):
"""
Check that any auto-generated column names are shorter than the limits
for each database in which the model will be created.
"""
if not databases:
return []
errors = []
allowed_len = None
db_alias = None
# Find the minimum max allowed length among all specified db_aliases.
for db in databases:
# skip databases where the model won't be created
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
max_name_length = connection.ops.max_name_length()
            if max_name_length is None or connection.features.truncates_names:
                continue
            if allowed_len is None or max_name_length < allowed_len:
                allowed_len = max_name_length
                db_alias = db
if allowed_len is None:
return errors
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Check if auto-generated name for the field is too long
# for the database.
if (
f.db_column is None
and column_name is not None
and len(column_name) > allowed_len
):
errors.append(
checks.Error(
'Autogenerated column name too long for field "%s". '
'Maximum length is "%s" for database "%s".'
% (column_name, allowed_len, db_alias),
hint="Set the column name manually using 'db_column'.",
obj=cls,
id="models.E018",
)
)
for f in cls._meta.local_many_to_many:
# Skip nonexistent models.
if isinstance(f.remote_field.through, str):
continue
# Check if auto-generated name for the M2M field is too long
# for the database.
for m2m in f.remote_field.through._meta.local_fields:
_, rel_name = m2m.get_attname_column()
if (
m2m.db_column is None
and rel_name is not None
and len(rel_name) > allowed_len
):
errors.append(
checks.Error(
"Autogenerated column name too long for M2M field "
'"%s". Maximum length is "%s" for database "%s".'
% (rel_name, allowed_len, db_alias),
hint=(
"Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'."
),
obj=cls,
id="models.E019",
)
)
return errors
@classmethod
def _get_expr_references(cls, expr):
if isinstance(expr, Q):
for child in expr.children:
if isinstance(child, tuple):
lookup, value = child
yield tuple(lookup.split(LOOKUP_SEP))
yield from cls._get_expr_references(value)
else:
yield from cls._get_expr_references(child)
elif isinstance(expr, F):
yield tuple(expr.name.split(LOOKUP_SEP))
elif hasattr(expr, "get_source_expressions"):
for src_expr in expr.get_source_expressions():
yield from cls._get_expr_references(src_expr)
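    # Extraction sketch (illustrative): each yielded tuple is a field name
    # followed by any lookups/transforms found in the expression tree.
    #
    #   list(cls._get_expr_references(Q(price__gt=F("discount"))))
    #   # [('price', 'gt'), ('discount',)]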
@classmethod
def _check_constraints(cls, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if not (
connection.features.supports_table_check_constraints
or "supports_table_check_constraints" in cls._meta.required_db_features
) and any(
isinstance(constraint, CheckConstraint)
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support check constraints."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W027",
)
)
if not (
connection.features.supports_partial_indexes
or "supports_partial_indexes" in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint)
and constraint.condition is not None
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support unique constraints with "
"conditions." % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W036",
)
)
if not (
connection.features.supports_deferrable_unique_constraints
or "supports_deferrable_unique_constraints"
in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint)
and constraint.deferrable is not None
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support deferrable unique constraints."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W038",
)
)
if not (
connection.features.supports_covering_indexes
or "supports_covering_indexes" in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint) and constraint.include
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support unique constraints with non-key "
"columns." % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W039",
)
)
if not (
connection.features.supports_expression_indexes
or "supports_expression_indexes" in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint)
and constraint.contains_expressions
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support unique constraints on "
"expressions." % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W044",
)
)
fields = set(
chain.from_iterable(
(*constraint.fields, *constraint.include)
for constraint in cls._meta.constraints
if isinstance(constraint, UniqueConstraint)
)
)
references = set()
for constraint in cls._meta.constraints:
if isinstance(constraint, UniqueConstraint):
if (
connection.features.supports_partial_indexes
or "supports_partial_indexes"
not in cls._meta.required_db_features
) and isinstance(constraint.condition, Q):
references.update(
cls._get_expr_references(constraint.condition)
)
if (
connection.features.supports_expression_indexes
or "supports_expression_indexes"
not in cls._meta.required_db_features
) and constraint.contains_expressions:
for expression in constraint.expressions:
references.update(cls._get_expr_references(expression))
elif isinstance(constraint, CheckConstraint):
if (
connection.features.supports_table_check_constraints
or "supports_table_check_constraints"
not in cls._meta.required_db_features
) and isinstance(constraint.check, Q):
references.update(cls._get_expr_references(constraint.check))
for field_name, *lookups in references:
# pk is an alias that won't be found by opts.get_field.
if field_name != "pk":
fields.add(field_name)
if not lookups:
# If it has no lookups it cannot result in a JOIN.
continue
try:
if field_name == "pk":
field = cls._meta.pk
else:
field = cls._meta.get_field(field_name)
if not field.is_relation or field.many_to_many or field.one_to_many:
continue
except FieldDoesNotExist:
continue
# JOIN must happen at the first lookup.
first_lookup = lookups[0]
if (
hasattr(field, "get_transform")
and hasattr(field, "get_lookup")
and field.get_transform(first_lookup) is None
and field.get_lookup(first_lookup) is None
):
errors.append(
checks.Error(
"'constraints' refers to the joined field '%s'."
% LOOKUP_SEP.join([field_name] + lookups),
obj=cls,
id="models.E041",
)
)
errors.extend(cls._check_local_fields(fields, "constraints"))
return errors
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(self, ordered_obj, id_list, using=None):
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update(
[ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list)],
["_order"],
)
def method_get_order(self, ordered_obj):
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
pk_name = ordered_obj._meta.pk.name
return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True)
def make_foreign_order_accessors(model, related_model):
setattr(
related_model,
"get_%s_order" % model.__name__.lower(),
partialmethod(method_get_order, model),
)
setattr(
related_model,
"set_%s_order" % model.__name__.lower(),
partialmethod(method_set_order, model),
)
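# Usage sketch (illustrative; ``Question``/``Answer`` are hypothetical models
# where Answer declares Meta.order_with_respect_to = "question"; the
# accessors below are attached to Question by make_foreign_order_accessors):
#
#   question.get_answer_order()        # e.g. <QuerySet [3, 1, 2]>
#   question.set_answer_order([1, 2, 3])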
########
# MISC #
########
def model_unpickle(model_id):
"""Used to unpickle Model subclasses with deferred fields."""
if isinstance(model_id, tuple):
model = apps.get_model(*model_id)
else:
# Backwards compat - the model was cached directly in earlier versions.
model = model_id
return model.__new__(model)
model_unpickle.__safe_for_unpickle__ = True
|
e41623f3792b2148e043a05e2807465366bf45136b77f8ebe57a51427c1081f4 | """
Various data structures used in query construction.
Factored out from django.db.models.query to avoid making the main module very
large and/or so that they can be used by other modules without getting into
circular import difficulties.
"""
import copy
import functools
import inspect
from collections import namedtuple
from django.core.exceptions import FieldError
from django.db.models.constants import LOOKUP_SEP
from django.utils import tree
# PathInfo is used when converting lookups (fk__somecol). The contents
# describe the relation in Model terms (model Options and Fields for both
# sides of the relation). The join_field is the field backing the relation.
PathInfo = namedtuple(
"PathInfo",
"from_opts to_opts target_fields join_field m2m direct filtered_relation",
)
def subclasses(cls):
yield cls
for subclass in cls.__subclasses__():
yield from subclasses(subclass)
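# Traversal sketch (illustrative): subclasses() yields cls itself, then every
# transitive subclass, depth-first.
#
#   class A: ...
#   class B(A): ...
#   class C(B): ...
#
#   list(subclasses(A))  # [A, B, C]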
class Q(tree.Node):
"""
Encapsulate filters as objects that can then be combined logically (using
`&` and `|`).
"""
# Connection types
AND = "AND"
OR = "OR"
XOR = "XOR"
default = AND
conditional = True
def __init__(self, *args, _connector=None, _negated=False, **kwargs):
super().__init__(
children=[*args, *sorted(kwargs.items())],
connector=_connector,
negated=_negated,
)
def _combine(self, other, conn):
if not (isinstance(other, Q) or getattr(other, "conditional", False) is True):
raise TypeError(other)
if not self:
return other.copy() if hasattr(other, "copy") else copy.copy(other)
elif isinstance(other, Q) and not other:
_, args, kwargs = self.deconstruct()
return type(self)(*args, **kwargs)
obj = type(self)()
obj.connector = conn
obj.add(self, conn)
obj.add(other, conn)
return obj
def __or__(self, other):
return self._combine(other, self.OR)
def __and__(self, other):
return self._combine(other, self.AND)
def __xor__(self, other):
return self._combine(other, self.XOR)
def __invert__(self):
obj = type(self)()
obj.add(self, self.AND)
obj.negate()
return obj
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
# We must promote any new joins to left outer joins so that when Q is
# used as an expression, rows aren't filtered due to joins.
clause, joins = query._add_q(
self,
reuse,
allow_joins=allow_joins,
split_subq=False,
check_filterable=False,
)
query.promote_joins(joins)
return clause
def deconstruct(self):
path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
if path.startswith("django.db.models.query_utils"):
path = path.replace("django.db.models.query_utils", "django.db.models")
args = tuple(self.children)
kwargs = {}
if self.connector != self.default:
kwargs["_connector"] = self.connector
if self.negated:
kwargs["_negated"] = True
return path, args, kwargs
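# Usage sketch (illustrative; ``Entry`` is a hypothetical model):
#
#   q = Q(headline__startswith="Who") | ~Q(pub_date__year=2005)
#   Entry.objects.filter(q)
#
# deconstruct() round-trips, so Q(*args, **kwargs) rebuilds an equal object:
#
#   path, args, kwargs = q.deconstruct()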
class DeferredAttribute:
"""
A wrapper for a deferred-loading field. When the value is read from this
object the first time, the query is executed.
"""
def __init__(self, field):
self.field = field
def __get__(self, instance, cls=None):
"""
        Retrieve and cache the value from the datastore on the first lookup.
Return the cached value.
"""
if instance is None:
return self
data = instance.__dict__
field_name = self.field.attname
if field_name not in data:
# Let's see if the field is part of the parent chain. If so we
# might be able to reuse the already loaded value. Refs #18343.
val = self._check_parent_chain(instance)
if val is None:
instance.refresh_from_db(fields=[field_name])
else:
data[field_name] = val
return data[field_name]
def _check_parent_chain(self, instance):
"""
Check if the field value can be fetched from a parent field already
        loaded in the instance. This can be done if the to-be-fetched
field is a primary key field.
"""
opts = instance._meta
link_field = opts.get_ancestor_link(self.field.model)
if self.field.primary_key and self.field != link_field:
return getattr(instance, link_field.attname)
return None
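# Deferred loading sketch (illustrative; ``Entry`` is a hypothetical model):
#
#   e = Entry.objects.defer("body").get(pk=1)
#   e.body  # first access triggers refresh_from_db(fields=["body"])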
class RegisterLookupMixin:
@classmethod
def _get_lookup(cls, lookup_name):
return cls.get_lookups().get(lookup_name, None)
@classmethod
@functools.lru_cache(maxsize=None)
def get_lookups(cls):
class_lookups = [
parent.__dict__.get("class_lookups", {}) for parent in inspect.getmro(cls)
]
return cls.merge_dicts(class_lookups)
def get_lookup(self, lookup_name):
from django.db.models.lookups import Lookup
found = self._get_lookup(lookup_name)
if found is None and hasattr(self, "output_field"):
return self.output_field.get_lookup(lookup_name)
if found is not None and not issubclass(found, Lookup):
return None
return found
def get_transform(self, lookup_name):
from django.db.models.lookups import Transform
found = self._get_lookup(lookup_name)
if found is None and hasattr(self, "output_field"):
return self.output_field.get_transform(lookup_name)
if found is not None and not issubclass(found, Transform):
return None
return found
@staticmethod
def merge_dicts(dicts):
"""
        Merge dicts in reverse to prefer the order of the original list, e.g.
        merge_dicts([a, b]) will prefer the keys in 'a' over those in 'b'.
"""
merged = {}
for d in reversed(dicts):
merged.update(d)
return merged
@classmethod
def _clear_cached_lookups(cls):
for subclass in subclasses(cls):
subclass.get_lookups.cache_clear()
@classmethod
def register_lookup(cls, lookup, lookup_name=None):
if lookup_name is None:
lookup_name = lookup.lookup_name
if "class_lookups" not in cls.__dict__:
cls.class_lookups = {}
cls.class_lookups[lookup_name] = lookup
cls._clear_cached_lookups()
return lookup
@classmethod
def _unregister_lookup(cls, lookup, lookup_name=None):
"""
Remove given lookup from cls lookups. For use in tests only as it's
not thread-safe.
"""
if lookup_name is None:
lookup_name = lookup.lookup_name
del cls.class_lookups[lookup_name]
cls._clear_cached_lookups()
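# Registration sketch (illustrative; ``NotEqual`` is a custom example lookup):
#
#   from django.db.models import CharField, Lookup
#
#   class NotEqual(Lookup):
#       lookup_name = "ne"
#
#       def as_sql(self, compiler, connection):
#           lhs, lhs_params = self.process_lhs(compiler, connection)
#           rhs, rhs_params = self.process_rhs(compiler, connection)
#           return "%s <> %s" % (lhs, rhs), lhs_params + rhs_params
#
#   CharField.register_lookup(NotEqual)  # enables filter(name__ne="x")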
def select_related_descend(field, restricted, requested, load_fields, reverse=False):
"""
Return True if this field should be used to descend deeper for
select_related() purposes. Used by both the query construction code
(compiler.get_related_selections()) and the model instance creation code
(compiler.klass_info).
Arguments:
* field - the field to be checked
     * restricted - a boolean indicating whether the field list has been
       manually restricted using a select_related() clause
     * requested - the select_related() dictionary
* load_fields - the set of fields to be loaded on this model
* reverse - boolean, True if we are checking a reverse select related
"""
if not field.remote_field:
return False
if field.remote_field.parent_link and not reverse:
return False
if restricted:
if reverse and field.related_query_name() not in requested:
return False
if not reverse and field.name not in requested:
return False
if not restricted and field.null:
return False
if load_fields:
if field.attname not in load_fields:
if restricted and field.name in requested:
msg = (
"Field %s.%s cannot be both deferred and traversed using "
"select_related at the same time."
) % (field.model._meta.object_name, field.name)
raise FieldError(msg)
return True
def refs_expression(lookup_parts, annotations):
"""
    Check if the lookup_parts contain references to the given annotations set.
Because the LOOKUP_SEP is contained in the default annotation names, check
each prefix of the lookup_parts for a match.
"""
for n in range(1, len(lookup_parts) + 1):
level_n_lookup = LOOKUP_SEP.join(lookup_parts[0:n])
if level_n_lookup in annotations and annotations[level_n_lookup]:
return annotations[level_n_lookup], lookup_parts[n:]
return False, ()
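# Resolution sketch (illustrative): with annotations {"entries__count": expr},
# refs_expression(["entries", "count", "gt"], annotations) returns
# (expr, ["gt"]); when nothing matches it returns (False, ()).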
def check_rel_lookup_compatibility(model, target_opts, field):
"""
    Check that model is compatible with target_opts. Compatibility
is OK if:
1) model and opts match (where proxy inheritance is removed)
2) model is parent of opts' model or the other way around
"""
def check(opts):
return (
model._meta.concrete_model == opts.concrete_model
or opts.concrete_model in model._meta.get_parent_list()
or model in opts.get_parent_list()
)
# If the field is a primary key, then doing a query against the field's
# model is ok, too. Consider the case:
# class Restaurant(models.Model):
    # place = OneToOneField(Place, primary_key=True)
# Restaurant.objects.filter(pk__in=Restaurant.objects.all()).
# If we didn't have the primary key check, then pk__in (== place__in) would
# give Place's opts as the target opts, but Restaurant isn't compatible
# with that. This logic applies only to primary keys, as when doing __in=qs,
# we are going to turn this into __in=qs.values('pk') later on.
return check(target_opts) or (
getattr(field, "primary_key", False) and check(field.model._meta)
)
class FilteredRelation:
"""Specify custom filtering in the ON clause of SQL joins."""
def __init__(self, relation_name, *, condition=Q()):
if not relation_name:
raise ValueError("relation_name cannot be empty.")
self.relation_name = relation_name
self.alias = None
if not isinstance(condition, Q):
raise ValueError("condition argument must be a Q() instance.")
self.condition = condition
self.path = []
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return (
self.relation_name == other.relation_name
and self.alias == other.alias
and self.condition == other.condition
)
def clone(self):
clone = FilteredRelation(self.relation_name, condition=self.condition)
clone.alias = self.alias
clone.path = self.path[:]
return clone
def resolve_expression(self, *args, **kwargs):
"""
QuerySet.annotate() only accepts expression-like arguments
(with a resolve_expression() method).
"""
raise NotImplementedError("FilteredRelation.resolve_expression() is unused.")
def as_sql(self, compiler, connection):
# Resolve the condition in Join.filtered_relation.
query = compiler.query
where = query.build_filtered_relation_q(self.condition, reuse=set(self.path))
return compiler.compile(where)
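# Usage sketch (illustrative; ``Restaurant`` and its ``pizzas`` relation are
# hypothetical):
#
#   Restaurant.objects.annotate(
#       vegan_pizzas=FilteredRelation(
#           "pizzas", condition=Q(pizzas__vegan=True)
#       ),
#   ).filter(vegan_pizzas__name__icontains="vegan")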
|
aa2cb68eaf1a67cc633a9682a42aca9d94825ccecdc2faedd479c07d2a8ad250 | import collections.abc
import copy
import datetime
import decimal
import math
import operator
import uuid
import warnings
from base64 import b64decode, b64encode
from functools import partialmethod, total_ordering
from django import forms
from django.apps import apps
from django.conf import settings
from django.core import checks, exceptions, validators
from django.db import connection, connections, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin
from django.utils import timezone
from django.utils.datastructures import DictWrapper
from django.utils.dateparse import (
parse_date,
parse_datetime,
parse_duration,
parse_time,
)
from django.utils.duration import duration_microseconds, duration_string
from django.utils.functional import Promise, cached_property
from django.utils.ipv6 import clean_ipv6_address
from django.utils.itercompat import is_iterable
from django.utils.text import capfirst
from django.utils.translation import gettext_lazy as _
__all__ = [
"AutoField",
"BLANK_CHOICE_DASH",
"BigAutoField",
"BigIntegerField",
"BinaryField",
"BooleanField",
"CharField",
"CommaSeparatedIntegerField",
"DateField",
"DateTimeField",
"DecimalField",
"DurationField",
"EmailField",
"Empty",
"Field",
"FilePathField",
"FloatField",
"GenericIPAddressField",
"IPAddressField",
"IntegerField",
"NOT_PROVIDED",
"NullBooleanField",
"PositiveBigIntegerField",
"PositiveIntegerField",
"PositiveSmallIntegerField",
"SlugField",
"SmallAutoField",
"SmallIntegerField",
"TextField",
"TimeField",
"URLField",
"UUIDField",
]
class Empty:
pass
class NOT_PROVIDED:
pass
# The values to use for "blank" in SelectFields. Will be appended to the start
# of most "choices" lists.
BLANK_CHOICE_DASH = [("", "---------")]
def _load_field(app_label, model_name, field_name):
return apps.get_model(app_label, model_name)._meta.get_field(field_name)
# A guide to Field parameters:
#
# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
# "name", except in the case of ForeignKeys, where "_id" is
# appended.
# * db_column: The db_column specified in the model (or None).
# * column: The database column for this field. This is the same as
# "attname", except if db_column is specified.
#
# Code that introspects values, or does other dynamic things, should use
# attname. For example, this gets the primary key value of object "obj":
#
# getattr(obj, opts.pk.attname)
def _empty(of_cls):
new = Empty()
new.__class__ = of_cls
return new
def return_None():
return None
@total_ordering
class Field(RegisterLookupMixin):
"""Base class for all field types"""
# Designates whether empty strings fundamentally are allowed at the
# database level.
empty_strings_allowed = True
empty_values = list(validators.EMPTY_VALUES)
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that Django implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
default_validators = [] # Default set of validators
default_error_messages = {
"invalid_choice": _("Value %(value)r is not a valid choice."),
"null": _("This field cannot be null."),
"blank": _("This field cannot be blank."),
"unique": _("%(model_name)s with this %(field_label)s already exists."),
# Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
# Eg: "Title must be unique for pub_date year"
"unique_for_date": _(
"%(field_label)s must be unique for "
"%(date_field_label)s %(lookup_type)s."
),
}
system_check_deprecated_details = None
system_check_removed_details = None
# Attributes that don't affect a column definition.
# These attributes are ignored when altering the field.
non_db_attrs = (
"blank",
"choices",
"db_column",
"editable",
"error_messages",
"help_text",
"limit_choices_to",
# Database-level options are not supported, see #21961.
"on_delete",
"related_name",
"related_query_name",
"validators",
"verbose_name",
)
# Field flags
hidden = False
many_to_many = None
many_to_one = None
one_to_many = None
one_to_one = None
related_model = None
descriptor_class = DeferredAttribute
# Generic field type description, usually overridden by subclasses
def _description(self):
return _("Field of type: %(field_type)s") % {
"field_type": self.__class__.__name__
}
description = property(_description)
def __init__(
self,
verbose_name=None,
name=None,
primary_key=False,
max_length=None,
unique=False,
blank=False,
null=False,
db_index=False,
rel=None,
default=NOT_PROVIDED,
editable=True,
serialize=True,
unique_for_date=None,
unique_for_month=None,
unique_for_year=None,
choices=None,
help_text="",
db_column=None,
db_tablespace=None,
auto_created=False,
validators=(),
error_messages=None,
):
self.name = name
self.verbose_name = verbose_name # May be set by set_attributes_from_name
self._verbose_name = verbose_name # Store original for deconstruction
self.primary_key = primary_key
self.max_length, self._unique = max_length, unique
self.blank, self.null = blank, null
self.remote_field = rel
self.is_relation = self.remote_field is not None
self.default = default
self.editable = editable
self.serialize = serialize
self.unique_for_date = unique_for_date
self.unique_for_month = unique_for_month
self.unique_for_year = unique_for_year
if isinstance(choices, collections.abc.Iterator):
choices = list(choices)
self.choices = choices
self.help_text = help_text
self.db_index = db_index
self.db_column = db_column
self._db_tablespace = db_tablespace
self.auto_created = auto_created
# Adjust the appropriate creation counter, and save our local copy.
if auto_created:
self.creation_counter = Field.auto_creation_counter
Field.auto_creation_counter -= 1
else:
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
self._validators = list(validators) # Store for deconstruction later
self._error_messages = error_messages # Store for deconstruction later
def __str__(self):
"""
Return "app_label.model_label.field_name" for fields attached to
models.
"""
if not hasattr(self, "model"):
return super().__str__()
model = self.model
return "%s.%s" % (model._meta.label, self.name)
def __repr__(self):
"""Display the module, class, and name of the field."""
path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
name = getattr(self, "name", None)
if name is not None:
return "<%s: %s>" % (path, name)
return "<%s>" % path
def check(self, **kwargs):
return [
*self._check_field_name(),
*self._check_choices(),
*self._check_db_index(),
*self._check_null_allowed_for_primary_keys(),
*self._check_backend_specific_checks(**kwargs),
*self._check_validators(),
*self._check_deprecation_details(),
]
def _check_field_name(self):
"""
Check if field name is valid, i.e. 1) does not end with an
underscore, 2) does not contain "__" and 3) is not "pk".
"""
if self.name.endswith("_"):
return [
checks.Error(
"Field names must not end with an underscore.",
obj=self,
id="fields.E001",
)
]
elif LOOKUP_SEP in self.name:
return [
checks.Error(
'Field names must not contain "%s".' % LOOKUP_SEP,
obj=self,
id="fields.E002",
)
]
elif self.name == "pk":
return [
checks.Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=self,
id="fields.E003",
)
]
else:
return []
@classmethod
def _choices_is_value(cls, value):
return isinstance(value, (str, Promise)) or not is_iterable(value)
def _check_choices(self):
if not self.choices:
return []
if not is_iterable(self.choices) or isinstance(self.choices, str):
return [
checks.Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=self,
id="fields.E004",
)
]
choice_max_length = 0
# Expect [group_name, [value, display]]
for choices_group in self.choices:
try:
group_name, group_choices = choices_group
except (TypeError, ValueError):
# Containing non-pairs
break
try:
if not all(
self._choices_is_value(value) and self._choices_is_value(human_name)
for value, human_name in group_choices
):
break
if self.max_length is not None and group_choices:
choice_max_length = max(
[
choice_max_length,
*(
len(value)
for value, _ in group_choices
if isinstance(value, str)
),
]
)
except (TypeError, ValueError):
# No groups, choices in the form [value, display]
value, human_name = group_name, group_choices
if not self._choices_is_value(value) or not self._choices_is_value(
human_name
):
break
if self.max_length is not None and isinstance(value, str):
choice_max_length = max(choice_max_length, len(value))
# Special case: choices=['ab']
if isinstance(choices_group, str):
break
else:
if self.max_length is not None and choice_max_length > self.max_length:
return [
checks.Error(
"'max_length' is too small to fit the longest value "
"in 'choices' (%d characters)." % choice_max_length,
obj=self,
id="fields.E009",
),
]
return []
return [
checks.Error(
"'choices' must be an iterable containing "
"(actual value, human readable name) tuples.",
obj=self,
id="fields.E005",
)
]
def _check_db_index(self):
if self.db_index not in (None, True, False):
return [
checks.Error(
"'db_index' must be None, True or False.",
obj=self,
id="fields.E006",
)
]
else:
return []
def _check_null_allowed_for_primary_keys(self):
if (
self.primary_key
and self.null
and not connection.features.interprets_empty_strings_as_nulls
):
# We cannot reliably check this for backends like Oracle which
# consider NULL and '' to be equal (and thus set up
# character-based fields a little differently).
return [
checks.Error(
"Primary keys must not have null=True.",
hint=(
"Set null=False on the field, or "
"remove primary_key=True argument."
),
obj=self,
id="fields.E007",
)
]
else:
return []
def _check_backend_specific_checks(self, databases=None, **kwargs):
if databases is None:
return []
app_label = self.model._meta.app_label
errors = []
for alias in databases:
if router.allow_migrate(
alias, app_label, model_name=self.model._meta.model_name
):
errors.extend(connections[alias].validation.check_field(self, **kwargs))
return errors
def _check_validators(self):
errors = []
for i, validator in enumerate(self.validators):
if not callable(validator):
errors.append(
checks.Error(
"All 'validators' must be callable.",
hint=(
"validators[{i}] ({repr}) isn't a function or "
"instance of a validator class.".format(
i=i,
repr=repr(validator),
)
),
obj=self,
id="fields.E008",
)
)
return errors
def _check_deprecation_details(self):
if self.system_check_removed_details is not None:
return [
checks.Error(
self.system_check_removed_details.get(
"msg",
"%s has been removed except for support in historical "
"migrations." % self.__class__.__name__,
),
hint=self.system_check_removed_details.get("hint"),
obj=self,
id=self.system_check_removed_details.get("id", "fields.EXXX"),
)
]
elif self.system_check_deprecated_details is not None:
return [
checks.Warning(
self.system_check_deprecated_details.get(
"msg", "%s has been deprecated." % self.__class__.__name__
),
hint=self.system_check_deprecated_details.get("hint"),
obj=self,
id=self.system_check_deprecated_details.get("id", "fields.WXXX"),
)
]
return []
def get_col(self, alias, output_field=None):
if alias == self.model._meta.db_table and (
output_field is None or output_field == self
):
return self.cached_col
from django.db.models.expressions import Col
return Col(alias, self, output_field)
@cached_property
def cached_col(self):
from django.db.models.expressions import Col
return Col(self.model._meta.db_table, self)
def select_format(self, compiler, sql, params):
"""
Custom format for select clauses. For example, GIS columns need to be
selected as AsText(table.col) on MySQL as the table.col data can't be
used by Django.
"""
return sql, params
def deconstruct(self):
"""
Return enough information to recreate the field as a 4-tuple:
* The name of the field on the model, if contribute_to_class() has
been run.
* The import path of the field, including the class, e.g.
django.db.models.IntegerField. This should be the most portable
version, so less specific may be better.
* A list of positional arguments.
* A dict of keyword arguments.
Note that the positional or keyword arguments must contain values of
the following types (including inner values of collection types):
* None, bool, str, int, float, complex, set, frozenset, list, tuple,
dict
* UUID
* datetime.datetime (naive), datetime.date
* top-level classes, top-level functions - will be referenced by their
full import path
* Storage instances - these have their own deconstruct() method
This is because the values here must be serialized into a text format
(possibly new Python code, possibly JSON) and these are the only types
with encoding handlers defined.
There's no need to return the exact way the field was instantiated this
time, just ensure that the resulting field is the same - prefer keyword
arguments over positional ones, and omit parameters with their default
values.
"""
# Short-form way of fetching all the default parameters
keywords = {}
possibles = {
"verbose_name": None,
"primary_key": False,
"max_length": None,
"unique": False,
"blank": False,
"null": False,
"db_index": False,
"default": NOT_PROVIDED,
"editable": True,
"serialize": True,
"unique_for_date": None,
"unique_for_month": None,
"unique_for_year": None,
"choices": None,
"help_text": "",
"db_column": None,
"db_tablespace": None,
"auto_created": False,
"validators": [],
"error_messages": None,
}
attr_overrides = {
"unique": "_unique",
"error_messages": "_error_messages",
"validators": "_validators",
"verbose_name": "_verbose_name",
"db_tablespace": "_db_tablespace",
}
equals_comparison = {"choices", "validators"}
for name, default in possibles.items():
value = getattr(self, attr_overrides.get(name, name))
# Unroll anything iterable for choices into a concrete list
if name == "choices" and isinstance(value, collections.abc.Iterable):
value = list(value)
# Do correct kind of comparison
if name in equals_comparison:
if value != default:
keywords[name] = value
else:
if value is not default:
keywords[name] = value
# Work out path - we shorten it for known Django core fields
path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
if path.startswith("django.db.models.fields.related"):
path = path.replace("django.db.models.fields.related", "django.db.models")
elif path.startswith("django.db.models.fields.files"):
path = path.replace("django.db.models.fields.files", "django.db.models")
elif path.startswith("django.db.models.fields.json"):
path = path.replace("django.db.models.fields.json", "django.db.models")
elif path.startswith("django.db.models.fields.proxy"):
path = path.replace("django.db.models.fields.proxy", "django.db.models")
elif path.startswith("django.db.models.fields"):
path = path.replace("django.db.models.fields", "django.db.models")
# Return basic info - other fields should override this.
return (self.name, path, [], keywords)
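    # Deconstruction sketch (illustrative):
    #
    #   name, path, args, kwargs = models.CharField(max_length=100).deconstruct()
    #   # name is None for an unattached field;
    #   # path == 'django.db.models.CharField'; kwargs == {'max_length': 100}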
def clone(self):
"""
        Use deconstruct() to clone a new copy of this Field.
Will not preserve any class attachments/attribute names.
"""
name, path, args, kwargs = self.deconstruct()
return self.__class__(*args, **kwargs)
def __eq__(self, other):
# Needed for @total_ordering
if isinstance(other, Field):
return self.creation_counter == other.creation_counter and getattr(
self, "model", None
) == getattr(other, "model", None)
return NotImplemented
def __lt__(self, other):
# This is needed because bisect does not take a comparison function.
# Order by creation_counter first for backward compatibility.
if isinstance(other, Field):
if (
self.creation_counter != other.creation_counter
or not hasattr(self, "model")
and not hasattr(other, "model")
):
return self.creation_counter < other.creation_counter
elif hasattr(self, "model") != hasattr(other, "model"):
return not hasattr(self, "model") # Order no-model fields first
else:
# creation_counter's are equal, compare only models.
return (self.model._meta.app_label, self.model._meta.model_name) < (
other.model._meta.app_label,
other.model._meta.model_name,
)
return NotImplemented
def __hash__(self):
return hash(self.creation_counter)
def __deepcopy__(self, memodict):
# We don't have to deepcopy very much here, since most things are not
# intended to be altered after initial creation.
obj = copy.copy(self)
if self.remote_field:
obj.remote_field = copy.copy(self.remote_field)
if hasattr(self.remote_field, "field") and self.remote_field.field is self:
obj.remote_field.field = obj
memodict[id(self)] = obj
return obj
def __copy__(self):
# We need to avoid hitting __reduce__, so define this
# slightly weird copy construct.
obj = Empty()
obj.__class__ = self.__class__
obj.__dict__ = self.__dict__.copy()
return obj
def __reduce__(self):
"""
Pickling should return the model._meta.fields instance of the field,
not a new copy of that field. So, use the app registry to load the
model and then the field back.
"""
if not hasattr(self, "model"):
# Fields are sometimes used without attaching them to models (for
# example in aggregation). In this case give back a plain field
# instance. The code below will create a new empty instance of
# class self.__class__, then update its dict with self.__dict__
# values - so, this is very close to normal pickle.
state = self.__dict__.copy()
# The _get_default cached_property can't be pickled due to lambda
# usage.
state.pop("_get_default", None)
return _empty, (self.__class__,), state
return _load_field, (
self.model._meta.app_label,
self.model._meta.object_name,
self.name,
)
def get_pk_value_on_save(self, instance):
"""
Hook to generate new PK values on save. This method is called when
saving instances with no primary key value set. If this method returns
        something other than None, then the returned value is used when saving
the new instance.
"""
if self.default:
return self.get_default()
return None
def to_python(self, value):
"""
Convert the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Return the converted value. Subclasses should override this.
"""
return value
@cached_property
def error_messages(self):
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, "default_error_messages", {}))
messages.update(self._error_messages or {})
return messages
@cached_property
def validators(self):
"""
Some validators can't be created at field initialization time.
This method provides a way to delay their creation until required.
"""
return [*self.default_validators, *self._validators]
def run_validators(self, value):
if value in self.empty_values:
return
errors = []
for v in self.validators:
try:
v(value)
except exceptions.ValidationError as e:
if hasattr(e, "code") and e.code in self.error_messages:
e.message = self.error_messages[e.code]
errors.extend(e.error_list)
if errors:
raise exceptions.ValidationError(errors)
def validate(self, value, model_instance):
"""
Validate value and raise ValidationError if necessary. Subclasses
should override this to provide validation logic.
"""
if not self.editable:
# Skip validation for non-editable fields.
return
if self.choices is not None and value not in self.empty_values:
for option_key, option_value in self.choices:
if isinstance(option_value, (list, tuple)):
# This is an optgroup, so look inside the group for
# options.
for optgroup_key, optgroup_value in option_value:
if value == optgroup_key:
return
elif value == option_key:
return
raise exceptions.ValidationError(
self.error_messages["invalid_choice"],
code="invalid_choice",
params={"value": value},
)
if value is None and not self.null:
raise exceptions.ValidationError(self.error_messages["null"], code="null")
if not self.blank and value in self.empty_values:
raise exceptions.ValidationError(self.error_messages["blank"], code="blank")
def clean(self, value, model_instance):
"""
Convert the value's type and run validation. Validation errors
from to_python() and validate() are propagated. Return the correct
value if no error is raised.
"""
value = self.to_python(value)
self.validate(value, model_instance)
self.run_validators(value)
return value
def db_type_parameters(self, connection):
return DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
def db_check(self, connection):
"""
Return the database column check constraint for this field, for the
provided connection. Works the same way as db_type() for the case that
get_internal_type() does not map to a preexisting model field.
"""
data = self.db_type_parameters(connection)
try:
return (
connection.data_type_check_constraints[self.get_internal_type()] % data
)
except KeyError:
return None
def db_type(self, connection):
"""
Return the database column data type for this field, for the provided
connection.
"""
# The default implementation of this method looks at the
# backend-specific data_types dictionary, looking up the field by its
# "internal type".
#
# A Field class can implement the get_internal_type() method to specify
# which *preexisting* Django Field class it's most similar to -- i.e.,
# a custom field might be represented by a TEXT column type, which is
# the same as the TextField Django field type, which means the custom
# field's get_internal_type() returns 'TextField'.
#
# But the limitation of the get_internal_type() / data_types approach
# is that it cannot handle database column types that aren't already
# mapped to one of the built-in Django field types. In this case, you
# can implement db_type() instead of get_internal_type() to specify
# exactly which wacky database column type you want to use.
data = self.db_type_parameters(connection)
try:
return connection.data_types[self.get_internal_type()] % data
except KeyError:
return None
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. For example, this method is called by ForeignKey and OneToOneField
to determine its data type.
"""
return self.db_type(connection)
def cast_db_type(self, connection):
"""Return the data type to use in the Cast() function."""
db_type = connection.ops.cast_data_types.get(self.get_internal_type())
if db_type:
return db_type % self.db_type_parameters(connection)
return self.db_type(connection)
def db_parameters(self, connection):
"""
Extension of db_type(), providing a range of different return values
(type, checks). This will look at db_type(), allowing custom model
fields to override it.
"""
type_string = self.db_type(connection)
check_string = self.db_check(connection)
return {
"type": type_string,
"check": check_string,
}
def db_type_suffix(self, connection):
return connection.data_types_suffix.get(self.get_internal_type())
def get_db_converters(self, connection):
if hasattr(self, "from_db_value"):
return [self.from_db_value]
return []
@property
def unique(self):
return self._unique or self.primary_key
@property
def db_tablespace(self):
return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
@property
def db_returning(self):
"""
Private API intended only to be used by Django itself. Currently only
the PostgreSQL backend supports returning multiple fields on a model.
"""
return False
def set_attributes_from_name(self, name):
self.name = self.name or name
self.attname, self.column = self.get_attname_column()
self.concrete = self.column is not None
if self.verbose_name is None and self.name:
self.verbose_name = self.name.replace("_", " ")
def contribute_to_class(self, cls, name, private_only=False):
"""
Register the field with the model class it belongs to.
If private_only is True, create a separate instance of this field
for every subclass of cls, even if cls is not an abstract model.
"""
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self, private=private_only)
if self.column:
setattr(cls, self.attname, self.descriptor_class(self))
if self.choices is not None:
# Don't override a get_FOO_display() method defined explicitly on
# this class, but don't check methods derived from inheritance, to
# allow overriding inherited choices. For more complex inheritance
# structures users should override contribute_to_class().
if "get_%s_display" % self.name not in cls.__dict__:
setattr(
cls,
"get_%s_display" % self.name,
partialmethod(cls._get_FIELD_display, field=self),
)
def get_filter_kwargs_for_object(self, obj):
"""
        Return a dict that, when passed as kwargs to
        self.model._default_manager.filter(), would yield all instances
        having the same value for this field as obj has.
"""
return {self.name: getattr(obj, self.attname)}
def get_attname(self):
return self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
def get_internal_type(self):
return self.__class__.__name__
def pre_save(self, model_instance, add):
"""Return field's value just before saving."""
return getattr(model_instance, self.attname)
def get_prep_value(self, value):
"""Perform preliminary non-db specific value checks and conversions."""
if isinstance(value, Promise):
value = value._proxy____cast()
return value
def get_db_prep_value(self, value, connection, prepared=False):
"""
Return field's value prepared for interacting with the database backend.
Used by the default implementations of get_db_prep_save().
"""
if not prepared:
value = self.get_prep_value(value)
return value
def get_db_prep_save(self, value, connection):
"""Return field's value prepared for saving into a database."""
return self.get_db_prep_value(value, connection=connection, prepared=False)
def has_default(self):
"""Return a boolean of whether this field has a default value."""
return self.default is not NOT_PROVIDED
def get_default(self):
"""Return the default value for this field."""
return self._get_default()
@cached_property
def _get_default(self):
if self.has_default():
if callable(self.default):
return self.default
return lambda: self.default
if (
not self.empty_strings_allowed
or self.null
and not connection.features.interprets_empty_strings_as_nulls
):
return return_None
return str # return empty string
def get_choices(
self,
include_blank=True,
blank_choice=BLANK_CHOICE_DASH,
limit_choices_to=None,
ordering=(),
):
"""
        Return choices with a default blank choice included, for use
as <select> choices for this field.
"""
if self.choices is not None:
choices = list(self.choices)
if include_blank:
blank_defined = any(
choice in ("", None) for choice, _ in self.flatchoices
)
if not blank_defined:
choices = blank_choice + choices
return choices
rel_model = self.remote_field.model
limit_choices_to = limit_choices_to or self.get_limit_choices_to()
choice_func = operator.attrgetter(
self.remote_field.get_related_field().attname
if hasattr(self.remote_field, "get_related_field")
else "pk"
)
qs = rel_model._default_manager.complex_filter(limit_choices_to)
if ordering:
qs = qs.order_by(*ordering)
return (blank_choice if include_blank else []) + [
(choice_func(x), str(x)) for x in qs
]
def value_to_string(self, obj):
"""
Return a string value of this field from the passed obj.
This is used by the serialization framework.
"""
return str(self.value_from_object(obj))
def _get_flatchoices(self):
"""Flattened version of choices tuple."""
if self.choices is None:
return []
flat = []
for choice, value in self.choices:
if isinstance(value, (list, tuple)):
flat.extend(value)
else:
flat.append((choice, value))
return flat
flatchoices = property(_get_flatchoices)
def save_form_data(self, instance, data):
setattr(instance, self.name, data)
def formfield(self, form_class=None, choices_form_class=None, **kwargs):
"""Return a django.forms.Field instance for this field."""
defaults = {
"required": not self.blank,
"label": capfirst(self.verbose_name),
"help_text": self.help_text,
}
if self.has_default():
if callable(self.default):
defaults["initial"] = self.default
defaults["show_hidden_initial"] = True
else:
defaults["initial"] = self.get_default()
if self.choices is not None:
# Fields with choices get special treatment.
include_blank = self.blank or not (
self.has_default() or "initial" in kwargs
)
defaults["choices"] = self.get_choices(include_blank=include_blank)
defaults["coerce"] = self.to_python
if self.null:
defaults["empty_value"] = None
if choices_form_class is not None:
form_class = choices_form_class
else:
form_class = forms.TypedChoiceField
# Many of the subclass-specific formfield arguments (min_value,
# max_value) don't apply for choice fields, so be sure to only pass
# the values that TypedChoiceField will understand.
for k in list(kwargs):
if k not in (
"coerce",
"empty_value",
"choices",
"required",
"widget",
"label",
"initial",
"help_text",
"error_messages",
"show_hidden_initial",
"disabled",
):
del kwargs[k]
defaults.update(kwargs)
if form_class is None:
form_class = forms.CharField
return form_class(**defaults)
def value_from_object(self, obj):
"""Return the value of this field in the given model instance."""
return getattr(obj, self.attname)
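# Illustrative sketch, not part of Django: as the comments in db_type()
# explain, a field whose column type has no built-in internal-type mapping
# can override db_type() directly. The column types below are hypothetical.
class _ExampleMoneyField(Field):
    def db_type(self, connection):
        # Use a vendor-specific column type where available, otherwise fall
        # back to a plain varchar column.
        if connection.vendor == "postgresql":
            return "numeric(20, 2)"
        return "varchar(64)"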
class BooleanField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be either True or False."),
"invalid_nullable": _("“%(value)s” value must be either True, False, or None."),
}
description = _("Boolean (Either True or False)")
def get_internal_type(self):
return "BooleanField"
def to_python(self, value):
if self.null and value in self.empty_values:
return None
if value in (True, False):
# 1/0 are equal to True/False. bool() converts former to latter.
return bool(value)
if value in ("t", "True", "1"):
return True
if value in ("f", "False", "0"):
return False
raise exceptions.ValidationError(
self.error_messages["invalid_nullable" if self.null else "invalid"],
code="invalid",
params={"value": value},
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return self.to_python(value)
def formfield(self, **kwargs):
if self.choices is not None:
include_blank = not (self.has_default() or "initial" in kwargs)
defaults = {"choices": self.get_choices(include_blank=include_blank)}
else:
form_class = forms.NullBooleanField if self.null else forms.BooleanField
# In HTML checkboxes, 'required' means "must be checked" which is
# different from the choices case ("must select some value").
# required=False allows unchecked checkboxes.
defaults = {"form_class": form_class, "required": False}
return super().formfield(**{**defaults, **kwargs})
def select_format(self, compiler, sql, params):
sql, params = super().select_format(compiler, sql, params)
# Filters that match everything are handled as empty strings in the
# WHERE clause, but in SELECT or GROUP BY list they must use a
# predicate that's always True.
if sql == "":
sql = "1"
return sql, params
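def _example_boolean_to_python():
    # Illustrative sketch, not part of Django: to_python() above normalizes
    # the small set of accepted spellings and treats 1/0 like True/False.
    f = BooleanField()
    assert f.to_python("1") is True
    assert f.to_python("False") is False
    assert f.to_python(0) is False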
class CharField(Field):
description = _("String (up to %(max_length)s)")
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
def check(self, **kwargs):
databases = kwargs.get("databases") or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
*self._check_max_length_attribute(**kwargs),
]
def _check_max_length_attribute(self, **kwargs):
if self.max_length is None:
return [
checks.Error(
"CharFields must define a 'max_length' attribute.",
obj=self,
id="fields.E120",
)
]
elif (
not isinstance(self.max_length, int)
or isinstance(self.max_length, bool)
or self.max_length <= 0
):
return [
checks.Error(
"'max_length' must be a positive integer.",
obj=self,
id="fields.E121",
)
]
else:
return []
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None
or "supports_collation_on_charfield"
in self.model._meta.required_db_features
or connection.features.supports_collation_on_charfield
):
errors.append(
checks.Error(
"%s does not support a database collation on "
"CharFields." % connection.display_name,
obj=self,
id="fields.E190",
),
)
return errors
def cast_db_type(self, connection):
if self.max_length is None:
return connection.ops.cast_char_field_without_max_length
return super().cast_db_type(connection)
def db_parameters(self, connection):
db_params = super().db_parameters(connection)
db_params["collation"] = self.db_collation
return db_params
def get_internal_type(self):
return "CharField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
# the value in the form field (to pass into widget for example).
defaults = {"max_length": self.max_length}
# TODO: Handle multiple backends with different feature flags.
if self.null and not connection.features.interprets_empty_strings_as_nulls:
defaults["empty_value"] = None
defaults.update(kwargs)
return super().formfield(**defaults)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs["db_collation"] = self.db_collation
return name, path, args, kwargs
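def _example_charfield_deconstruct():
    # Illustrative sketch, not part of Django: deconstruct() returns enough
    # information for migrations to recreate the field. The collation name
    # here is hypothetical.
    f = CharField(max_length=100, db_collation="utf8_general_ci")
    name, path, args, kwargs = f.deconstruct()
    assert kwargs == {"max_length": 100, "db_collation": "utf8_general_ci"}
    assert CharField(*args, **kwargs).max_length == 100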
class CommaSeparatedIntegerField(CharField):
default_validators = [validators.validate_comma_separated_integer_list]
description = _("Comma-separated integers")
system_check_removed_details = {
"msg": (
"CommaSeparatedIntegerField is removed except for support in "
"historical migrations."
),
"hint": (
"Use CharField(validators=[validate_comma_separated_integer_list]) "
"instead."
),
"id": "fields.E901",
}
def _to_naive(value):
if timezone.is_aware(value):
value = timezone.make_naive(value, datetime.timezone.utc)
return value
def _get_naive_now():
return _to_naive(timezone.now())
class DateTimeCheckMixin:
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_mutually_exclusive_options(),
*self._check_fix_default_value(),
]
def _check_mutually_exclusive_options(self):
# auto_now, auto_now_add, and default are mutually exclusive
# options. The use of more than one of these options together
        # will trigger an error.
mutually_exclusive_options = [
self.auto_now_add,
self.auto_now,
self.has_default(),
]
enabled_options = [
option not in (None, False) for option in mutually_exclusive_options
].count(True)
if enabled_options > 1:
return [
checks.Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=self,
id="fields.E160",
)
]
else:
return []
def _check_fix_default_value(self):
return []
# Concrete subclasses use this in their implementations of
# _check_fix_default_value().
def _check_if_value_fixed(self, value, now=None):
"""
Check if the given value appears to have been provided as a "fixed"
time value, and include a warning in the returned list if it does. The
value argument must be a date object or aware/naive datetime object. If
now is provided, it must be a naive datetime object.
"""
if now is None:
now = _get_naive_now()
offset = datetime.timedelta(seconds=10)
lower = now - offset
upper = now + offset
if isinstance(value, datetime.datetime):
value = _to_naive(value)
else:
assert isinstance(value, datetime.date)
lower = lower.date()
upper = upper.date()
if lower <= value <= upper:
return [
checks.Warning(
"Fixed default value provided.",
hint=(
"It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`"
),
obj=self,
id="fields.W161",
)
]
return []
class DateField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid date format. It must be "
"in YYYY-MM-DD format."
),
"invalid_date": _(
"“%(value)s” value has the correct format (YYYY-MM-DD) "
"but it is an invalid date."
),
}
description = _("Date (without time)")
def __init__(
self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs
):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs["editable"] = False
kwargs["blank"] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
value = _to_naive(value).date()
elif isinstance(value, datetime.date):
pass
else:
# No explicit date / datetime value -- no checks necessary
return []
# At this point, value is a date object.
return self._check_if_value_fixed(value)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now:
kwargs["auto_now"] = True
if self.auto_now_add:
kwargs["auto_now_add"] = True
if self.auto_now or self.auto_now_add:
del kwargs["editable"]
del kwargs["blank"]
return name, path, args, kwargs
def get_internal_type(self):
return "DateField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
if settings.USE_TZ and timezone.is_aware(value):
# Convert aware datetimes to the default time zone
# before casting them to dates (#17742).
default_timezone = timezone.get_default_timezone()
value = timezone.make_naive(value, default_timezone)
return value.date()
if isinstance(value, datetime.date):
return value
try:
parsed = parse_date(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_date"],
code="invalid_date",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.date.today()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def contribute_to_class(self, cls, name, **kwargs):
super().contribute_to_class(cls, name, **kwargs)
if not self.null:
setattr(
cls,
"get_next_by_%s" % self.name,
partialmethod(
cls._get_next_or_previous_by_FIELD, field=self, is_next=True
),
)
setattr(
cls,
"get_previous_by_%s" % self.name,
partialmethod(
cls._get_next_or_previous_by_FIELD, field=self, is_next=False
),
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts dates into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DateField,
**kwargs,
}
)
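def _example_fixed_default_check():
    # Illustrative sketch, not part of Django: a concrete default close to
    # "now" is flagged with fields.W161 by _check_if_value_fixed() above.
    # An explicit naive "now" is passed so no settings are required.
    f = DateField(default=datetime.date(2021, 1, 2))
    now = datetime.datetime(2021, 1, 2, 12, 0)
    warning_ids = [w.id for w in f._check_if_value_fixed(f.default, now=now)]
    assert warning_ids == ["fields.W161"]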
class DateTimeField(DateField):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."
),
"invalid_date": _(
"“%(value)s” value has the correct format "
"(YYYY-MM-DD) but it is an invalid date."
),
"invalid_datetime": _(
"“%(value)s” value has the correct format "
"(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
"but it is an invalid date/time."
),
}
description = _("Date (with time)")
# __init__ is inherited from DateField
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, (datetime.datetime, datetime.date)):
return self._check_if_value_fixed(value)
# No explicit date / datetime value -- no checks necessary.
return []
def get_internal_type(self):
return "DateTimeField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
return value
if isinstance(value, datetime.date):
value = datetime.datetime(value.year, value.month, value.day)
if settings.USE_TZ:
# For backwards compatibility, interpret naive datetimes in
# local time. This won't work during DST change, but we can't
# do much about it, so we let the exceptions percolate up the
# call stack.
warnings.warn(
"DateTimeField %s.%s received a naive datetime "
"(%s) while time zone support is active."
% (self.model.__name__, self.name, value),
RuntimeWarning,
)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
try:
parsed = parse_datetime(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_datetime"],
code="invalid_datetime",
params={"value": value},
)
try:
parsed = parse_date(value)
if parsed is not None:
return datetime.datetime(parsed.year, parsed.month, parsed.day)
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_date"],
code="invalid_date",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = timezone.now()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
# contribute_to_class is inherited from DateField, it registers
# get_next_by_FOO and get_prev_by_FOO
def get_prep_value(self, value):
value = super().get_prep_value(value)
value = self.to_python(value)
if value is not None and settings.USE_TZ and timezone.is_naive(value):
# For backwards compatibility, interpret naive datetimes in local
# time. This won't work during DST change, but we can't do much
# about it, so we let the exceptions percolate up the call stack.
try:
name = "%s.%s" % (self.model.__name__, self.name)
except AttributeError:
name = "(unbound)"
warnings.warn(
"DateTimeField %s received a naive datetime (%s)"
" while time zone support is active." % (name, value),
RuntimeWarning,
)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
def get_db_prep_value(self, value, connection, prepared=False):
# Casts datetimes into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datetimefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DateTimeField,
**kwargs,
}
)
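def _example_datetime_to_python():
    # Illustrative sketch, not part of Django: to_python() above tries
    # parse_datetime() first and falls back to parse_date(), so both a full
    # datetime string and a bare date string are accepted.
    f = DateTimeField()
    parsed = f.to_python("2021-03-04 05:06:07")
    assert parsed == datetime.datetime(2021, 3, 4, 5, 6, 7)
    assert f.to_python("2021-03-04") == datetime.datetime(2021, 3, 4)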
class DecimalField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be a decimal number."),
}
description = _("Decimal number")
def __init__(
self,
verbose_name=None,
name=None,
max_digits=None,
decimal_places=None,
**kwargs,
):
self.max_digits, self.decimal_places = max_digits, decimal_places
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super().check(**kwargs)
digits_errors = [
*self._check_decimal_places(),
*self._check_max_digits(),
]
if not digits_errors:
errors.extend(self._check_decimal_places_and_max_digits(**kwargs))
else:
errors.extend(digits_errors)
return errors
def _check_decimal_places(self):
try:
decimal_places = int(self.decimal_places)
if decimal_places < 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=self,
id="fields.E130",
)
]
except ValueError:
return [
checks.Error(
"'decimal_places' must be a non-negative integer.",
obj=self,
id="fields.E131",
)
]
else:
return []
def _check_max_digits(self):
try:
max_digits = int(self.max_digits)
if max_digits <= 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=self,
id="fields.E132",
)
]
except ValueError:
return [
checks.Error(
"'max_digits' must be a positive integer.",
obj=self,
id="fields.E133",
)
]
else:
return []
def _check_decimal_places_and_max_digits(self, **kwargs):
if int(self.decimal_places) > int(self.max_digits):
return [
checks.Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=self,
id="fields.E134",
)
]
return []
@cached_property
def validators(self):
return super().validators + [
validators.DecimalValidator(self.max_digits, self.decimal_places)
]
@cached_property
def context(self):
return decimal.Context(prec=self.max_digits)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.max_digits is not None:
kwargs["max_digits"] = self.max_digits
if self.decimal_places is not None:
kwargs["decimal_places"] = self.decimal_places
return name, path, args, kwargs
def get_internal_type(self):
return "DecimalField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, float):
if math.isnan(value):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
return self.context.create_decimal_from_float(value)
try:
return decimal.Decimal(value)
except (decimal.InvalidOperation, TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def get_db_prep_save(self, value, connection):
return connection.ops.adapt_decimalfield_value(
self.to_python(value), self.max_digits, self.decimal_places
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"max_digits": self.max_digits,
"decimal_places": self.decimal_places,
"form_class": forms.DecimalField,
**kwargs,
}
)
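def _example_decimal_to_python():
    # Illustrative sketch, not part of Django: strings go straight to
    # decimal.Decimal, while floats are converted through a context whose
    # precision is capped at max_digits.
    f = DecimalField(max_digits=5, decimal_places=2)
    assert f.to_python("3.14") == decimal.Decimal("3.14")
    assert f.to_python(1.5) == decimal.Decimal("1.5")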
class DurationField(Field):
"""
Store timedelta objects.
Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint
of microseconds on other databases.
"""
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"[DD] [[HH:]MM:]ss[.uuuuuu] format."
)
}
description = _("Duration")
def get_internal_type(self):
return "DurationField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.timedelta):
return value
try:
parsed = parse_duration(value)
except ValueError:
pass
else:
if parsed is not None:
return parsed
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def get_db_prep_value(self, value, connection, prepared=False):
if connection.features.has_native_duration_field:
return value
if value is None:
return None
return duration_microseconds(value)
def get_db_converters(self, connection):
converters = []
if not connection.features.has_native_duration_field:
converters.append(connection.ops.convert_durationfield_value)
return converters + super().get_db_converters(connection)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else duration_string(val)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DurationField,
**kwargs,
}
)
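def _example_duration_storage():
    # Illustrative sketch, not part of Django: backends without a native
    # interval type store a duration as its total number of microseconds,
    # and strings use the "[DD] [[HH:]MM:]ss[.uuuuuu]" format named above.
    delta = datetime.timedelta(days=1, seconds=1)
    assert duration_microseconds(delta) == 86401 * 10**6
    assert DurationField().to_python("1 00:00:01") == delta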
class EmailField(CharField):
default_validators = [validators.validate_email]
description = _("Email address")
def __init__(self, *args, **kwargs):
# max_length=254 to be compliant with RFCs 3696 and 5321
kwargs.setdefault("max_length", 254)
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
        # We do not exclude max_length if it matches the default, as we want
        # to change the default in the future.
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause email validation to be performed
# twice.
return super().formfield(
**{
"form_class": forms.EmailField,
**kwargs,
}
)
class FilePathField(Field):
description = _("File path")
def __init__(
self,
verbose_name=None,
name=None,
path="",
match=None,
recursive=False,
allow_files=True,
allow_folders=False,
**kwargs,
):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
kwargs.setdefault("max_length", 100)
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_allowing_files_or_folders(**kwargs),
]
def _check_allowing_files_or_folders(self, **kwargs):
if not self.allow_files and not self.allow_folders:
return [
checks.Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' "
"set to True.",
obj=self,
id="fields.E140",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.path != "":
kwargs["path"] = self.path
if self.match is not None:
kwargs["match"] = self.match
if self.recursive is not False:
kwargs["recursive"] = self.recursive
if self.allow_files is not True:
kwargs["allow_files"] = self.allow_files
if self.allow_folders is not False:
kwargs["allow_folders"] = self.allow_folders
if kwargs.get("max_length") == 100:
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"path": self.path() if callable(self.path) else self.path,
"match": self.match,
"recursive": self.recursive,
"form_class": forms.FilePathField,
"allow_files": self.allow_files,
"allow_folders": self.allow_folders,
**kwargs,
}
)
def get_internal_type(self):
return "FilePathField"
class FloatField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be a float."),
}
description = _("Floating point number")
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return float(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_internal_type(self):
return "FloatField"
def to_python(self, value):
if value is None:
return value
try:
return float(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.FloatField,
**kwargs,
}
)
class IntegerField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be an integer."),
}
description = _("Integer")
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_max_length_warning(),
]
def _check_max_length_warning(self):
if self.max_length is not None:
return [
checks.Warning(
"'max_length' is ignored when used with %s."
% self.__class__.__name__,
hint="Remove 'max_length' from field",
obj=self,
id="fields.W122",
)
]
return []
@cached_property
def validators(self):
# These validators can't be added at field initialization time since
# they're based on values retrieved from `connection`.
validators_ = super().validators
internal_type = self.get_internal_type()
min_value, max_value = connection.ops.integer_field_range(internal_type)
if min_value is not None and not any(
(
isinstance(validator, validators.MinValueValidator)
and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
)
>= min_value
)
for validator in validators_
):
validators_.append(validators.MinValueValidator(min_value))
if max_value is not None and not any(
(
isinstance(validator, validators.MaxValueValidator)
and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
)
<= max_value
)
for validator in validators_
):
validators_.append(validators.MaxValueValidator(max_value))
return validators_
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return int(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_internal_type(self):
return "IntegerField"
def to_python(self, value):
if value is None:
return value
try:
return int(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.IntegerField,
**kwargs,
}
)
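def _example_integer_coercion():
    # Illustrative sketch, not part of Django: get_prep_value() coerces to
    # int, while to_python() raises a ValidationError on bad input.
    f = IntegerField()
    assert f.get_prep_value("42") == 42
    try:
        f.to_python("not-a-number")
    except exceptions.ValidationError as e:
        assert e.code == "invalid"
    else:
        raise AssertionError("expected a ValidationError")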
class BigIntegerField(IntegerField):
description = _("Big (8 byte) integer")
MAX_BIGINT = 9223372036854775807
def get_internal_type(self):
return "BigIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": -BigIntegerField.MAX_BIGINT - 1,
"max_value": BigIntegerField.MAX_BIGINT,
**kwargs,
}
)
class SmallIntegerField(IntegerField):
description = _("Small integer")
def get_internal_type(self):
return "SmallIntegerField"
class IPAddressField(Field):
empty_strings_allowed = False
description = _("IPv4 address")
system_check_removed_details = {
"msg": (
"IPAddressField has been removed except for support in "
"historical migrations."
),
"hint": "Use GenericIPAddressField instead.",
"id": "fields.E900",
}
def __init__(self, *args, **kwargs):
kwargs["max_length"] = 15
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def get_internal_type(self):
return "IPAddressField"
class GenericIPAddressField(Field):
empty_strings_allowed = False
description = _("IP address")
default_error_messages = {}
def __init__(
self,
verbose_name=None,
name=None,
protocol="both",
unpack_ipv4=False,
*args,
**kwargs,
):
self.unpack_ipv4 = unpack_ipv4
self.protocol = protocol
(
self.default_validators,
invalid_error_message,
) = validators.ip_address_validators(protocol, unpack_ipv4)
self.default_error_messages["invalid"] = invalid_error_message
kwargs["max_length"] = 39
super().__init__(verbose_name, name, *args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_blank_and_null_values(**kwargs),
]
def _check_blank_and_null_values(self, **kwargs):
if not getattr(self, "null", False) and getattr(self, "blank", False):
return [
checks.Error(
"GenericIPAddressFields cannot have blank=True if null=False, "
"as blank values are stored as nulls.",
obj=self,
id="fields.E150",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.unpack_ipv4 is not False:
kwargs["unpack_ipv4"] = self.unpack_ipv4
if self.protocol != "both":
kwargs["protocol"] = self.protocol
if kwargs.get("max_length") == 39:
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return "GenericIPAddressField"
def to_python(self, value):
if value is None:
return None
if not isinstance(value, str):
value = str(value)
value = value.strip()
if ":" in value:
return clean_ipv6_address(
value, self.unpack_ipv4, self.error_messages["invalid"]
)
return value
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_ipaddressfield_value(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
if value and ":" in value:
try:
return clean_ipv6_address(value, self.unpack_ipv4)
except exceptions.ValidationError:
pass
return str(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"protocol": self.protocol,
"form_class": forms.GenericIPAddressField,
**kwargs,
}
)
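def _example_ipv6_normalization():
    # Illustrative sketch, not part of Django: to_python() normalizes IPv6
    # addresses via clean_ipv6_address(), and unpack_ipv4=True unwraps
    # IPv4-mapped addresses. Addresses below are documentation examples.
    f = GenericIPAddressField(unpack_ipv4=True)
    assert f.to_python("2001:0::0:01") == "2001::1"
    assert f.to_python("::ffff:192.0.2.1") == "192.0.2.1"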
class NullBooleanField(BooleanField):
default_error_messages = {
"invalid": _("“%(value)s” value must be either None, True or False."),
"invalid_nullable": _("“%(value)s” value must be either None, True or False."),
}
description = _("Boolean (Either True, False or None)")
system_check_removed_details = {
"msg": (
"NullBooleanField is removed except for support in historical "
"migrations."
),
"hint": "Use BooleanField(null=True) instead.",
"id": "fields.E903",
}
def __init__(self, *args, **kwargs):
kwargs["null"] = True
kwargs["blank"] = True
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["null"]
del kwargs["blank"]
return name, path, args, kwargs
class PositiveIntegerRelDbTypeMixin:
def __init_subclass__(cls, **kwargs):
super().__init_subclass__(**kwargs)
if not hasattr(cls, "integer_field_class"):
cls.integer_field_class = next(
(
parent
for parent in cls.__mro__[1:]
if issubclass(parent, IntegerField)
),
None,
)
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. In most cases, a foreign key pointing to a positive integer
primary key will have an integer column data type but some databases
(e.g. MySQL) have an unsigned integer type. In that case
(related_fields_match_type=True), the primary key should return its
db_type.
"""
if connection.features.related_fields_match_type:
return self.db_type(connection)
else:
return self.integer_field_class().db_type(connection=connection)
class PositiveBigIntegerField(PositiveIntegerRelDbTypeMixin, BigIntegerField):
description = _("Positive big integer")
def get_internal_type(self):
return "PositiveBigIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
description = _("Positive integer")
def get_internal_type(self):
return "PositiveIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, SmallIntegerField):
description = _("Positive small integer")
def get_internal_type(self):
return "PositiveSmallIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
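def _example_integer_field_class():
    # Illustrative sketch, not part of Django: __init_subclass__() on the
    # mixin above records the first IntegerField ancestor, which supplies
    # the fallback column type for related fields.
    assert PositiveSmallIntegerField.integer_field_class is SmallIntegerField
    assert PositiveBigIntegerField.integer_field_class is BigIntegerField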
class SlugField(CharField):
default_validators = [validators.validate_slug]
description = _("Slug (up to %(max_length)s)")
def __init__(
self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs
):
self.allow_unicode = allow_unicode
if self.allow_unicode:
self.default_validators = [validators.validate_unicode_slug]
super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 50:
del kwargs["max_length"]
if self.db_index is False:
kwargs["db_index"] = False
else:
del kwargs["db_index"]
if self.allow_unicode is not False:
kwargs["allow_unicode"] = self.allow_unicode
return name, path, args, kwargs
def get_internal_type(self):
return "SlugField"
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.SlugField,
"allow_unicode": self.allow_unicode,
**kwargs,
}
)
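def _example_slugfield_deconstruct():
    # Illustrative sketch, not part of Django: deconstruct() above only
    # records options that differ from SlugField's own defaults
    # (max_length=50, db_index=True, allow_unicode=False).
    name, path, args, kwargs = SlugField().deconstruct()
    assert kwargs == {}
    name, path, args, kwargs = SlugField(db_index=False).deconstruct()
    assert kwargs == {"db_index": False}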
class TextField(Field):
description = _("Text")
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
def check(self, **kwargs):
databases = kwargs.get("databases") or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
]
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None
or "supports_collation_on_textfield"
in self.model._meta.required_db_features
or connection.features.supports_collation_on_textfield
):
errors.append(
checks.Error(
"%s does not support a database collation on "
"TextFields." % connection.display_name,
obj=self,
id="fields.E190",
),
)
return errors
def db_parameters(self, connection):
db_params = super().db_parameters(connection)
db_params["collation"] = self.db_collation
return db_params
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
# the value in the form field (to pass into widget for example).
return super().formfield(
**{
"max_length": self.max_length,
**({} if self.choices is not None else {"widget": forms.Textarea}),
**kwargs,
}
)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs["db_collation"] = self.db_collation
return name, path, args, kwargs
class TimeField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"HH:MM[:ss[.uuuuuu]] format."
),
"invalid_time": _(
"“%(value)s” value has the correct format "
"(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."
),
}
description = _("Time")
def __init__(
self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs
):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs["editable"] = False
kwargs["blank"] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
now = None
elif isinstance(value, datetime.time):
now = _get_naive_now()
# This will not use the right date in the race condition where now
# is just before the date change and value is just past 0:00.
value = datetime.datetime.combine(now.date(), value)
else:
# No explicit time / datetime value -- no checks necessary
return []
# At this point, value is a datetime object.
return self._check_if_value_fixed(value, now=now)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now is not False:
kwargs["auto_now"] = self.auto_now
if self.auto_now_add is not False:
kwargs["auto_now_add"] = self.auto_now_add
if self.auto_now or self.auto_now_add:
del kwargs["blank"]
del kwargs["editable"]
return name, path, args, kwargs
def get_internal_type(self):
return "TimeField"
def to_python(self, value):
if value is None:
return None
if isinstance(value, datetime.time):
return value
if isinstance(value, datetime.datetime):
# Not usually a good idea to pass in a datetime here (it loses
# information), but this can be a side-effect of interacting with a
# database backend (e.g. Oracle), so we'll be accommodating.
return value.time()
try:
parsed = parse_time(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_time"],
code="invalid_time",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.datetime.now().time()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts times into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_timefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.TimeField,
**kwargs,
}
)
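def _example_time_to_python():
    # Illustrative sketch, not part of Django: datetimes are truncated to
    # their time component, and strings go through parse_time().
    f = TimeField()
    dt = datetime.datetime(2021, 1, 2, 3, 4, 5)
    assert f.to_python(dt) == datetime.time(3, 4, 5)
    assert f.to_python("03:04:05") == datetime.time(3, 4, 5)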
class URLField(CharField):
default_validators = [validators.URLValidator()]
description = _("URL")
def __init__(self, verbose_name=None, name=None, **kwargs):
kwargs.setdefault("max_length", 200)
super().__init__(verbose_name, name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 200:
del kwargs["max_length"]
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause URL validation to be performed
# twice.
return super().formfield(
**{
"form_class": forms.URLField,
**kwargs,
}
)
class BinaryField(Field):
description = _("Raw binary data")
empty_values = [None, b""]
def __init__(self, *args, **kwargs):
kwargs.setdefault("editable", False)
super().__init__(*args, **kwargs)
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
def check(self, **kwargs):
return [*super().check(**kwargs), *self._check_str_default_value()]
def _check_str_default_value(self):
if self.has_default() and isinstance(self.default, str):
return [
checks.Error(
"BinaryField's default cannot be a string. Use bytes "
"content instead.",
obj=self,
id="fields.E170",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.editable:
kwargs["editable"] = True
else:
del kwargs["editable"]
return name, path, args, kwargs
def get_internal_type(self):
return "BinaryField"
def get_placeholder(self, value, compiler, connection):
return connection.ops.binary_placeholder_sql(value)
def get_default(self):
if self.has_default() and not callable(self.default):
return self.default
default = super().get_default()
if default == "":
return b""
return default
def get_db_prep_value(self, value, connection, prepared=False):
value = super().get_db_prep_value(value, connection, prepared)
if value is not None:
return connection.Database.Binary(value)
return value
def value_to_string(self, obj):
"""Binary data is serialized as base64"""
return b64encode(self.value_from_object(obj)).decode("ascii")
def to_python(self, value):
# If it's a string, it should be base64-encoded data
if isinstance(value, str):
return memoryview(b64decode(value.encode("ascii")))
return value
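def _example_binary_base64():
    # Illustrative sketch, not part of Django: serialized binary content is
    # base64 text, and to_python() decodes it back to a memoryview.
    f = BinaryField()
    assert f.to_python("aGVsbG8=") == b"hello"
    assert b64encode(b"hello").decode("ascii") == "aGVsbG8="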
class UUIDField(Field):
default_error_messages = {
"invalid": _("“%(value)s” is not a valid UUID."),
}
description = _("Universally unique identifier")
empty_strings_allowed = False
def __init__(self, verbose_name=None, **kwargs):
kwargs["max_length"] = 32
super().__init__(verbose_name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return "UUIDField"
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
if not isinstance(value, uuid.UUID):
value = self.to_python(value)
if connection.features.has_native_uuid_field:
return value
return value.hex
def to_python(self, value):
if value is not None and not isinstance(value, uuid.UUID):
input_form = "int" if isinstance(value, int) else "hex"
try:
return uuid.UUID(**{input_form: value})
except (AttributeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
return value
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.UUIDField,
**kwargs,
}
)
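def _example_uuid_to_python():
    # Illustrative sketch, not part of Django: to_python() accepts the
    # 32-character hex form or an integer and returns a uuid.UUID.
    f = UUIDField()
    u = f.to_python("12345678123456781234567812345678")
    assert u == uuid.UUID("12345678-1234-5678-1234-567812345678")
    assert f.to_python(u.int) == u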
class AutoFieldMixin:
db_returning = True
def __init__(self, *args, **kwargs):
kwargs["blank"] = True
super().__init__(*args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_primary_key(),
]
def _check_primary_key(self):
if not self.primary_key:
return [
checks.Error(
"AutoFields must set primary_key=True.",
obj=self,
id="fields.E100",
),
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["blank"]
kwargs["primary_key"] = True
return name, path, args, kwargs
def validate(self, value, model_instance):
pass
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
value = connection.ops.validate_autopk_value(value)
return value
def contribute_to_class(self, cls, name, **kwargs):
if cls._meta.auto_field:
raise ValueError(
"Model %s can't have more than one auto-generated field."
% cls._meta.label
)
super().contribute_to_class(cls, name, **kwargs)
cls._meta.auto_field = self
def formfield(self, **kwargs):
return None
class AutoFieldMeta(type):
"""
Metaclass to maintain backward inheritance compatibility for AutoField.
It is intended that AutoFieldMixin become public API when it is possible to
create a non-integer automatically-generated field using column defaults
stored in the database.
In many areas Django also relies on using isinstance() to check for an
automatically-generated field as a subclass of AutoField. A new flag needs
to be implemented on Field to be used instead.
When these issues have been addressed, this metaclass could be used to
deprecate inheritance from AutoField and use of isinstance() with AutoField
for detecting automatically-generated fields.
"""
@property
def _subclasses(self):
return (BigAutoField, SmallAutoField)
def __instancecheck__(self, instance):
return isinstance(instance, self._subclasses) or super().__instancecheck__(
instance
)
def __subclasscheck__(self, subclass):
return issubclass(subclass, self._subclasses) or super().__subclasscheck__(
subclass
)
class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta):
def get_internal_type(self):
return "AutoField"
def rel_db_type(self, connection):
return IntegerField().db_type(connection=connection)
class BigAutoField(AutoFieldMixin, BigIntegerField):
def get_internal_type(self):
return "BigAutoField"
def rel_db_type(self, connection):
return BigIntegerField().db_type(connection=connection)
class SmallAutoField(AutoFieldMixin, SmallIntegerField):
def get_internal_type(self):
return "SmallAutoField"
def rel_db_type(self, connection):
return SmallIntegerField().db_type(connection=connection)
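def _example_autofield_isinstance():
    # Illustrative sketch, not part of Django: AutoFieldMeta above makes the
    # concrete auto field variants pass isinstance()/issubclass() checks
    # against AutoField without actually inheriting from it.
    assert isinstance(BigAutoField(primary_key=True), AutoField)
    assert issubclass(SmallAutoField, AutoField)
    assert not issubclass(IntegerField, AutoField)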
66d3bc952644fae8996d7beacbc01c3c633daab617a33ede807d885ee68c078f
import functools
import inspect
from functools import partial
from django import forms
from django.apps import apps
from django.conf import SettingsReference, settings
from django.core import checks, exceptions
from django.db import connection, router
from django.db.backends import utils
from django.db.models import Q
from django.db.models.constants import LOOKUP_SEP
from django.db.models.deletion import CASCADE, SET_DEFAULT, SET_NULL
from django.db.models.query_utils import PathInfo
from django.db.models.utils import make_model_tuple
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from . import Field
from .mixins import FieldCacheMixin
from .related_descriptors import (
ForeignKeyDeferredAttribute,
ForwardManyToOneDescriptor,
ForwardOneToOneDescriptor,
ManyToManyDescriptor,
ReverseManyToOneDescriptor,
ReverseOneToOneDescriptor,
)
from .related_lookups import (
RelatedExact,
RelatedGreaterThan,
RelatedGreaterThanOrEqual,
RelatedIn,
RelatedIsNull,
RelatedLessThan,
RelatedLessThanOrEqual,
)
from .reverse_related import ForeignObjectRel, ManyToManyRel, ManyToOneRel, OneToOneRel
RECURSIVE_RELATIONSHIP_CONSTANT = "self"
def resolve_relation(scope_model, relation):
"""
Transform relation into a model or fully-qualified model string of the form
"app_label.ModelName", relative to scope_model.
The relation argument can be:
    * RECURSIVE_RELATIONSHIP_CONSTANT, i.e. the string "self", in which case
      the scope_model argument will be returned.
* A bare model name without an app_label, in which case scope_model's
app_label will be prepended.
* An "app_label.ModelName" string.
* A model class, which will be returned unchanged.
"""
# Check for recursive relations
if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
relation = scope_model
# Look for an "app.Model" relation
if isinstance(relation, str):
if "." not in relation:
relation = "%s.%s" % (scope_model._meta.app_label, relation)
return relation
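def _example_resolve_relation():
    # Illustrative sketch, not part of Django: bare model names gain the
    # scope model's app_label, "self" resolves to the scope model itself,
    # and dotted references pass through unchanged. The app and model names
    # here are hypothetical.
    class FakeMeta:
        app_label = "library"
    class Author:
        _meta = FakeMeta
    assert resolve_relation(Author, "Book") == "library.Book"
    assert resolve_relation(Author, "self") is Author
    assert resolve_relation(Author, "shop.Publisher") == "shop.Publisher"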
def lazy_related_operation(function, model, *related_models, **kwargs):
"""
Schedule `function` to be called once `model` and all `related_models`
have been imported and registered with the app registry. `function` will
be called with the newly-loaded model classes as its positional arguments,
plus any optional keyword arguments.
The `model` argument must be a model class. Each subsequent positional
argument is another model, or a reference to another model - see
`resolve_relation()` for the various forms these may take. Any relative
references will be resolved relative to `model`.
    This is a convenience wrapper for `Apps.lazy_model_operation` - the app
    registry used is the one found in `model._meta.apps`.
"""
models = [model] + [resolve_relation(model, rel) for rel in related_models]
model_keys = (make_model_tuple(m) for m in models)
apps = model._meta.apps
return apps.lazy_model_operation(partial(function, **kwargs), *model_keys)
class RelatedField(FieldCacheMixin, Field):
"""Base class that all relational fields inherit from."""
# Field flags
one_to_many = False
one_to_one = False
many_to_many = False
many_to_one = False
def __init__(
self,
related_name=None,
related_query_name=None,
limit_choices_to=None,
**kwargs,
):
self._related_name = related_name
self._related_query_name = related_query_name
self._limit_choices_to = limit_choices_to
super().__init__(**kwargs)
@cached_property
def related_model(self):
# Can't cache this property until all the models are loaded.
apps.check_models_ready()
return self.remote_field.model
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_related_name_is_valid(),
*self._check_related_query_name_is_valid(),
*self._check_relation_model_exists(),
*self._check_referencing_to_swapped_model(),
*self._check_clashes(),
]
def _check_related_name_is_valid(self):
import keyword
related_name = self.remote_field.related_name
if related_name is None:
return []
is_valid_id = (
not keyword.iskeyword(related_name) and related_name.isidentifier()
)
if not (is_valid_id or related_name.endswith("+")):
return [
checks.Error(
"The name '%s' is invalid related_name for field %s.%s"
% (
self.remote_field.related_name,
self.model._meta.object_name,
self.name,
),
hint=(
"Related name must be a valid Python identifier or end with a "
"'+'"
),
obj=self,
id="fields.E306",
)
]
return []
def _check_related_query_name_is_valid(self):
if self.remote_field.is_hidden():
return []
rel_query_name = self.related_query_name()
errors = []
if rel_query_name.endswith("_"):
errors.append(
checks.Error(
"Reverse query name '%s' must not end with an underscore."
% rel_query_name,
hint=(
"Add or change a related_name or related_query_name "
"argument for this field."
),
obj=self,
id="fields.E308",
)
)
if LOOKUP_SEP in rel_query_name:
errors.append(
checks.Error(
"Reverse query name '%s' must not contain '%s'."
% (rel_query_name, LOOKUP_SEP),
hint=(
"Add or change a related_name or related_query_name "
"argument for this field."
),
obj=self,
id="fields.E309",
)
)
return errors
def _check_relation_model_exists(self):
rel_is_missing = self.remote_field.model not in self.opts.apps.get_models()
rel_is_string = isinstance(self.remote_field.model, str)
model_name = (
self.remote_field.model
if rel_is_string
else self.remote_field.model._meta.object_name
)
if rel_is_missing and (
rel_is_string or not self.remote_field.model._meta.swapped
):
return [
checks.Error(
"Field defines a relation with model '%s', which is either "
"not installed, or is abstract." % model_name,
obj=self,
id="fields.E300",
)
]
return []
def _check_referencing_to_swapped_model(self):
if (
self.remote_field.model not in self.opts.apps.get_models()
and not isinstance(self.remote_field.model, str)
and self.remote_field.model._meta.swapped
):
return [
checks.Error(
"Field defines a relation with the model '%s', which has "
"been swapped out." % self.remote_field.model._meta.label,
hint="Update the relation to point at 'settings.%s'."
% self.remote_field.model._meta.swappable,
obj=self,
id="fields.E301",
)
]
return []
def _check_clashes(self):
"""Check accessor and reverse query name clashes."""
from django.db.models.base import ModelBase
errors = []
opts = self.model._meta
# f.remote_field.model may be a string instead of a model. Skip if
# model name is not resolved.
if not isinstance(self.remote_field.model, ModelBase):
return []
# Consider that we are checking field `Model.foreign` and the models
# are:
#
# class Target(models.Model):
# model = models.IntegerField()
# model_set = models.IntegerField()
#
# class Model(models.Model):
# foreign = models.ForeignKey(Target)
# m2m = models.ManyToManyField(Target)
# rel_opts.object_name == "Target"
rel_opts = self.remote_field.model._meta
# If the field doesn't install a backward relation on the target model
# (so `is_hidden` returns True), then there are no clashes to check
# and we can skip these fields.
rel_is_hidden = self.remote_field.is_hidden()
        rel_name = self.remote_field.get_accessor_name()  # i.e. "model_set"
        rel_query_name = self.related_query_name()  # i.e. "model"
# i.e. "app_label.Model.field".
field_name = "%s.%s" % (opts.label, self.name)
# Check clashes between accessor or reverse query name of `field`
# and any other field name -- i.e. accessor for Model.foreign is
# model_set and it clashes with Target.model_set.
potential_clashes = rel_opts.fields + rel_opts.many_to_many
for clash_field in potential_clashes:
# i.e. "app_label.Target.model_set".
clash_name = "%s.%s" % (rel_opts.label, clash_field.name)
if not rel_is_hidden and clash_field.name == rel_name:
errors.append(
checks.Error(
f"Reverse accessor '{rel_opts.object_name}.{rel_name}' "
f"for '{field_name}' clashes with field name "
f"'{clash_name}'.",
hint=(
"Rename field '%s', or add/change a related_name "
"argument to the definition for field '%s'."
)
% (clash_name, field_name),
obj=self,
id="fields.E302",
)
)
if clash_field.name == rel_query_name:
errors.append(
checks.Error(
"Reverse query name for '%s' clashes with field name '%s'."
% (field_name, clash_name),
hint=(
"Rename field '%s', or add/change a related_name "
"argument to the definition for field '%s'."
)
% (clash_name, field_name),
obj=self,
id="fields.E303",
)
)
# Check clashes between accessors/reverse query names of `field` and
        # any other field accessor -- i.e. Model.foreign accessor clashes with
# Model.m2m accessor.
potential_clashes = (r for r in rel_opts.related_objects if r.field is not self)
for clash_field in potential_clashes:
# i.e. "app_label.Model.m2m".
clash_name = "%s.%s" % (
clash_field.related_model._meta.label,
clash_field.field.name,
)
if not rel_is_hidden and clash_field.get_accessor_name() == rel_name:
errors.append(
checks.Error(
f"Reverse accessor '{rel_opts.object_name}.{rel_name}' "
f"for '{field_name}' clashes with reverse accessor for "
f"'{clash_name}'.",
hint=(
"Add or change a related_name argument "
"to the definition for '%s' or '%s'."
)
% (field_name, clash_name),
obj=self,
id="fields.E304",
)
)
if clash_field.get_accessor_name() == rel_query_name:
errors.append(
checks.Error(
"Reverse query name for '%s' clashes with reverse query name "
"for '%s'." % (field_name, clash_name),
hint=(
"Add or change a related_name argument "
"to the definition for '%s' or '%s'."
)
% (field_name, clash_name),
obj=self,
id="fields.E305",
)
)
return errors
def db_type(self, connection):
        # By default a related field will not have a column, as it relates to
        # columns from another table.
return None
def contribute_to_class(self, cls, name, private_only=False, **kwargs):
super().contribute_to_class(cls, name, private_only=private_only, **kwargs)
self.opts = cls._meta
if not cls._meta.abstract:
if self.remote_field.related_name:
related_name = self.remote_field.related_name
else:
related_name = self.opts.default_related_name
if related_name:
related_name = related_name % {
"class": cls.__name__.lower(),
"model_name": cls._meta.model_name.lower(),
"app_label": cls._meta.app_label.lower(),
}
self.remote_field.related_name = related_name
if self.remote_field.related_query_name:
related_query_name = self.remote_field.related_query_name % {
"class": cls.__name__.lower(),
"app_label": cls._meta.app_label.lower(),
}
self.remote_field.related_query_name = related_query_name
def resolve_related_class(model, related, field):
field.remote_field.model = related
field.do_related_class(related, model)
lazy_related_operation(
resolve_related_class, cls, self.remote_field.model, field=self
)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self._limit_choices_to:
kwargs["limit_choices_to"] = self._limit_choices_to
if self._related_name is not None:
kwargs["related_name"] = self._related_name
if self._related_query_name is not None:
kwargs["related_query_name"] = self._related_query_name
return name, path, args, kwargs
def get_forward_related_filter(self, obj):
"""
Return the keyword arguments that, when supplied to
self.model.objects.filter(), would select all instances related
through this field to the remote obj. This is used to build the
querysets returned by related descriptors. obj is an instance of
self.related_field.model.
"""
return {
"%s__%s" % (self.name, rh_field.name): getattr(obj, rh_field.attname)
for _, rh_field in self.related_fields
}
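# Illustrative sketch (editorial addition; model names are hypothetical):
# for a Book model with ``author = ForeignKey(Author)``, given an Author
# instance ``obj``, this returns roughly ``{"author__id": obj.id}``, so
# Book.objects.filter(**that) selects the books related to ``obj``.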
def get_reverse_related_filter(self, obj):
"""
Complement to get_forward_related_filter(). Return the keyword
arguments that, when passed to
self.related_field.model.objects.filter(), select all instances of
self.related_field.model related through this field to obj. obj is
an instance of self.model.
"""
base_filter = (
(rh_field.attname, getattr(obj, lh_field.attname))
for lh_field, rh_field in self.related_fields
)
descriptor_filter = self.get_extra_descriptor_filter(obj)
base_q = Q(*base_filter)
if isinstance(descriptor_filter, dict):
return base_q & Q(**descriptor_filter)
elif descriptor_filter:
return base_q & descriptor_filter
return base_q
@property
def swappable_setting(self):
"""
Get the setting that controls swapping for this relation, or None
if it's not swapped in / is marked with swappable=False.
"""
if self.swappable:
# Work out string form of "to"
if isinstance(self.remote_field.model, str):
to_string = self.remote_field.model
else:
to_string = self.remote_field.model._meta.label
return apps.get_swappable_settings_name(to_string)
return None
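# Illustrative note (editorial addition): for a field declared as
# ForeignKey(settings.AUTH_USER_MODEL, ...), this property typically
# resolves to the string "AUTH_USER_MODEL", which deconstruct() then
# wraps in a SettingsReference.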
def set_attributes_from_rel(self):
self.name = self.name or (
self.remote_field.model._meta.model_name
+ "_"
+ self.remote_field.model._meta.pk.name
)
if self.verbose_name is None:
self.verbose_name = self.remote_field.model._meta.verbose_name
self.remote_field.set_field_name()
def do_related_class(self, other, cls):
self.set_attributes_from_rel()
self.contribute_to_related_class(other, self.remote_field)
def get_limit_choices_to(self):
"""
Return ``limit_choices_to`` for this model field.
If it is a callable, it will be invoked and the result will be
returned.
"""
if callable(self.remote_field.limit_choices_to):
return self.remote_field.limit_choices_to()
return self.remote_field.limit_choices_to
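# Illustrative sketch (editorial addition; model names are hypothetical):
#     staff = ForeignKey(User, limit_choices_to={"is_staff": True}, ...)
# makes this return {"is_staff": True}; with
#     staff = ForeignKey(User, limit_choices_to=lambda: {"is_staff": True}, ...)
# the callable is invoked first and the same dict is returned.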
def formfield(self, **kwargs):
"""
Pass ``limit_choices_to`` to the field being constructed.
Only pass it if the field type supports related fields.
This is a strategy similar to the one used to pass the ``queryset``
to the field being constructed.
"""
defaults = {}
if hasattr(self.remote_field, "get_related_field"):
# If this is a callable, do not invoke it here. Just pass
# it in the defaults for when the form class will later be
# instantiated.
limit_choices_to = self.remote_field.limit_choices_to
defaults.update(
{
"limit_choices_to": limit_choices_to,
}
)
defaults.update(kwargs)
return super().formfield(**defaults)
def related_query_name(self):
"""
Define the name that can be used to identify this related object in a
table-spanning query.
"""
return (
self.remote_field.related_query_name
or self.remote_field.related_name
or self.opts.model_name
)
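# Illustrative sketch (editorial addition; model names are hypothetical):
# for a Book model with ``author = ForeignKey(Author)`` and no
# related_name/related_query_name set, this returns "book", enabling
# Author.objects.filter(book__title="...").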
@property
def target_field(self):
"""
When filtering against this relation, return the field on the remote
model against which the filtering should happen.
"""
target_fields = self.path_infos[-1].target_fields
if len(target_fields) > 1:
raise exceptions.FieldError(
"The relation has multiple target fields, but only single target field "
"was asked for"
)
return target_fields[0]
def get_cache_name(self):
return self.name
class ForeignObject(RelatedField):
"""
Abstraction of the ForeignKey relation to support multi-column relations.
"""
# Field flags
many_to_many = False
many_to_one = True
one_to_many = False
one_to_one = False
requires_unique_target = True
related_accessor_class = ReverseManyToOneDescriptor
forward_related_accessor_class = ForwardManyToOneDescriptor
rel_class = ForeignObjectRel
def __init__(
self,
to,
on_delete,
from_fields,
to_fields,
rel=None,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
swappable=True,
**kwargs,
):
if rel is None:
rel = self.rel_class(
self,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
super().__init__(
rel=rel,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
**kwargs,
)
self.from_fields = from_fields
self.to_fields = to_fields
self.swappable = swappable
def __copy__(self):
obj = super().__copy__()
# Remove any cached PathInfo values.
obj.__dict__.pop("path_infos", None)
obj.__dict__.pop("reverse_path_infos", None)
return obj
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_to_fields_exist(),
*self._check_unique_target(),
]
def _check_to_fields_exist(self):
# Skip nonexistent models.
if isinstance(self.remote_field.model, str):
return []
errors = []
for to_field in self.to_fields:
if to_field:
try:
self.remote_field.model._meta.get_field(to_field)
except exceptions.FieldDoesNotExist:
errors.append(
checks.Error(
"The to_field '%s' doesn't exist on the related "
"model '%s'."
% (to_field, self.remote_field.model._meta.label),
obj=self,
id="fields.E312",
)
)
return errors
def _check_unique_target(self):
rel_is_string = isinstance(self.remote_field.model, str)
if rel_is_string or not self.requires_unique_target:
return []
try:
self.foreign_related_fields
except exceptions.FieldDoesNotExist:
return []
if not self.foreign_related_fields:
return []
unique_foreign_fields = {
frozenset([f.name])
for f in self.remote_field.model._meta.get_fields()
if getattr(f, "unique", False)
}
unique_foreign_fields.update(
{frozenset(ut) for ut in self.remote_field.model._meta.unique_together}
)
unique_foreign_fields.update(
{
frozenset(uc.fields)
for uc in self.remote_field.model._meta.total_unique_constraints
}
)
foreign_fields = {f.name for f in self.foreign_related_fields}
has_unique_constraint = any(u <= foreign_fields for u in unique_foreign_fields)
if not has_unique_constraint and len(self.foreign_related_fields) > 1:
field_combination = ", ".join(
"'%s'" % rel_field.name for rel_field in self.foreign_related_fields
)
model_name = self.remote_field.model.__name__
return [
checks.Error(
"No subset of the fields %s on model '%s' is unique."
% (field_combination, model_name),
hint=(
"Mark a single field as unique=True or add a set of "
"fields to a unique constraint (via unique_together "
"or a UniqueConstraint (without condition) in the "
"model Meta.constraints)."
),
obj=self,
id="fields.E310",
)
]
elif not has_unique_constraint:
field_name = self.foreign_related_fields[0].name
model_name = self.remote_field.model.__name__
return [
checks.Error(
"'%s.%s' must be unique because it is referenced by "
"a foreign key." % (model_name, field_name),
hint=(
"Add unique=True to this field or add a "
"UniqueConstraint (without condition) in the model "
"Meta.constraints."
),
obj=self,
id="fields.E311",
)
]
else:
return []
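# Illustrative sketch (editorial addition; model names are hypothetical):
# a multi-column ForeignObject(from_fields=["a", "b"],
# to_fields=["x", "y"], ...) passes this check only if the target model
# declares a covering constraint, e.g.
#     class Meta:
#         constraints = [UniqueConstraint(fields=["x", "y"], name="uniq_xy")]
# otherwise fields.E310 is reported.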
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
kwargs["on_delete"] = self.remote_field.on_delete
kwargs["from_fields"] = self.from_fields
kwargs["to_fields"] = self.to_fields
if self.remote_field.parent_link:
kwargs["parent_link"] = self.remote_field.parent_link
if isinstance(self.remote_field.model, str):
if "." in self.remote_field.model:
app_label, model_name = self.remote_field.model.split(".")
kwargs["to"] = "%s.%s" % (app_label, model_name.lower())
else:
kwargs["to"] = self.remote_field.model.lower()
else:
kwargs["to"] = self.remote_field.model._meta.label_lower
# If swappable is True, then see if we're actually pointing to the target
# of a swap.
swappable_setting = self.swappable_setting
if swappable_setting is not None:
# If it's already a settings reference, error
if hasattr(kwargs["to"], "setting_name"):
if kwargs["to"].setting_name != swappable_setting:
raise ValueError(
"Cannot deconstruct a ForeignKey pointing to a model "
"that is swapped in place of more than one model (%s and %s)"
% (kwargs["to"].setting_name, swappable_setting)
)
# Set it
kwargs["to"] = SettingsReference(
kwargs["to"],
swappable_setting,
)
return name, path, args, kwargs
def resolve_related_fields(self):
if not self.from_fields or len(self.from_fields) != len(self.to_fields):
raise ValueError(
"Foreign Object from and to fields must be the same non-zero length"
)
if isinstance(self.remote_field.model, str):
raise ValueError(
"Related model %r cannot be resolved" % self.remote_field.model
)
related_fields = []
for index in range(len(self.from_fields)):
from_field_name = self.from_fields[index]
to_field_name = self.to_fields[index]
from_field = (
self
if from_field_name == RECURSIVE_RELATIONSHIP_CONSTANT
else self.opts.get_field(from_field_name)
)
to_field = (
self.remote_field.model._meta.pk
if to_field_name is None
else self.remote_field.model._meta.get_field(to_field_name)
)
related_fields.append((from_field, to_field))
return related_fields
@cached_property
def related_fields(self):
return self.resolve_related_fields()
@cached_property
def reverse_related_fields(self):
return [(rhs_field, lhs_field) for lhs_field, rhs_field in self.related_fields]
@cached_property
def local_related_fields(self):
return tuple(lhs_field for lhs_field, rhs_field in self.related_fields)
@cached_property
def foreign_related_fields(self):
return tuple(
rhs_field for lhs_field, rhs_field in self.related_fields if rhs_field
)
def get_local_related_value(self, instance):
return self.get_instance_value_for_fields(instance, self.local_related_fields)
def get_foreign_related_value(self, instance):
return self.get_instance_value_for_fields(instance, self.foreign_related_fields)
@staticmethod
def get_instance_value_for_fields(instance, fields):
ret = []
opts = instance._meta
for field in fields:
# Gotcha: in some cases (like fixture loading) a model can have
# different values in parent_ptr_id and parent's id. So, use
# instance.pk (that is, parent_ptr_id) when asked for instance.id.
if field.primary_key:
possible_parent_link = opts.get_ancestor_link(field.model)
if (
not possible_parent_link
or possible_parent_link.primary_key
or possible_parent_link.model._meta.abstract
):
ret.append(instance.pk)
continue
ret.append(getattr(instance, field.attname))
return tuple(ret)
def get_attname_column(self):
attname, column = super().get_attname_column()
return attname, None
def get_joining_columns(self, reverse_join=False):
source = self.reverse_related_fields if reverse_join else self.related_fields
return tuple(
(lhs_field.column, rhs_field.column) for lhs_field, rhs_field in source
)
def get_reverse_joining_columns(self):
return self.get_joining_columns(reverse_join=True)
def get_extra_descriptor_filter(self, instance):
"""
Return an extra filter condition for related object fetching when a
user accesses 'instance.fieldname'; that is, the extra filter is
used in the field's descriptor.
The filter should be either a dict usable in a .filter(**kwargs)
call or a Q object. The condition will be ANDed together with the
relation's joining columns.
A parallel method is get_extra_restriction(), which is used in JOIN
and subquery conditions.
"""
return {}
def get_extra_restriction(self, alias, related_alias):
"""
Return a pair condition used for joining and subquery pushdown. The
condition is something that responds to the
as_sql(compiler, connection) method.
Note that currently referring to both the 'alias' and
'related_alias' will not work in some conditions, like subquery
pushdown.
A parallel method is get_extra_descriptor_filter(), which is used
in instance.fieldname related object fetching.
"""
return None
def get_path_info(self, filtered_relation=None):
"""Get path from this field to the related model."""
opts = self.remote_field.model._meta
from_opts = self.model._meta
return [
PathInfo(
from_opts=from_opts,
to_opts=opts,
target_fields=self.foreign_related_fields,
join_field=self,
m2m=False,
direct=True,
filtered_relation=filtered_relation,
)
]
@cached_property
def path_infos(self):
return self.get_path_info()
def get_reverse_path_info(self, filtered_relation=None):
"""Get path from the related model to this field's model."""
opts = self.model._meta
from_opts = self.remote_field.model._meta
return [
PathInfo(
from_opts=from_opts,
to_opts=opts,
target_fields=(opts.pk,),
join_field=self.remote_field,
m2m=not self.unique,
direct=False,
filtered_relation=filtered_relation,
)
]
@cached_property
def reverse_path_infos(self):
return self.get_reverse_path_info()
@classmethod
@functools.lru_cache(maxsize=None)
def get_lookups(cls):
bases = inspect.getmro(cls)
bases = bases[: bases.index(ForeignObject) + 1]
class_lookups = [parent.__dict__.get("class_lookups", {}) for parent in bases]
return cls.merge_dicts(class_lookups)
def contribute_to_class(self, cls, name, private_only=False, **kwargs):
super().contribute_to_class(cls, name, private_only=private_only, **kwargs)
setattr(cls, self.name, self.forward_related_accessor_class(self))
def contribute_to_related_class(self, cls, related):
# Internal FKs - i.e., those with a related name ending with '+' -
# and swapped models don't get a related descriptor.
if (
not self.remote_field.is_hidden()
and not related.related_model._meta.swapped
):
setattr(
cls._meta.concrete_model,
related.get_accessor_name(),
self.related_accessor_class(related),
)
# While 'limit_choices_to' might be a callable, simply pass
# it along for later - invoking it here would be too early,
# because models are still loading.
if self.remote_field.limit_choices_to:
cls._meta.related_fkey_lookups.append(
self.remote_field.limit_choices_to
)
ForeignObject.register_lookup(RelatedIn)
ForeignObject.register_lookup(RelatedExact)
ForeignObject.register_lookup(RelatedLessThan)
ForeignObject.register_lookup(RelatedGreaterThan)
ForeignObject.register_lookup(RelatedGreaterThanOrEqual)
ForeignObject.register_lookup(RelatedLessThanOrEqual)
ForeignObject.register_lookup(RelatedIsNull)
class ForeignKey(ForeignObject):
"""
Provide a many-to-one relation by adding a column to the local model
to hold the remote value.
By default ForeignKey will target the pk of the remote model but this
behavior can be changed by using the ``to_field`` argument.
"""
descriptor_class = ForeignKeyDeferredAttribute
# Field flags
many_to_many = False
many_to_one = True
one_to_many = False
one_to_one = False
rel_class = ManyToOneRel
empty_strings_allowed = False
default_error_messages = {
"invalid": _("%(model)s instance with %(field)s %(value)r does not exist.")
}
description = _("Foreign Key (type determined by related field)")
def __init__(
self,
to,
on_delete,
related_name=None,
related_query_name=None,
limit_choices_to=None,
parent_link=False,
to_field=None,
db_constraint=True,
**kwargs,
):
try:
to._meta.model_name
except AttributeError:
if not isinstance(to, str):
raise TypeError(
"%s(%r) is invalid. First parameter to ForeignKey must be "
"either a model, a model name, or the string %r"
% (
self.__class__.__name__,
to,
RECURSIVE_RELATIONSHIP_CONSTANT,
)
)
else:
# For backwards compatibility purposes, we need to *try* to set
# the to_field during FK construction. It won't be guaranteed to
# be correct until contribute_to_class is called. Refs #12190.
to_field = to_field or (to._meta.pk and to._meta.pk.name)
if not callable(on_delete):
raise TypeError("on_delete must be callable.")
kwargs["rel"] = self.rel_class(
self,
to,
to_field,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
kwargs.setdefault("db_index", True)
super().__init__(
to,
on_delete,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
from_fields=[RECURSIVE_RELATIONSHIP_CONSTANT],
to_fields=[to_field],
**kwargs,
)
self.db_constraint = db_constraint
def __class_getitem__(cls, *args, **kwargs):
return cls
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_on_delete(),
*self._check_unique(),
]
def _check_on_delete(self):
on_delete = getattr(self.remote_field, "on_delete", None)
if on_delete == SET_NULL and not self.null:
return [
checks.Error(
"Field specifies on_delete=SET_NULL, but cannot be null.",
hint=(
"Set null=True argument on the field, or change the on_delete "
"rule."
),
obj=self,
id="fields.E320",
)
]
elif on_delete == SET_DEFAULT and not self.has_default():
return [
checks.Error(
"Field specifies on_delete=SET_DEFAULT, but has no default value.",
hint="Set a default value, or change the on_delete rule.",
obj=self,
id="fields.E321",
)
]
else:
return []
def _check_unique(self, **kwargs):
return (
[
checks.Warning(
"Setting unique=True on a ForeignKey has the same effect as using "
"a OneToOneField.",
hint=(
"ForeignKey(unique=True) is usually better served by a "
"OneToOneField."
),
obj=self,
id="fields.W342",
)
]
if self.unique
else []
)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["to_fields"]
del kwargs["from_fields"]
# Handle the simpler arguments
if self.db_index:
del kwargs["db_index"]
else:
kwargs["db_index"] = False
if self.db_constraint is not True:
kwargs["db_constraint"] = self.db_constraint
# Rel needs more work.
to_meta = getattr(self.remote_field.model, "_meta", None)
if self.remote_field.field_name and (
not to_meta
or (to_meta.pk and self.remote_field.field_name != to_meta.pk.name)
):
kwargs["to_field"] = self.remote_field.field_name
return name, path, args, kwargs
def to_python(self, value):
return self.target_field.to_python(value)
@property
def target_field(self):
return self.foreign_related_fields[0]
def get_reverse_path_info(self, filtered_relation=None):
"""Get path from the related model to this field's model."""
opts = self.model._meta
from_opts = self.remote_field.model._meta
return [
PathInfo(
from_opts=from_opts,
to_opts=opts,
target_fields=(opts.pk,),
join_field=self.remote_field,
m2m=not self.unique,
direct=False,
filtered_relation=filtered_relation,
)
]
def validate(self, value, model_instance):
if self.remote_field.parent_link:
return
super().validate(value, model_instance)
if value is None:
return
using = router.db_for_read(self.remote_field.model, instance=model_instance)
qs = self.remote_field.model._base_manager.using(using).filter(
**{self.remote_field.field_name: value}
)
qs = qs.complex_filter(self.get_limit_choices_to())
if not qs.exists():
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={
"model": self.remote_field.model._meta.verbose_name,
"pk": value,
"field": self.remote_field.field_name,
"value": value,
}, # 'pk' is included for backwards compatibility
)
def resolve_related_fields(self):
related_fields = super().resolve_related_fields()
for from_field, to_field in related_fields:
if (
to_field
and to_field.model != self.remote_field.model._meta.concrete_model
):
raise exceptions.FieldError(
"'%s.%s' refers to field '%s' which is not local to model "
"'%s'."
% (
self.model._meta.label,
self.name,
to_field.name,
self.remote_field.model._meta.concrete_model._meta.label,
)
)
return related_fields
def get_attname(self):
return "%s_id" % self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
def get_default(self):
"""Return the to_field if the default value is an object."""
field_default = super().get_default()
if isinstance(field_default, self.remote_field.model):
return getattr(field_default, self.target_field.attname)
return field_default
def get_db_prep_save(self, value, connection):
if value is None or (
value == ""
and (
not self.target_field.empty_strings_allowed
or connection.features.interprets_empty_strings_as_nulls
)
):
return None
else:
return self.target_field.get_db_prep_save(value, connection=connection)
def get_db_prep_value(self, value, connection, prepared=False):
return self.target_field.get_db_prep_value(value, connection, prepared)
def get_prep_value(self, value):
return self.target_field.get_prep_value(value)
def contribute_to_related_class(self, cls, related):
super().contribute_to_related_class(cls, related)
if self.remote_field.field_name is None:
self.remote_field.field_name = cls._meta.pk.name
def formfield(self, *, using=None, **kwargs):
if isinstance(self.remote_field.model, str):
raise ValueError(
"Cannot create form field for %r yet, because "
"its related model %r has not been loaded yet"
% (self.name, self.remote_field.model)
)
return super().formfield(
**{
"form_class": forms.ModelChoiceField,
"queryset": self.remote_field.model._default_manager.using(using),
"to_field_name": self.remote_field.field_name,
**kwargs,
"blank": self.blank,
}
)
def db_check(self, connection):
return None
def db_type(self, connection):
return self.target_field.rel_db_type(connection=connection)
def db_parameters(self, connection):
return {"type": self.db_type(connection), "check": self.db_check(connection)}
def convert_empty_strings(self, value, expression, connection):
if (not value) and isinstance(value, str):
return None
return value
def get_db_converters(self, connection):
converters = super().get_db_converters(connection)
if connection.features.interprets_empty_strings_as_nulls:
converters += [self.convert_empty_strings]
return converters
def get_col(self, alias, output_field=None):
if output_field is None:
output_field = self.target_field
while isinstance(output_field, ForeignKey):
output_field = output_field.target_field
if output_field is self:
raise ValueError("Cannot resolve output_field.")
return super().get_col(alias, output_field)
class OneToOneField(ForeignKey):
"""
A OneToOneField is essentially the same as a ForeignKey, with the exception
that it always carries a "unique" constraint with it and the reverse
relation always returns the object pointed to (since there will only ever
be one), rather than returning a list.
"""
# Field flags
many_to_many = False
many_to_one = False
one_to_many = False
one_to_one = True
related_accessor_class = ReverseOneToOneDescriptor
forward_related_accessor_class = ForwardOneToOneDescriptor
rel_class = OneToOneRel
description = _("One-to-one relationship")
def __init__(self, to, on_delete, to_field=None, **kwargs):
kwargs["unique"] = True
super().__init__(to, on_delete, to_field=to_field, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if "unique" in kwargs:
del kwargs["unique"]
return name, path, args, kwargs
def formfield(self, **kwargs):
if self.remote_field.parent_link:
return None
return super().formfield(**kwargs)
def save_form_data(self, instance, data):
if isinstance(data, self.remote_field.model):
setattr(instance, self.name, data)
else:
setattr(instance, self.attname, data)
# Remote field object must be cleared otherwise Model.save()
# will reassign attname using the related object pk.
if data is None:
setattr(instance, self.name, data)
def _check_unique(self, **kwargs):
# Override ForeignKey since check isn't applicable here.
return []
def create_many_to_many_intermediary_model(field, klass):
from django.db import models
def set_managed(model, related, through):
through._meta.managed = model._meta.managed or related._meta.managed
to_model = resolve_relation(klass, field.remote_field.model)
name = "%s_%s" % (klass._meta.object_name, field.name)
lazy_related_operation(set_managed, klass, to_model, name)
to = make_model_tuple(to_model)[1]
from_ = klass._meta.model_name
if to == from_:
to = "to_%s" % to
from_ = "from_%s" % from_
meta = type(
"Meta",
(),
{
"db_table": field._get_m2m_db_table(klass._meta),
"auto_created": klass,
"app_label": klass._meta.app_label,
"db_tablespace": klass._meta.db_tablespace,
"unique_together": (from_, to),
"verbose_name": _("%(from)s-%(to)s relationship")
% {"from": from_, "to": to},
"verbose_name_plural": _("%(from)s-%(to)s relationships")
% {"from": from_, "to": to},
"apps": field.model._meta.apps,
},
)
# Construct and return the new class.
return type(
name,
(models.Model,),
{
"Meta": meta,
"__module__": klass.__module__,
from_: models.ForeignKey(
klass,
related_name="%s+" % name,
db_tablespace=field.db_tablespace,
db_constraint=field.remote_field.db_constraint,
on_delete=CASCADE,
),
to: models.ForeignKey(
to_model,
related_name="%s+" % name,
db_tablespace=field.db_tablespace,
db_constraint=field.remote_field.db_constraint,
on_delete=CASCADE,
),
},
)
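# Editorial sketch of what the factory above generates for a hypothetical
# ``members = ManyToManyField(Person)`` on a Group model -- roughly
# equivalent to hand-writing:
#     class Group_members(models.Model):
#         group = models.ForeignKey(Group, on_delete=models.CASCADE, ...)
#         person = models.ForeignKey(Person, on_delete=models.CASCADE, ...)
#         class Meta:
#             unique_together = ("group", "person")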
class ManyToManyField(RelatedField):
"""
Provide a many-to-many relation by using an intermediary model that
holds two ForeignKey fields pointed at the two sides of the relation.
Unless a ``through`` model is provided, ManyToManyField will use the
create_many_to_many_intermediary_model factory to automatically
generate the intermediary model.
"""
# Field flags
many_to_many = True
many_to_one = False
one_to_many = False
one_to_one = False
rel_class = ManyToManyRel
description = _("Many-to-many relationship")
def __init__(
self,
to,
related_name=None,
related_query_name=None,
limit_choices_to=None,
symmetrical=None,
through=None,
through_fields=None,
db_constraint=True,
db_table=None,
swappable=True,
**kwargs,
):
try:
to._meta
except AttributeError:
if not isinstance(to, str):
raise TypeError(
"%s(%r) is invalid. First parameter to ManyToManyField "
"must be either a model, a model name, or the string %r"
% (
self.__class__.__name__,
to,
RECURSIVE_RELATIONSHIP_CONSTANT,
)
)
if symmetrical is None:
symmetrical = to == RECURSIVE_RELATIONSHIP_CONSTANT
if through is not None and db_table is not None:
raise ValueError(
"Cannot specify a db_table if an intermediary model is used."
)
kwargs["rel"] = self.rel_class(
self,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
symmetrical=symmetrical,
through=through,
through_fields=through_fields,
db_constraint=db_constraint,
)
self.has_null_arg = "null" in kwargs
super().__init__(
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
**kwargs,
)
self.db_table = db_table
self.swappable = swappable
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_unique(**kwargs),
*self._check_relationship_model(**kwargs),
*self._check_ignored_options(**kwargs),
*self._check_table_uniqueness(**kwargs),
]
def _check_unique(self, **kwargs):
if self.unique:
return [
checks.Error(
"ManyToManyFields cannot be unique.",
obj=self,
id="fields.E330",
)
]
return []
def _check_ignored_options(self, **kwargs):
warnings = []
if self.has_null_arg:
warnings.append(
checks.Warning(
"null has no effect on ManyToManyField.",
obj=self,
id="fields.W340",
)
)
if self._validators:
warnings.append(
checks.Warning(
"ManyToManyField does not support validators.",
obj=self,
id="fields.W341",
)
)
if self.remote_field.symmetrical and self._related_name:
warnings.append(
checks.Warning(
"related_name has no effect on ManyToManyField "
'with a symmetrical relationship, e.g. to "self".',
obj=self,
id="fields.W345",
)
)
return warnings
def _check_relationship_model(self, from_model=None, **kwargs):
if hasattr(self.remote_field.through, "_meta"):
qualified_model_name = "%s.%s" % (
self.remote_field.through._meta.app_label,
self.remote_field.through.__name__,
)
else:
qualified_model_name = self.remote_field.through
errors = []
if self.remote_field.through not in self.opts.apps.get_models(
include_auto_created=True
):
# The relationship model is not installed.
errors.append(
checks.Error(
"Field specifies a many-to-many relation through model "
"'%s', which has not been installed." % qualified_model_name,
obj=self,
id="fields.E331",
)
)
else:
assert from_model is not None, (
"ManyToManyField with intermediate "
"tables cannot be checked if you don't pass the model "
"where the field is attached to."
)
# Set some useful local variables
to_model = resolve_relation(from_model, self.remote_field.model)
from_model_name = from_model._meta.object_name
if isinstance(to_model, str):
to_model_name = to_model
else:
to_model_name = to_model._meta.object_name
relationship_model_name = self.remote_field.through._meta.object_name
self_referential = from_model == to_model
# Count foreign keys in intermediate model
if self_referential:
seen_self = sum(
from_model == getattr(field.remote_field, "model", None)
for field in self.remote_field.through._meta.fields
)
if seen_self > 2 and not self.remote_field.through_fields:
errors.append(
checks.Error(
"The model is used as an intermediate model by "
"'%s', but it has more than two foreign keys "
"to '%s', which is ambiguous. You must specify "
"which two foreign keys Django should use via the "
"through_fields keyword argument."
% (self, from_model_name),
hint=(
"Use through_fields to specify which two foreign keys "
"Django should use."
),
obj=self.remote_field.through,
id="fields.E333",
)
)
else:
# Count foreign keys in relationship model
seen_from = sum(
from_model == getattr(field.remote_field, "model", None)
for field in self.remote_field.through._meta.fields
)
seen_to = sum(
to_model == getattr(field.remote_field, "model", None)
for field in self.remote_field.through._meta.fields
)
if seen_from > 1 and not self.remote_field.through_fields:
errors.append(
checks.Error(
(
"The model is used as an intermediate model by "
"'%s', but it has more than one foreign key "
"from '%s', which is ambiguous. You must specify "
"which foreign key Django should use via the "
"through_fields keyword argument."
)
% (self, from_model_name),
hint=(
"If you want to create a recursive relationship, "
'use ManyToManyField("%s", through="%s").'
)
% (
RECURSIVE_RELATIONSHIP_CONSTANT,
relationship_model_name,
),
obj=self,
id="fields.E334",
)
)
if seen_to > 1 and not self.remote_field.through_fields:
errors.append(
checks.Error(
"The model is used as an intermediate model by "
"'%s', but it has more than one foreign key "
"to '%s', which is ambiguous. You must specify "
"which foreign key Django should use via the "
"through_fields keyword argument." % (self, to_model_name),
hint=(
"If you want to create a recursive relationship, "
'use ManyToManyField("%s", through="%s").'
)
% (
RECURSIVE_RELATIONSHIP_CONSTANT,
relationship_model_name,
),
obj=self,
id="fields.E335",
)
)
if seen_from == 0 or seen_to == 0:
errors.append(
checks.Error(
"The model is used as an intermediate model by "
"'%s', but it does not have a foreign key to '%s' or '%s'."
% (self, from_model_name, to_model_name),
obj=self.remote_field.through,
id="fields.E336",
)
)
# Validate `through_fields`.
if self.remote_field.through_fields is not None:
# Validate that we're given an iterable of at least two items
# and that none of them is "falsy".
if not (
len(self.remote_field.through_fields) >= 2
and self.remote_field.through_fields[0]
and self.remote_field.through_fields[1]
):
errors.append(
checks.Error(
"Field specifies 'through_fields' but does not provide "
"the names of the two link fields that should be used "
"for the relation through model '%s'." % qualified_model_name,
hint=(
"Make sure you specify 'through_fields' as "
"through_fields=('field1', 'field2')"
),
obj=self,
id="fields.E337",
)
)
# Validate the given through fields -- they should be actual
# fields on the through model, and also be foreign keys to the
# expected models.
else:
assert from_model is not None, (
"ManyToManyField with intermediate "
"tables cannot be checked if you don't pass the model "
"where the field is attached to."
)
source, through, target = (
from_model,
self.remote_field.through,
self.remote_field.model,
)
source_field_name, target_field_name = self.remote_field.through_fields[
:2
]
for field_name, related_model in (
(source_field_name, source),
(target_field_name, target),
):
possible_field_names = []
for f in through._meta.fields:
if (
hasattr(f, "remote_field")
and getattr(f.remote_field, "model", None) == related_model
):
possible_field_names.append(f.name)
if possible_field_names:
hint = (
"Did you mean one of the following foreign keys to '%s': "
"%s?"
% (
related_model._meta.object_name,
", ".join(possible_field_names),
)
)
else:
hint = None
try:
field = through._meta.get_field(field_name)
except exceptions.FieldDoesNotExist:
errors.append(
checks.Error(
"The intermediary model '%s' has no field '%s'."
% (qualified_model_name, field_name),
hint=hint,
obj=self,
id="fields.E338",
)
)
else:
if not (
hasattr(field, "remote_field")
and getattr(field.remote_field, "model", None)
== related_model
):
errors.append(
checks.Error(
"'%s.%s' is not a foreign key to '%s'."
% (
through._meta.object_name,
field_name,
related_model._meta.object_name,
),
hint=hint,
obj=self,
id="fields.E339",
)
)
return errors
def _check_table_uniqueness(self, **kwargs):
if (
isinstance(self.remote_field.through, str)
or not self.remote_field.through._meta.managed
):
return []
registered_tables = {
model._meta.db_table: model
for model in self.opts.apps.get_models(include_auto_created=True)
if model != self.remote_field.through and model._meta.managed
}
m2m_db_table = self.m2m_db_table()
model = registered_tables.get(m2m_db_table)
# The second condition allows multiple m2m relations on a model if
# some point to a through model that proxies another through model.
if (
model
and model._meta.concrete_model
!= self.remote_field.through._meta.concrete_model
):
if model._meta.auto_created:
def _get_field_name(model):
for field in model._meta.auto_created._meta.many_to_many:
if field.remote_field.through is model:
return field.name
opts = model._meta.auto_created._meta
clashing_obj = "%s.%s" % (opts.label, _get_field_name(model))
else:
clashing_obj = model._meta.label
if settings.DATABASE_ROUTERS:
error_class, error_id = checks.Warning, "fields.W344"
error_hint = (
"You have configured settings.DATABASE_ROUTERS. Verify "
"that the table of %r is correctly routed to a separate "
"database." % clashing_obj
)
else:
error_class, error_id = checks.Error, "fields.E340"
error_hint = None
return [
error_class(
"The field's intermediary table '%s' clashes with the "
"table name of '%s'." % (m2m_db_table, clashing_obj),
obj=self,
hint=error_hint,
id=error_id,
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
# Handle the simpler arguments.
if self.db_table is not None:
kwargs["db_table"] = self.db_table
if self.remote_field.db_constraint is not True:
kwargs["db_constraint"] = self.remote_field.db_constraint
# Lowercase model names as they should be treated as case-insensitive.
if isinstance(self.remote_field.model, str):
if "." in self.remote_field.model:
app_label, model_name = self.remote_field.model.split(".")
kwargs["to"] = "%s.%s" % (app_label, model_name.lower())
else:
kwargs["to"] = self.remote_field.model.lower()
else:
kwargs["to"] = self.remote_field.model._meta.label_lower
if getattr(self.remote_field, "through", None) is not None:
if isinstance(self.remote_field.through, str):
kwargs["through"] = self.remote_field.through
elif not self.remote_field.through._meta.auto_created:
kwargs["through"] = self.remote_field.through._meta.label
# If swappable is True, then see if we're actually pointing to the target
# of a swap.
swappable_setting = self.swappable_setting
if swappable_setting is not None:
# If it's already a settings reference, error.
if hasattr(kwargs["to"], "setting_name"):
if kwargs["to"].setting_name != swappable_setting:
raise ValueError(
"Cannot deconstruct a ManyToManyField pointing to a "
"model that is swapped in place of more than one model "
"(%s and %s)" % (kwargs["to"].setting_name, swappable_setting)
)
kwargs["to"] = SettingsReference(
kwargs["to"],
swappable_setting,
)
return name, path, args, kwargs
def _get_path_info(self, direct=False, filtered_relation=None):
"""Called by both direct and indirect m2m traversal."""
int_model = self.remote_field.through
linkfield1 = int_model._meta.get_field(self.m2m_field_name())
linkfield2 = int_model._meta.get_field(self.m2m_reverse_field_name())
if direct:
join1infos = linkfield1.reverse_path_infos
if filtered_relation:
join2infos = linkfield2.get_path_info(filtered_relation)
else:
join2infos = linkfield2.path_infos
else:
join1infos = linkfield2.reverse_path_infos
if filtered_relation:
join2infos = linkfield1.get_path_info(filtered_relation)
else:
join2infos = linkfield1.path_infos
# Get join infos between the last model of join 1 and the first model
# of join 2. Assume the only reason these may differ is due to model
# inheritance.
join1_final = join1infos[-1].to_opts
join2_initial = join2infos[0].from_opts
if join1_final is join2_initial:
intermediate_infos = []
elif issubclass(join1_final.model, join2_initial.model):
intermediate_infos = join1_final.get_path_to_parent(join2_initial.model)
else:
intermediate_infos = join2_initial.get_path_from_parent(join1_final.model)
return [*join1infos, *intermediate_infos, *join2infos]
def get_path_info(self, filtered_relation=None):
return self._get_path_info(direct=True, filtered_relation=filtered_relation)
@cached_property
def path_infos(self):
return self.get_path_info()
def get_reverse_path_info(self, filtered_relation=None):
return self._get_path_info(direct=False, filtered_relation=filtered_relation)
@cached_property
def reverse_path_infos(self):
return self.get_reverse_path_info()
def _get_m2m_db_table(self, opts):
"""
Function that can be curried to provide the m2m table name for this
relation.
"""
if self.remote_field.through is not None:
return self.remote_field.through._meta.db_table
elif self.db_table:
return self.db_table
else:
m2m_table_name = "%s_%s" % (utils.strip_quotes(opts.db_table), self.name)
return utils.truncate_name(m2m_table_name, connection.ops.max_name_length())
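# Illustrative note (editorial addition; app/model names are
# hypothetical): for an app "polls" with
#     class Group(models.Model):
#         members = models.ManyToManyField(Person)
# the auto-generated table name is "polls_group_members" (the model's
# db_table plus the field name), truncated to the backend's max length.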
def _get_m2m_attr(self, related, attr):
"""
Function that can be curried to provide the source accessor or DB
column name for the m2m table.
"""
cache_attr = "_m2m_%s_cache" % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
if self.remote_field.through_fields is not None:
link_field_name = self.remote_field.through_fields[0]
else:
link_field_name = None
for f in self.remote_field.through._meta.fields:
if (
f.is_relation
and f.remote_field.model == related.related_model
and (link_field_name is None or link_field_name == f.name)
):
setattr(self, cache_attr, getattr(f, attr))
return getattr(self, cache_attr)
def _get_m2m_reverse_attr(self, related, attr):
"""
Function that can be curried to provide the related accessor or DB
column name for the m2m table.
"""
cache_attr = "_m2m_reverse_%s_cache" % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
found = False
if self.remote_field.through_fields is not None:
link_field_name = self.remote_field.through_fields[1]
else:
link_field_name = None
for f in self.remote_field.through._meta.fields:
if f.is_relation and f.remote_field.model == related.model:
if link_field_name is None and related.related_model == related.model:
# If this is an m2m-intermediate to self,
# the first foreign key you find will be
# the source column. Keep searching for
# the second foreign key.
if found:
setattr(self, cache_attr, getattr(f, attr))
break
else:
found = True
elif link_field_name is None or link_field_name == f.name:
setattr(self, cache_attr, getattr(f, attr))
break
return getattr(self, cache_attr)
def contribute_to_class(self, cls, name, **kwargs):
# To support multiple relations to self, it's useful to have a non-None
# related name on symmetrical relations for internal reasons. The
# concept doesn't make a lot of sense externally ("you want me to
# specify *what* on my non-reversible relation?!"), so we set it up
# automatically. The funky name reduces the chance of an accidental
# clash.
if self.remote_field.symmetrical and (
self.remote_field.model == RECURSIVE_RELATIONSHIP_CONSTANT
or self.remote_field.model == cls._meta.object_name
):
self.remote_field.related_name = "%s_rel_+" % name
elif self.remote_field.is_hidden():
# If the backwards relation is disabled, replace the original
# related_name with one generated from the m2m field name. Django
# still uses backwards relations internally and we need to avoid
# clashes between multiple m2m fields with related_name == '+'.
self.remote_field.related_name = "_%s_%s_%s_+" % (
cls._meta.app_label,
cls.__name__.lower(),
name,
)
super().contribute_to_class(cls, name, **kwargs)
# The intermediate m2m model is not auto created if:
# 1) there is a manually specified intermediary model,
# 2) the class owning the m2m field is abstract, or
# 3) the class owning the m2m field has been swapped out.
if not cls._meta.abstract:
if self.remote_field.through:
def resolve_through_model(_, model, field):
field.remote_field.through = model
lazy_related_operation(
resolve_through_model, cls, self.remote_field.through, field=self
)
elif not cls._meta.swapped:
self.remote_field.through = create_many_to_many_intermediary_model(
self, cls
)
# Add the descriptor for the m2m relation.
setattr(cls, self.name, ManyToManyDescriptor(self.remote_field, reverse=False))
# Set up the accessor for the m2m table name for the relation.
self.m2m_db_table = partial(self._get_m2m_db_table, cls._meta)
def contribute_to_related_class(self, cls, related):
# Internal M2Ms (i.e., those with a related name ending with '+')
# and swapped models don't get a related descriptor.
if (
not self.remote_field.is_hidden()
and not related.related_model._meta.swapped
):
setattr(
cls,
related.get_accessor_name(),
ManyToManyDescriptor(self.remote_field, reverse=True),
)
# Set up the accessors for the column names on the m2m table.
self.m2m_column_name = partial(self._get_m2m_attr, related, "column")
self.m2m_reverse_name = partial(self._get_m2m_reverse_attr, related, "column")
self.m2m_field_name = partial(self._get_m2m_attr, related, "name")
self.m2m_reverse_field_name = partial(
self._get_m2m_reverse_attr, related, "name"
)
get_m2m_rel = partial(self._get_m2m_attr, related, "remote_field")
self.m2m_target_field_name = lambda: get_m2m_rel().field_name
get_m2m_reverse_rel = partial(
self._get_m2m_reverse_attr, related, "remote_field"
)
self.m2m_reverse_target_field_name = lambda: get_m2m_reverse_rel().field_name
def set_attributes_from_rel(self):
pass
def value_from_object(self, obj):
return [] if obj.pk is None else list(getattr(obj, self.attname).all())
def save_form_data(self, instance, data):
getattr(instance, self.attname).set(data)
def formfield(self, *, using=None, **kwargs):
defaults = {
"form_class": forms.ModelMultipleChoiceField,
"queryset": self.remote_field.model._default_manager.using(using),
**kwargs,
}
# If initial is passed in, it's a list of related objects, but the
# MultipleChoiceField takes a list of IDs.
if defaults.get("initial") is not None:
initial = defaults["initial"]
if callable(initial):
initial = initial()
defaults["initial"] = [i.pk for i in initial]
return super().formfield(**defaults)
def db_check(self, connection):
return None
def db_type(self, connection):
# A ManyToManyField is not represented by a single column,
# so return None.
return None
def db_parameters(self, connection):
return {"type": None, "check": None}
|
84442a09c24559e49cc46eb7350c7e1f596feeeaca4ce41b482bf56cd91eb6bf | """
Create SQL statements for QuerySets.
The code in here encapsulates all of the SQL construction so that QuerySets
themselves do not have to (and could be backed by things other than SQL
databases). The abstraction barrier only works one way: this module has to know
all about the internals of models in order to get the information it needs.
"""
import copy
import difflib
import functools
import sys
from collections import Counter, namedtuple
from collections.abc import Iterator, Mapping
from itertools import chain, count, product
from string import ascii_uppercase
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db import DEFAULT_DB_ALIAS, NotSupportedError, connections
from django.db.models.aggregates import Count
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import (
BaseExpression,
Col,
Exists,
F,
OuterRef,
Ref,
ResolvedOuterRef,
Value,
)
from django.db.models.fields import Field
from django.db.models.fields.related_lookups import MultiColSource
from django.db.models.lookups import Lookup
from django.db.models.query_utils import (
Q,
check_rel_lookup_compatibility,
refs_expression,
)
from django.db.models.sql.constants import INNER, LOUTER, ORDER_DIR, SINGLE
from django.db.models.sql.datastructures import BaseTable, Empty, Join, MultiJoin
from django.db.models.sql.where import AND, OR, ExtraWhere, NothingNode, WhereNode
from django.utils.functional import cached_property
from django.utils.regex_helper import _lazy_re_compile
from django.utils.tree import Node
__all__ = ["Query", "RawQuery"]
# Quotation marks ('"`[]), whitespace characters, semicolons, or inline
# SQL comments are forbidden in column aliases.
FORBIDDEN_ALIAS_PATTERN = _lazy_re_compile(r"['`\"\]\[;\s]|--|/\*|\*/")
# Inspired by
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
EXPLAIN_OPTIONS_PATTERN = _lazy_re_compile(r"[\w\-]+")
def get_field_names_from_opts(opts):
if opts is None:
return set()
return set(
chain.from_iterable(
(f.name, f.attname) if f.concrete else (f.name,) for f in opts.get_fields()
)
)
def get_children_from_q(q):
for child in q.children:
if isinstance(child, Node):
yield from get_children_from_q(child)
else:
yield child
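# Illustrative sketch (editorial addition): the generator above flattens
# nested Q objects into their leaf (lookup, value) pairs, so for a
# hypothetical
#     q = Q(a=1) & (Q(b=2) | Q(c=3))
# list(get_children_from_q(q)) yields [("a", 1), ("b", 2), ("c", 3)].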
JoinInfo = namedtuple(
"JoinInfo",
("final_field", "targets", "opts", "joins", "path", "transform_function"),
)
class RawQuery:
"""A single raw SQL query."""
def __init__(self, sql, using, params=()):
self.params = params
self.sql = sql
self.using = using
self.cursor = None
# Mirror some properties of a normal query so that
# the compiler can be used to process results.
self.low_mark, self.high_mark = 0, None # Used for offset/limit
self.extra_select = {}
self.annotation_select = {}
def chain(self, using):
return self.clone(using)
def clone(self, using):
return RawQuery(self.sql, using, params=self.params)
def get_columns(self):
if self.cursor is None:
self._execute_query()
converter = connections[self.using].introspection.identifier_converter
return [converter(column_meta[0]) for column_meta in self.cursor.description]
def __iter__(self):
# Always execute a new query for a new iterator.
# This could be optimized with a cache at the expense of RAM.
self._execute_query()
if not connections[self.using].features.can_use_chunked_reads:
# If the database can't use chunked reads we need to make sure we
# evaluate the entire query up front.
result = list(self.cursor)
else:
result = self.cursor
return iter(result)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
@property
def params_type(self):
if self.params is None:
return None
return dict if isinstance(self.params, Mapping) else tuple
def __str__(self):
if self.params_type is None:
return self.sql
return self.sql % self.params_type(self.params)
def _execute_query(self):
connection = connections[self.using]
# Adapt parameters to the database, as much as possible considering
# that the target type isn't known. See #17755.
params_type = self.params_type
adapter = connection.ops.adapt_unknown_value
if params_type is tuple:
params = tuple(adapter(val) for val in self.params)
elif params_type is dict:
params = {key: adapter(val) for key, val in self.params.items()}
elif params_type is None:
params = None
else:
raise RuntimeError("Unexpected params type: %s" % params_type)
self.cursor = connection.cursor()
self.cursor.execute(self.sql, params)
ExplainInfo = namedtuple("ExplainInfo", ("format", "options"))
class Query(BaseExpression):
"""A single SQL query."""
alias_prefix = "T"
empty_result_set_value = None
subq_aliases = frozenset([alias_prefix])
compiler = "SQLCompiler"
base_table_class = BaseTable
join_class = Join
default_cols = True
default_ordering = True
standard_ordering = True
filter_is_sticky = False
subquery = False
# SQL-related attributes.
# Select and related select clauses are expressions to use in the SELECT
# clause of the query. The select is used for cases where we want to set up
# the select clause to contain other than default fields (values(),
# subqueries...). Note that annotations go to annotations dictionary.
select = ()
# The group_by attribute can have one of the following forms:
# - None: no group by at all in the query
# - A tuple of expressions: group by (at least) those expressions.
# String refs are also allowed for now.
# - True: group by all select fields of the model
# See compiler.get_group_by() for details.
group_by = None
order_by = ()
low_mark = 0 # Used for offset/limit.
high_mark = None # Used for offset/limit.
distinct = False
distinct_fields = ()
select_for_update = False
select_for_update_nowait = False
select_for_update_skip_locked = False
select_for_update_of = ()
select_for_no_key_update = False
select_related = False
# Arbitrary limit for select_related to prevent infinite recursion.
max_depth = 5
# Holds the selects defined by a call to values() or values_list()
# excluding annotation_select and extra_select.
values_select = ()
# SQL annotation-related attributes.
annotation_select_mask = None
_annotation_select_cache = None
# Set combination attributes.
combinator = None
combinator_all = False
combined_queries = ()
# These are for extensions. The contents are more or less appended verbatim
# to the appropriate clause.
extra_select_mask = None
_extra_select_cache = None
extra_tables = ()
extra_order_by = ()
# A tuple of a set of model field names and a flag: True if these are
# the fields to defer, or False if these are the only fields to load.
deferred_loading = (frozenset(), True)
explain_info = None
def __init__(self, model, alias_cols=True):
self.model = model
self.alias_refcount = {}
# alias_map is the most important data structure regarding joins.
# It's used for recording which joins exist in the query and what
# types they are. The key is the alias of the joined table (possibly
# the table name) and the value is a Join-like object (see
# sql.datastructures.Join for more information).
self.alias_map = {}
# Whether to provide alias to columns during reference resolving.
self.alias_cols = alias_cols
# Sometimes the query contains references to aliases in outer queries (as
# a result of split_exclude). Correct alias quoting needs to know these
# aliases too.
# Map external tables to whether they are aliased.
self.external_aliases = {}
self.table_map = {} # Maps table names to list of aliases.
self.used_aliases = set()
self.where = WhereNode()
# Maps alias -> Annotation Expression.
self.annotations = {}
# These are for extensions. The contents are more or less appended
# verbatim to the appropriate clause.
self.extra = {} # Maps col_alias -> (col_sql, params).
self._filtered_relations = {}
@property
def output_field(self):
if len(self.select) == 1:
select = self.select[0]
return getattr(select, "target", None) or select.field
elif len(self.annotation_select) == 1:
return next(iter(self.annotation_select.values())).output_field
@property
def has_select_fields(self):
return bool(
self.select or self.annotation_select_mask or self.extra_select_mask
)
@cached_property
def base_table(self):
for alias in self.alias_map:
return alias
def __str__(self):
"""
Return the query as a string of SQL with the parameter values
substituted in (use sql_with_params() to see the unsubstituted string).
Parameter values won't necessarily be quoted correctly, since that is
done by the database interface at execution time.
"""
sql, params = self.sql_with_params()
return sql % params
def sql_with_params(self):
"""
Return the query as an SQL string and the parameters that will be
substituted into the query.
"""
return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
def __deepcopy__(self, memo):
"""Limit the amount of work when a Query is deepcopied."""
result = self.clone()
memo[id(self)] = result
return result
def get_compiler(self, using=None, connection=None, elide_empty=True):
if using is None and connection is None:
raise ValueError("Need either using or connection")
if using:
connection = connections[using]
return connection.ops.compiler(self.compiler)(
self, connection, using, elide_empty
)
def get_meta(self):
"""
Return the Options instance (the model._meta) from which to start
processing. Normally, this is self.model._meta, but it can be changed
by subclasses.
"""
if self.model:
return self.model._meta
def clone(self):
"""
Return a copy of the current Query. A lightweight alternative to
deepcopy().
"""
obj = Empty()
obj.__class__ = self.__class__
# Copy references to everything.
obj.__dict__ = self.__dict__.copy()
# Clone attributes that can't use shallow copy.
obj.alias_refcount = self.alias_refcount.copy()
obj.alias_map = self.alias_map.copy()
obj.external_aliases = self.external_aliases.copy()
obj.table_map = self.table_map.copy()
obj.where = self.where.clone()
obj.annotations = self.annotations.copy()
if self.annotation_select_mask is not None:
obj.annotation_select_mask = self.annotation_select_mask.copy()
if self.combined_queries:
obj.combined_queries = tuple(
[query.clone() for query in self.combined_queries]
)
# _annotation_select_cache cannot be copied, as doing so breaks the
# (necessary) state in which both annotations and
# _annotation_select_cache point to the same underlying objects.
# It will get re-populated in the cloned queryset the next time it's
# used.
obj._annotation_select_cache = None
obj.extra = self.extra.copy()
if self.extra_select_mask is not None:
obj.extra_select_mask = self.extra_select_mask.copy()
if self._extra_select_cache is not None:
obj._extra_select_cache = self._extra_select_cache.copy()
if self.select_related is not False:
# Use deepcopy because select_related stores fields in nested
# dicts.
obj.select_related = copy.deepcopy(obj.select_related)
if "subq_aliases" in self.__dict__:
obj.subq_aliases = self.subq_aliases.copy()
obj.used_aliases = self.used_aliases.copy()
obj._filtered_relations = self._filtered_relations.copy()
# Clear the cached_property, if it exists.
obj.__dict__.pop("base_table", None)
return obj
def chain(self, klass=None):
"""
Return a copy of the current Query that's ready for another operation.
The klass argument changes the type of the Query, e.g. UpdateQuery.
"""
obj = self.clone()
if klass and obj.__class__ != klass:
obj.__class__ = klass
if not obj.filter_is_sticky:
obj.used_aliases = set()
obj.filter_is_sticky = False
if hasattr(obj, "_setup_query"):
obj._setup_query()
return obj
def relabeled_clone(self, change_map):
clone = self.clone()
clone.change_aliases(change_map)
return clone
def _get_col(self, target, field, alias):
if not self.alias_cols:
alias = None
return target.get_col(alias, field)
def rewrite_cols(self, annotation, col_cnt):
# We must make sure the inner query has the referred columns in it.
# If we are aggregating over an annotation, then Django uses Ref()
# instances to note this. However, if we are annotating over a column
# of a related model, then it might be that column isn't part of the
# SELECT clause of the inner query, and we must manually make sure
# the column is selected. An example case is:
# .aggregate(Sum('author__awards'))
# Resolving this expression results in a join to author, but there
# is no guarantee the awards column of author is in the select clause
# of the query. Thus we must manually add the column to the inner
# query.
orig_exprs = annotation.get_source_expressions()
new_exprs = []
for expr in orig_exprs:
# FIXME: These conditions are fairly arbitrary. Identify a better
# method of having expressions decide which code path they should
# take.
if isinstance(expr, Ref):
# It's already a Ref to a subquery (see resolve_ref() for
# details).
new_exprs.append(expr)
elif isinstance(expr, (WhereNode, Lookup)):
# Decompose the subexpressions further. The code here is
# copied from the else clause, but this condition must appear
# before the contains_aggregate/is_summary condition below.
new_expr, col_cnt = self.rewrite_cols(expr, col_cnt)
new_exprs.append(new_expr)
else:
# Reuse aliases of expressions already selected in subquery.
for col_alias, selected_annotation in self.annotation_select.items():
if selected_annotation is expr:
new_expr = Ref(col_alias, expr)
break
else:
# An expression that is not selected in the subquery.
if isinstance(expr, Col) or (
expr.contains_aggregate and not expr.is_summary
):
# Reference column or another aggregate. Select it
# under a non-conflicting alias.
col_cnt += 1
col_alias = "__col%d" % col_cnt
self.annotations[col_alias] = expr
self.append_annotation_mask([col_alias])
new_expr = Ref(col_alias, expr)
else:
                        # Some other expression not referencing database values
                        # directly. Its subexpressions might contain Cols.
new_expr, col_cnt = self.rewrite_cols(expr, col_cnt)
new_exprs.append(new_expr)
annotation.set_source_expressions(new_exprs)
return annotation, col_cnt
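    # Illustration (editor's note, model names assumed): a call such as
    #
    #   Book.objects.annotate(n=Count("chapters")).aggregate(
    #       Sum("authors__awards"),
    #   )
    #
    # takes the subquery path, and rewrite_cols() selects the awards column
    # in the inner query under a "__col1"-style alias so the outer Sum()
    # can reference it.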
def get_aggregation(self, using, added_aggregate_names):
"""
Return the dictionary with the values of the existing aggregations.
"""
if not self.annotation_select:
return {}
existing_annotations = [
annotation
for alias, annotation in self.annotations.items()
if alias not in added_aggregate_names
]
# Decide if we need to use a subquery.
#
# Existing annotations would cause incorrect results as get_aggregation()
# must produce just one result and thus must not use GROUP BY. But we
# aren't smart enough to remove the existing annotations from the
# query, so those would force us to use GROUP BY.
#
# If the query has limit or distinct, or uses set operations, then
# those operations must be done in a subquery so that the query
# aggregates on the limit and/or distinct results instead of applying
# the distinct and limit after the aggregation.
if (
isinstance(self.group_by, tuple)
or self.is_sliced
or existing_annotations
or self.distinct
or self.combinator
):
from django.db.models.sql.subqueries import AggregateQuery
inner_query = self.clone()
inner_query.subquery = True
outer_query = AggregateQuery(self.model, inner_query)
inner_query.select_for_update = False
inner_query.select_related = False
inner_query.set_annotation_mask(self.annotation_select)
# Queries with distinct_fields need ordering and when a limit is
# applied we must take the slice from the ordered query. Otherwise
# no need for ordering.
inner_query.clear_ordering(force=False)
if not inner_query.distinct:
# If the inner query uses default select and it has some
# aggregate annotations, then we must make sure the inner
# query is grouped by the main model's primary key. However,
# clearing the select clause can alter results if distinct is
# used.
has_existing_aggregate_annotations = any(
annotation
for annotation in existing_annotations
if getattr(annotation, "contains_aggregate", True)
)
if inner_query.default_cols and has_existing_aggregate_annotations:
inner_query.group_by = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
inner_query.default_cols = False
relabels = {t: "subquery" for t in inner_query.alias_map}
relabels[None] = "subquery"
# Remove any aggregates marked for reduction from the subquery
# and move them to the outer AggregateQuery.
col_cnt = 0
for alias, expression in list(inner_query.annotation_select.items()):
annotation_select_mask = inner_query.annotation_select_mask
if expression.is_summary:
expression, col_cnt = inner_query.rewrite_cols(expression, col_cnt)
outer_query.annotations[alias] = expression.relabeled_clone(
relabels
)
del inner_query.annotations[alias]
annotation_select_mask.remove(alias)
            # Make sure the annotation_select won't use cached results.
inner_query.set_annotation_mask(inner_query.annotation_select_mask)
if (
inner_query.select == ()
and not inner_query.default_cols
and not inner_query.annotation_select_mask
):
# In case of Model.objects[0:3].count(), there would be no
# field selected in the inner query, yet we must use a subquery.
# So, make sure at least one field is selected.
inner_query.select = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
else:
outer_query = self
self.select = ()
self.default_cols = False
self.extra = {}
empty_set_result = [
expression.empty_result_set_value
for expression in outer_query.annotation_select.values()
]
elide_empty = not any(result is NotImplemented for result in empty_set_result)
outer_query.clear_ordering(force=True)
outer_query.clear_limits()
outer_query.select_for_update = False
outer_query.select_related = False
compiler = outer_query.get_compiler(using, elide_empty=elide_empty)
result = compiler.execute_sql(SINGLE)
if result is None:
result = empty_set_result
converters = compiler.get_converters(outer_query.annotation_select.values())
result = next(compiler.apply_converters((result,), converters))
return dict(zip(outer_query.annotation_select, result))
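    # Rough SQL shape when the subquery path above is taken (illustrative):
    #
    #   SELECT COUNT(*) FROM (SELECT ... FROM ... LIMIT 3) subquery
    #
    # which is why slicing, DISTINCT, combinators, and existing annotations
    # all force the inner-query branch.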
def get_count(self, using):
"""
Perform a COUNT() query using the current filter constraints.
"""
obj = self.clone()
obj.add_annotation(Count("*"), alias="__count", is_summary=True)
return obj.get_aggregation(using, ["__count"])["__count"]
def has_filters(self):
return self.where
def exists(self, using, limit=True):
q = self.clone()
if not (q.distinct and q.is_sliced):
if q.group_by is True:
q.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
q.set_group_by(allow_aliases=False)
q.clear_select_clause()
if q.combined_queries and q.combinator == "union":
limit_combined = connections[
using
].features.supports_slicing_ordering_in_compound
q.combined_queries = tuple(
combined_query.exists(using, limit=limit_combined)
for combined_query in q.combined_queries
)
q.clear_ordering(force=True)
if limit:
q.set_limits(high=1)
q.add_annotation(Value(1), "a")
return q
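    # Editor's note: the reduced query is roughly
    #   SELECT (1) AS "a" FROM ... WHERE ... LIMIT 1
    # and for a UNION each combined query is reduced the same way first.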
def has_results(self, using):
q = self.exists(using)
compiler = q.get_compiler(using=using)
return compiler.has_results()
def explain(self, using, format=None, **options):
q = self.clone()
for option_name in options:
if (
not EXPLAIN_OPTIONS_PATTERN.fullmatch(option_name)
or "--" in option_name
):
raise ValueError(f"Invalid option name: {option_name!r}.")
q.explain_info = ExplainInfo(format, options)
compiler = q.get_compiler(using=using)
return "\n".join(compiler.explain_query())
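    # Reached via QuerySet.explain(); options are backend specific, e.g. on
    # PostgreSQL (illustrative):
    #
    #   Model.objects.filter(...).explain(format="json", analyze=True)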
def combine(self, rhs, connector):
"""
Merge the 'rhs' query into the current one (with any 'rhs' effects
being applied *after* (that is, "to the right of") anything in the
current query. 'rhs' is not modified during a call to this function.
The 'connector' parameter describes how to connect filters from the
'rhs' query.
"""
if self.model != rhs.model:
raise TypeError("Cannot combine queries on two different base models.")
if self.is_sliced:
raise TypeError("Cannot combine queries once a slice has been taken.")
if self.distinct != rhs.distinct:
raise TypeError("Cannot combine a unique query with a non-unique query.")
if self.distinct_fields != rhs.distinct_fields:
raise TypeError("Cannot combine queries with different distinct fields.")
        # If lhs and rhs share the same alias prefix, it is possible to have
        # conflicting alias changes like T4 -> T5, T5 -> T6, which might end up
        # as T4 -> T6 while combining two querysets. To prevent this, change
        # the alias prefix of the rhs and update current aliases accordingly,
# except if the alias is the base table since it must be present in the
# query on both sides.
initial_alias = self.get_initial_alias()
rhs.bump_prefix(self, exclude={initial_alias})
# Work out how to relabel the rhs aliases, if necessary.
change_map = {}
conjunction = connector == AND
# Determine which existing joins can be reused. When combining the
# query with AND we must recreate all joins for m2m filters. When
        # combining with OR we can reuse joins. The reason is that in the AND
        # case a single row can't fulfill a condition like:
# revrel__col=1 & revrel__col=2
        # But there might be two different related rows matching this
        # condition. In the OR case a single True is enough, so a single row
        # is enough, too.
#
# Note that we will be creating duplicate joins for non-m2m joins in
# the AND case. The results will be correct but this creates too many
# joins. This is something that could be fixed later on.
reuse = set() if conjunction else set(self.alias_map)
joinpromoter = JoinPromoter(connector, 2, False)
joinpromoter.add_votes(
j for j in self.alias_map if self.alias_map[j].join_type == INNER
)
rhs_votes = set()
# Now, add the joins from rhs query into the new query (skipping base
# table).
rhs_tables = list(rhs.alias_map)[1:]
for alias in rhs_tables:
join = rhs.alias_map[alias]
# If the left side of the join was already relabeled, use the
# updated alias.
join = join.relabeled_clone(change_map)
new_alias = self.join(join, reuse=reuse)
if join.join_type == INNER:
rhs_votes.add(new_alias)
# We can't reuse the same join again in the query. If we have two
# distinct joins for the same connection in rhs query, then the
# combined query must have two joins, too.
reuse.discard(new_alias)
if alias != new_alias:
change_map[alias] = new_alias
if not rhs.alias_refcount[alias]:
# The alias was unused in the rhs query. Unref it so that it
# will be unused in the new query, too. We have to add and
# unref the alias so that join promotion has information of
# the join type for the unused alias.
self.unref_alias(new_alias)
joinpromoter.add_votes(rhs_votes)
joinpromoter.update_join_types(self)
        # Combine subquery aliases to ensure that alias relabelling properly
        # handles subqueries when combining where and select clauses.
self.subq_aliases |= rhs.subq_aliases
# Now relabel a copy of the rhs where-clause and add it to the current
# one.
w = rhs.where.clone()
w.relabel_aliases(change_map)
self.where.add(w, connector)
# Selection columns and extra extensions are those provided by 'rhs'.
if rhs.select:
self.set_select([col.relabeled_clone(change_map) for col in rhs.select])
else:
self.select = ()
if connector == OR:
# It would be nice to be able to handle this, but the queries don't
# really make sense (or return consistent value sets). Not worth
# the extra complexity when you can write a real query instead.
if self.extra and rhs.extra:
raise ValueError(
"When merging querysets using 'or', you cannot have "
"extra(select=...) on both sides."
)
self.extra.update(rhs.extra)
extra_select_mask = set()
if self.extra_select_mask is not None:
extra_select_mask.update(self.extra_select_mask)
if rhs.extra_select_mask is not None:
extra_select_mask.update(rhs.extra_select_mask)
if extra_select_mask:
self.set_extra_mask(extra_select_mask)
self.extra_tables += rhs.extra_tables
# Ordering uses the 'rhs' ordering, unless it has none, in which case
# the current ordering is used.
self.order_by = rhs.order_by or self.order_by
self.extra_order_by = rhs.extra_order_by or self.extra_order_by
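    # Editor's sketch: combine() backs the QuerySet & and | operators, e.g.
    #
    #   Author.objects.filter(name="a") | Author.objects.filter(num=7)
    #
    # merges the rhs where-clause into the lhs with an OR connector,
    # relabelling rhs aliases through change_map as computed above.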
def deferred_to_data(self, target):
"""
        Convert the self.deferred_loading data structure to an alternate data
        structure, describing the fields that *will* be loaded. This is used to
compute the columns to select from the database and also by the
QuerySet class to work out which fields are being initialized on each
model. Models that have all their fields included aren't mentioned in
the result, only those that have field restrictions in place.
The "target" parameter is the instance that is populated (in place).
"""
field_names, defer = self.deferred_loading
if not field_names:
return
orig_opts = self.get_meta()
seen = {}
must_include = {orig_opts.concrete_model: {orig_opts.pk}}
for field_name in field_names:
parts = field_name.split(LOOKUP_SEP)
cur_model = self.model._meta.concrete_model
opts = orig_opts
for name in parts[:-1]:
old_model = cur_model
if name in self._filtered_relations:
name = self._filtered_relations[name].relation_name
source = opts.get_field(name)
if is_reverse_o2o(source):
cur_model = source.related_model
else:
cur_model = source.remote_field.model
opts = cur_model._meta
# Even if we're "just passing through" this model, we must add
# both the current model's pk and the related reference field
# (if it's not a reverse relation) to the things we select.
if not is_reverse_o2o(source):
must_include[old_model].add(source)
add_to_dict(must_include, cur_model, opts.pk)
field = opts.get_field(parts[-1])
is_reverse_object = field.auto_created and not field.concrete
model = field.related_model if is_reverse_object else field.model
model = model._meta.concrete_model
if model == opts.model:
model = cur_model
if not is_reverse_o2o(field):
add_to_dict(seen, model, field)
if defer:
# We need to load all fields for each model, except those that
# appear in "seen" (for all models that appear in "seen"). The only
# slight complexity here is handling fields that exist on parent
# models.
workset = {}
for model, values in seen.items():
for field in model._meta.local_fields:
if field not in values:
m = field.model._meta.concrete_model
add_to_dict(workset, m, field)
for model, values in must_include.items():
# If we haven't included a model in workset, we don't add the
# corresponding must_include fields for that model, since an
# empty set means "include all fields". That's why there's no
# "else" branch here.
if model in workset:
workset[model].update(values)
for model, fields in workset.items():
target[model] = {f.attname for f in fields}
else:
for model, values in must_include.items():
if model in seen:
seen[model].update(values)
else:
# As we've passed through this model, but not explicitly
# included any fields, we have to make sure it's mentioned
# so that only the "must include" fields are pulled in.
seen[model] = values
# Now ensure that every model in the inheritance chain is mentioned
# in the parent list. Again, it must be mentioned to ensure that
# only "must include" fields are pulled in.
for model in orig_opts.get_parent_list():
seen.setdefault(model, set())
for model, fields in seen.items():
target[model] = {f.attname for f in fields}
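    # Illustration (hypothetical fields): for Entry.objects.defer("body"),
    # "target" ends up roughly as {Entry: {"id", "headline", ...}} - every
    # concrete attname except "body" - while only()/defer() spanning
    # relations populate one entry per restricted model.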
def table_alias(self, table_name, create=False, filtered_relation=None):
"""
Return a table alias for the given table_name and whether this is a
new alias or not.
        If 'create' is true, a new alias is always created. Otherwise, the
        first alias created for the table (if one exists) is reused.
"""
alias_list = self.table_map.get(table_name)
if not create and alias_list:
alias = alias_list[0]
self.alias_refcount[alias] += 1
return alias, False
# Create a new alias for this table.
if alias_list:
alias = "%s%d" % (self.alias_prefix, len(self.alias_map) + 1)
alias_list.append(alias)
else:
# The first occurrence of a table uses the table name directly.
alias = (
filtered_relation.alias if filtered_relation is not None else table_name
)
self.table_map[table_name] = [alias]
self.alias_refcount[alias] = 1
return alias, True
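    # Alias naming sketch: the first use of a table keeps the plain table
    # name; later joins to the same table get aliases like "T3", "T4", ...
    # built from alias_prefix (default "T") and the current alias_map size.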
def ref_alias(self, alias):
"""Increases the reference count for this alias."""
self.alias_refcount[alias] += 1
def unref_alias(self, alias, amount=1):
"""Decreases the reference count for this alias."""
self.alias_refcount[alias] -= amount
def promote_joins(self, aliases):
"""
        Recursively promote the join type of the given aliases and their
        children to an outer join. A join is only promoted if it is nullable
        or the parent join is an outer join.
        The children promotion is done to avoid join chains that contain
        a LOUTER b INNER c. So, if we currently have a INNER b INNER c and
        a->b is promoted, then we must also promote b->c automatically, or
        otherwise the promotion of a->b doesn't actually change anything in
        the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type is None:
# This is the base table (first FROM entry) - this table
# isn't really joined at all in the query, so we should not
# alter its join type.
continue
# Only the first alias (skipped above) should have None join_type
assert self.alias_map[alias].join_type is not None
parent_alias = self.alias_map[alias].parent_alias
parent_louter = (
parent_alias and self.alias_map[parent_alias].join_type == LOUTER
)
already_louter = self.alias_map[alias].join_type == LOUTER
if (self.alias_map[alias].nullable or parent_louter) and not already_louter:
self.alias_map[alias] = self.alias_map[alias].promote()
# Join type of 'alias' changed, so re-examine all aliases that
# refer to this one.
aliases.extend(
join
for join in self.alias_map
if self.alias_map[join].parent_alias == alias
and join not in aliases
)
def demote_joins(self, aliases):
"""
Change join type from LOUTER to INNER for all joins in aliases.
Similarly to promote_joins(), this method must ensure no join chains
containing first an outer, then an inner join are generated. If we
are demoting b->c join in chain a LOUTER b LOUTER c then we must
demote a->b automatically, or otherwise the demotion of b->c doesn't
        actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type == LOUTER:
self.alias_map[alias] = self.alias_map[alias].demote()
parent_alias = self.alias_map[alias].parent_alias
if self.alias_map[parent_alias].join_type == INNER:
aliases.append(parent_alias)
def reset_refcounts(self, to_counts):
"""
Reset reference counts for aliases so that they match the value passed
in `to_counts`.
"""
for alias, cur_refcount in self.alias_refcount.copy().items():
unref_amount = cur_refcount - to_counts.get(alias, 0)
self.unref_alias(alias, unref_amount)
def change_aliases(self, change_map):
"""
Change the aliases in change_map (which maps old-alias -> new-alias),
relabelling any references to them in select columns and the where
clause.
"""
# If keys and values of change_map were to intersect, an alias might be
# updated twice (e.g. T4 -> T5, T5 -> T6, so also T4 -> T6) depending
# on their order in change_map.
assert set(change_map).isdisjoint(change_map.values())
# 1. Update references in "select" (normal columns plus aliases),
# "group by" and "where".
self.where.relabel_aliases(change_map)
if isinstance(self.group_by, tuple):
self.group_by = tuple(
[col.relabeled_clone(change_map) for col in self.group_by]
)
self.select = tuple([col.relabeled_clone(change_map) for col in self.select])
self.annotations = self.annotations and {
key: col.relabeled_clone(change_map)
for key, col in self.annotations.items()
}
# 2. Rename the alias in the internal table/alias datastructures.
for old_alias, new_alias in change_map.items():
if old_alias not in self.alias_map:
continue
alias_data = self.alias_map[old_alias].relabeled_clone(change_map)
self.alias_map[new_alias] = alias_data
self.alias_refcount[new_alias] = self.alias_refcount[old_alias]
del self.alias_refcount[old_alias]
del self.alias_map[old_alias]
table_aliases = self.table_map[alias_data.table_name]
for pos, alias in enumerate(table_aliases):
if alias == old_alias:
table_aliases[pos] = new_alias
break
self.external_aliases = {
# Table is aliased or it's being changed and thus is aliased.
change_map.get(alias, alias): (aliased or alias in change_map)
for alias, aliased in self.external_aliases.items()
}
def bump_prefix(self, other_query, exclude=None):
"""
Change the alias prefix to the next letter in the alphabet in a way
that the other query's aliases and this query's aliases will not
conflict. Even tables that previously had no alias will get an alias
after this call. To prevent changing aliases use the exclude parameter.
"""
def prefix_gen():
"""
Generate a sequence of characters in alphabetical order:
-> 'A', 'B', 'C', ...
When the alphabet is finished, the sequence will continue with the
Cartesian product:
-> 'AA', 'AB', 'AC', ...
"""
alphabet = ascii_uppercase
prefix = chr(ord(self.alias_prefix) + 1)
yield prefix
for n in count(1):
seq = alphabet[alphabet.index(prefix) :] if prefix else alphabet
for s in product(seq, repeat=n):
yield "".join(s)
prefix = None
if self.alias_prefix != other_query.alias_prefix:
# No clashes between self and outer query should be possible.
return
# Explicitly avoid infinite loop. The constant divider is based on how
# much depth recursive subquery references add to the stack. This value
# might need to be adjusted when adding or removing function calls from
# the code path in charge of performing these operations.
local_recursion_limit = sys.getrecursionlimit() // 16
for pos, prefix in enumerate(prefix_gen()):
if prefix not in self.subq_aliases:
self.alias_prefix = prefix
break
if pos > local_recursion_limit:
raise RecursionError(
"Maximum recursion depth exceeded: too many subqueries."
)
self.subq_aliases = self.subq_aliases.union([self.alias_prefix])
other_query.subq_aliases = other_query.subq_aliases.union(self.subq_aliases)
if exclude is None:
exclude = {}
self.change_aliases(
{
alias: "%s%d" % (self.alias_prefix, pos)
for pos, alias in enumerate(self.alias_map)
if alias not in exclude
}
)
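    # Sketch: with the default prefix "T", a subquery bumps itself to "U"
    # and renames its aliases to U0, U1, ... so they cannot clash with the
    # outer query's T-prefixed aliases.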
def get_initial_alias(self):
"""
Return the first alias for this query, after increasing its reference
count.
"""
if self.alias_map:
alias = self.base_table
self.ref_alias(alias)
elif self.model:
alias = self.join(self.base_table_class(self.get_meta().db_table, None))
else:
alias = None
return alias
def count_active_tables(self):
"""
Return the number of tables in this query with a non-zero reference
count. After execution, the reference counts are zeroed, so tables
        added in the compiler will not be seen by this method.
"""
return len([1 for count in self.alias_refcount.values() if count])
def join(self, join, reuse=None, reuse_with_filtered_relation=False):
"""
Return an alias for the 'join', either reusing an existing alias for
that join or creating a new one. 'join' is either a base_table_class or
join_class.
The 'reuse' parameter can be either None which means all joins are
reusable, or it can be a set containing the aliases that can be reused.
The 'reuse_with_filtered_relation' parameter is used when computing
FilteredRelation instances.
A join is always created as LOUTER if the lhs alias is LOUTER to make
sure chains like t1 LOUTER t2 INNER t3 aren't generated. All new
joins are created as LOUTER if the join is nullable.
"""
if reuse_with_filtered_relation and reuse:
reuse_aliases = [
a for a, j in self.alias_map.items() if a in reuse and j.equals(join)
]
else:
reuse_aliases = [
a
for a, j in self.alias_map.items()
if (reuse is None or a in reuse) and j == join
]
if reuse_aliases:
if join.table_alias in reuse_aliases:
reuse_alias = join.table_alias
else:
# Reuse the most recent alias of the joined table
# (a many-to-many relation may be joined multiple times).
reuse_alias = reuse_aliases[-1]
self.ref_alias(reuse_alias)
return reuse_alias
# No reuse is possible, so we need a new alias.
alias, _ = self.table_alias(
join.table_name, create=True, filtered_relation=join.filtered_relation
)
if join.join_type:
if self.alias_map[join.parent_alias].join_type == LOUTER or join.nullable:
join_type = LOUTER
else:
join_type = INNER
join.join_type = join_type
join.table_alias = alias
self.alias_map[alias] = join
return alias
def join_parent_model(self, opts, model, alias, seen):
"""
Make sure the given 'model' is joined in the query. If 'model' isn't
a parent of 'opts' or if it is None this method is a no-op.
The 'alias' is the root alias for starting the join, 'seen' is a dict
of model -> alias of existing joins. It must also contain a mapping
of None -> some alias. This will be returned in the no-op case.
"""
if model in seen:
return seen[model]
chain = opts.get_base_chain(model)
if not chain:
return alias
curr_opts = opts
for int_model in chain:
if int_model in seen:
curr_opts = int_model._meta
alias = seen[int_model]
continue
            # Proxy models have elements in the base chain with no parents;
            # assign the new options object and skip to the next base in
            # that case.
if not curr_opts.parents[int_model]:
curr_opts = int_model._meta
continue
link_field = curr_opts.get_ancestor_link(int_model)
join_info = self.setup_joins([link_field.name], curr_opts, alias)
curr_opts = int_model._meta
alias = seen[int_model] = join_info.joins[-1]
return alias or seen[None]
def check_alias(self, alias):
if FORBIDDEN_ALIAS_PATTERN.search(alias):
raise ValueError(
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
def add_annotation(self, annotation, alias, is_summary=False, select=True):
"""Add a single annotation expression to the Query."""
self.check_alias(alias)
annotation = annotation.resolve_expression(
self, allow_joins=True, reuse=None, summarize=is_summary
)
if select:
self.append_annotation_mask([alias])
else:
self.set_annotation_mask(set(self.annotation_select).difference({alias}))
self.annotations[alias] = annotation
def resolve_expression(self, query, *args, **kwargs):
clone = self.clone()
# Subqueries need to use a different set of aliases than the outer query.
clone.bump_prefix(query)
clone.subquery = True
clone.where.resolve_expression(query, *args, **kwargs)
# Resolve combined queries.
if clone.combinator:
clone.combined_queries = tuple(
[
combined_query.resolve_expression(query, *args, **kwargs)
for combined_query in clone.combined_queries
]
)
for key, value in clone.annotations.items():
resolved = value.resolve_expression(query, *args, **kwargs)
if hasattr(resolved, "external_aliases"):
resolved.external_aliases.update(clone.external_aliases)
clone.annotations[key] = resolved
# Outer query's aliases are considered external.
for alias, table in query.alias_map.items():
clone.external_aliases[alias] = (
isinstance(table, Join)
and table.join_field.related_model._meta.db_table != alias
) or (
isinstance(table, BaseTable) and table.table_name != table.table_alias
)
return clone
def get_external_cols(self):
exprs = chain(self.annotations.values(), self.where.children)
return [
col
for col in self._gen_cols(exprs, include_external=True)
if col.alias in self.external_aliases
]
def get_group_by_cols(self, alias=None):
if alias:
return [Ref(alias, self)]
external_cols = self.get_external_cols()
if any(col.possibly_multivalued for col in external_cols):
return [self]
return external_cols
def as_sql(self, compiler, connection):
        # Some backends (e.g. Oracle) raise an error when a subquery contains
        # an unnecessary ORDER BY clause.
if (
self.subquery
and not connection.features.ignores_unnecessary_order_by_in_subqueries
):
self.clear_ordering(force=False)
sql, params = self.get_compiler(connection=connection).as_sql()
if self.subquery:
sql = "(%s)" % sql
return sql, params
def resolve_lookup_value(self, value, can_reuse, allow_joins):
if hasattr(value, "resolve_expression"):
value = value.resolve_expression(
self,
reuse=can_reuse,
allow_joins=allow_joins,
)
elif isinstance(value, (list, tuple)):
# The items of the iterable may be expressions and therefore need
# to be resolved independently.
values = (
self.resolve_lookup_value(sub_value, can_reuse, allow_joins)
for sub_value in value
)
type_ = type(value)
if hasattr(type_, "_make"): # namedtuple
return type_(*values)
return type_(values)
return value
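    # Note (editor's sketch): list/tuple values are resolved element-wise so
    # expressions work inside __in, e.g. filter(pk__in=[F("other_id"), 7]);
    # namedtuples are detected via the _make attribute and rebuilt
    # positionally to keep their type.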
def solve_lookup_type(self, lookup):
"""
Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains').
"""
lookup_splitted = lookup.split(LOOKUP_SEP)
if self.annotations:
expression, expression_lookups = refs_expression(
lookup_splitted, self.annotations
)
if expression:
return expression_lookups, (), expression
_, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta())
field_parts = lookup_splitted[0 : len(lookup_splitted) - len(lookup_parts)]
if len(lookup_parts) > 1 and not field_parts:
raise FieldError(
                'Invalid lookup "%s" for model "%s".'
% (lookup, self.get_meta().model.__name__)
)
return lookup_parts, field_parts, False
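    # Example (illustrative): "author__name__icontains" yields field_parts
    # ["author", "name"] and lookup_parts ["icontains"], while a name that
    # matches an annotation returns that expression with its trailing
    # lookups instead.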
def check_query_object_type(self, value, opts, field):
"""
Check whether the object passed while querying is of the correct type.
If not, raise a ValueError specifying the wrong object.
"""
if hasattr(value, "_meta"):
if not check_rel_lookup_compatibility(value._meta.model, opts, field):
raise ValueError(
'Cannot query "%s": Must be "%s" instance.'
% (value, opts.object_name)
)
def check_related_objects(self, field, value, opts):
"""Check the type of object passed to query relations."""
if field.is_relation:
# Check that the field and the queryset use the same model in a
# query like .filter(author=Author.objects.all()). For example, the
# opts would be Author's (from the author field) and value.model
# would be Author.objects.all() queryset's .model (Author also).
# The field is the related field on the lhs side.
if (
isinstance(value, Query)
and not value.has_select_fields
and not check_rel_lookup_compatibility(value.model, opts, field)
):
raise ValueError(
'Cannot use QuerySet for "%s": Use a QuerySet for "%s".'
% (value.model._meta.object_name, opts.object_name)
)
elif hasattr(value, "_meta"):
self.check_query_object_type(value, opts, field)
elif hasattr(value, "__iter__"):
for v in value:
self.check_query_object_type(v, opts, field)
def check_filterable(self, expression):
"""Raise an error if expression cannot be used in a WHERE clause."""
if hasattr(expression, "resolve_expression") and not getattr(
expression, "filterable", True
):
raise NotSupportedError(
expression.__class__.__name__ + " is disallowed in the filter "
"clause."
)
if hasattr(expression, "get_source_expressions"):
for expr in expression.get_source_expressions():
self.check_filterable(expr)
def build_lookup(self, lookups, lhs, rhs):
"""
Try to extract transforms and lookup from given lhs.
The lhs value is something that works like SQLExpression.
The rhs value is what the lookup is going to compare against.
        The 'lookups' argument is a list of names to extract using
        get_lookup() and get_transform().
"""
# __exact is the default lookup if one isn't given.
*transforms, lookup_name = lookups or ["exact"]
for name in transforms:
lhs = self.try_transform(lhs, name)
# First try get_lookup() so that the lookup takes precedence if the lhs
# supports both transform and lookup for the name.
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
if lhs.field.is_relation:
raise FieldError(
"Related Field got invalid lookup: {}".format(lookup_name)
)
# A lookup wasn't found. Try to interpret the name as a transform
# and do an Exact lookup against it.
lhs = self.try_transform(lhs, lookup_name)
lookup_name = "exact"
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
return
lookup = lookup_class(lhs, rhs)
        # Interpret '__exact=None' as the SQL 'IS NULL'; otherwise, reject all
        # uses of None as a query value unless the lookup supports it.
if lookup.rhs is None and not lookup.can_use_none_as_rhs:
if lookup_name not in ("exact", "iexact"):
raise ValueError("Cannot use None as a query value")
return lhs.get_lookup("isnull")(lhs, True)
# For Oracle '' is equivalent to null. The check must be done at this
# stage because join promotion can't be done in the compiler. Using
# DEFAULT_DB_ALIAS isn't nice but it's the best that can be done here.
# A similar thing is done in is_nullable(), too.
if (
lookup_name == "exact"
and lookup.rhs == ""
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
):
return lhs.get_lookup("isnull")(lhs, True)
return lookup
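    # Walk-through (editor's illustration): for pub_date__year__gt=2000 the
    # lookups list is ["year", "gt"]: "year" is applied as a transform and
    # "gt" is resolved via get_lookup(); a rhs of None under "exact" becomes
    # an IsNull lookup, per the comments above.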
def try_transform(self, lhs, name):
"""
Helper method for build_lookup(). Try to fetch and initialize
a transform for name parameter from lhs.
"""
transform_class = lhs.get_transform(name)
if transform_class:
return transform_class(lhs)
else:
output_field = lhs.output_field.__class__
suggested_lookups = difflib.get_close_matches(
name, output_field.get_lookups()
)
if suggested_lookups:
suggestion = ", perhaps you meant %s?" % " or ".join(suggested_lookups)
else:
suggestion = "."
raise FieldError(
"Unsupported lookup '%s' for %s or join on the field not "
"permitted%s" % (name, output_field.__name__, suggestion)
)
def build_filter(
self,
filter_expr,
branch_negated=False,
current_negated=False,
can_reuse=None,
allow_joins=True,
split_subq=True,
reuse_with_filtered_relation=False,
check_filterable=True,
):
"""
Build a WhereNode for a single filter clause but don't add it
to this Query. Query.add_q() will then add this filter to the where
Node.
The 'branch_negated' tells us if the current branch contains any
negations. This will be used to determine if subqueries are needed.
The 'current_negated' is used to determine if the current filter is
negated or not and this will be used to determine if IS NULL filtering
is needed.
The difference between current_negated and branch_negated is that
branch_negated is set on first negation, but current_negated is
flipped for each negation.
        Note that add_filter will not do any negating itself, that is done
        higher up in the code by add_q().
The 'can_reuse' is a set of reusable joins for multijoins.
If 'reuse_with_filtered_relation' is True, then only joins in can_reuse
will be reused.
The method will create a filter clause that can be added to the current
query. However, if the filter isn't added to the query then the caller
is responsible for unreffing the joins used.
"""
if isinstance(filter_expr, dict):
raise FieldError("Cannot parse keyword query as dict")
if isinstance(filter_expr, Q):
return self._add_q(
filter_expr,
branch_negated=branch_negated,
current_negated=current_negated,
used_aliases=can_reuse,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
)
if hasattr(filter_expr, "resolve_expression"):
if not getattr(filter_expr, "conditional", False):
raise TypeError("Cannot filter against a non-conditional expression.")
condition = filter_expr.resolve_expression(self, allow_joins=allow_joins)
if not isinstance(condition, Lookup):
condition = self.build_lookup(["exact"], condition, True)
return WhereNode([condition], connector=AND), []
arg, value = filter_expr
if not arg:
raise FieldError("Cannot parse keyword query %r" % arg)
lookups, parts, reffed_expression = self.solve_lookup_type(arg)
if check_filterable:
self.check_filterable(reffed_expression)
if not allow_joins and len(parts) > 1:
raise FieldError("Joined field references are not permitted in this query")
pre_joins = self.alias_refcount.copy()
value = self.resolve_lookup_value(value, can_reuse, allow_joins)
used_joins = {
k for k, v in self.alias_refcount.items() if v > pre_joins.get(k, 0)
}
if check_filterable:
self.check_filterable(value)
if reffed_expression:
condition = self.build_lookup(lookups, reffed_expression, value)
return WhereNode([condition], connector=AND), []
opts = self.get_meta()
alias = self.get_initial_alias()
allow_many = not branch_negated or not split_subq
try:
join_info = self.setup_joins(
parts,
opts,
alias,
can_reuse=can_reuse,
allow_many=allow_many,
reuse_with_filtered_relation=reuse_with_filtered_relation,
)
# Prevent iterator from being consumed by check_related_objects()
if isinstance(value, Iterator):
value = list(value)
self.check_related_objects(join_info.final_field, value, join_info.opts)
# split_exclude() needs to know which joins were generated for the
# lookup parts
self._lookup_joins = join_info.joins
except MultiJoin as e:
return self.split_exclude(filter_expr, can_reuse, e.names_with_path)
# Update used_joins before trimming since they are reused to determine
# which joins could be later promoted to INNER.
used_joins.update(join_info.joins)
targets, alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if can_reuse is not None:
can_reuse.update(join_list)
if join_info.final_field.is_relation:
# No support for transforms for relational fields
num_lookups = len(lookups)
if num_lookups > 1:
raise FieldError(
"Related Field got invalid lookup: {}".format(lookups[0])
)
if len(targets) == 1:
col = self._get_col(targets[0], join_info.final_field, alias)
else:
col = MultiColSource(
alias, targets, join_info.targets, join_info.final_field
)
else:
col = self._get_col(targets[0], join_info.final_field, alias)
condition = self.build_lookup(lookups, col, value)
lookup_type = condition.lookup_name
clause = WhereNode([condition], connector=AND)
require_outer = (
lookup_type == "isnull" and condition.rhs is True and not current_negated
)
if (
current_negated
and (lookup_type != "isnull" or condition.rhs is False)
and condition.rhs is not None
):
require_outer = True
if lookup_type != "isnull":
# The condition added here will be SQL like this:
# NOT (col IS NOT NULL), where the first NOT is added in
                # higher layers of code. The reason for the addition is that if col
# is null, then col != someval will result in SQL "unknown"
# which isn't the same as in Python. The Python None handling
# is wanted, and it can be gotten by
# (col IS NULL OR col != someval)
# <=>
# NOT (col IS NOT NULL AND col = someval).
if (
self.is_nullable(targets[0])
or self.alias_map[join_list[-1]].join_type == LOUTER
):
lookup_class = targets[0].get_lookup("isnull")
col = self._get_col(targets[0], join_info.targets[0], alias)
clause.add(lookup_class(col, False), AND)
# If someval is a nullable column, someval IS NOT NULL is
# added.
if isinstance(value, Col) and self.is_nullable(value.target):
lookup_class = value.target.get_lookup("isnull")
clause.add(lookup_class(value, False), AND)
return clause, used_joins if not require_outer else ()
def add_filter(self, filter_lhs, filter_rhs):
self.add_q(Q((filter_lhs, filter_rhs)))
def add_q(self, q_object):
"""
A preprocessor for the internal _add_q(). Responsible for doing final
join promotion.
"""
# For join promotion this case is doing an AND for the added q_object
# and existing conditions. So, any existing inner join forces the join
# type to remain inner. Existing outer joins can however be demoted.
# (Consider case where rel_a is LOUTER and rel_a__col=1 is added - if
# rel_a doesn't produce any rows, then the whole condition must fail.
        # So, demotion is OK.)
existing_inner = {
a for a in self.alias_map if self.alias_map[a].join_type == INNER
}
clause, _ = self._add_q(q_object, self.used_aliases)
if clause:
self.where.add(clause, AND)
self.demote_joins(existing_inner)
def build_where(self, filter_expr):
return self.build_filter(filter_expr, allow_joins=False)[0]
def clear_where(self):
self.where = WhereNode()
def _add_q(
self,
q_object,
used_aliases,
branch_negated=False,
current_negated=False,
allow_joins=True,
split_subq=True,
check_filterable=True,
):
"""Add a Q-object to the current filter."""
connector = q_object.connector
current_negated = current_negated ^ q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = WhereNode(connector=connector, negated=q_object.negated)
joinpromoter = JoinPromoter(
q_object.connector, len(q_object.children), current_negated
)
for child in q_object.children:
child_clause, needed_inner = self.build_filter(
child,
can_reuse=used_aliases,
branch_negated=branch_negated,
current_negated=current_negated,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
)
joinpromoter.add_votes(needed_inner)
if child_clause:
target_clause.add(child_clause, connector)
needed_inner = joinpromoter.update_join_types(self)
return target_clause, needed_inner
def build_filtered_relation_q(
self, q_object, reuse, branch_negated=False, current_negated=False
):
"""Add a FilteredRelation object to the current filter."""
connector = q_object.connector
current_negated ^= q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = WhereNode(connector=connector, negated=q_object.negated)
for child in q_object.children:
if isinstance(child, Node):
child_clause = self.build_filtered_relation_q(
child,
reuse=reuse,
branch_negated=branch_negated,
current_negated=current_negated,
)
else:
child_clause, _ = self.build_filter(
child,
can_reuse=reuse,
branch_negated=branch_negated,
current_negated=current_negated,
allow_joins=True,
split_subq=False,
reuse_with_filtered_relation=True,
)
target_clause.add(child_clause, connector)
return target_clause
def add_filtered_relation(self, filtered_relation, alias):
filtered_relation.alias = alias
lookups = dict(get_children_from_q(filtered_relation.condition))
relation_lookup_parts, relation_field_parts, _ = self.solve_lookup_type(
filtered_relation.relation_name
)
if relation_lookup_parts:
raise ValueError(
"FilteredRelation's relation_name cannot contain lookups "
"(got %r)." % filtered_relation.relation_name
)
for lookup in chain(lookups):
lookup_parts, lookup_field_parts, _ = self.solve_lookup_type(lookup)
shift = 2 if not lookup_parts else 1
lookup_field_path = lookup_field_parts[:-shift]
for idx, lookup_field_part in enumerate(lookup_field_path):
if len(relation_field_parts) > idx:
if relation_field_parts[idx] != lookup_field_part:
raise ValueError(
"FilteredRelation's condition doesn't support "
"relations outside the %r (got %r)."
% (filtered_relation.relation_name, lookup)
)
else:
raise ValueError(
"FilteredRelation's condition doesn't support nested "
"relations deeper than the relation_name (got %r for "
"%r)." % (lookup, filtered_relation.relation_name)
)
self._filtered_relations[filtered_relation.alias] = filtered_relation
def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False):
"""
        Walk the list of names and turn them into PathInfo tuples. A single
name in 'names' can generate multiple PathInfos (m2m, for example).
'names' is the path of names to travel, 'opts' is the model Options we
start the name resolving from, 'allow_many' is as for setup_joins().
If fail_on_missing is set to True, then a name that can't be resolved
will generate a FieldError.
Return a list of PathInfo tuples. In addition return the final field
(the last used join field) and target (which is a field guaranteed to
contain the same value as the final field). Finally, return those names
that weren't found (which are likely transforms and the final lookup).
"""
path, names_with_path = [], []
for pos, name in enumerate(names):
cur_names_with_path = (name, [])
if name == "pk":
name = opts.pk.name
field = None
filtered_relation = None
try:
if opts is None:
raise FieldDoesNotExist
field = opts.get_field(name)
except FieldDoesNotExist:
if name in self.annotation_select:
field = self.annotation_select[name].output_field
elif name in self._filtered_relations and pos == 0:
filtered_relation = self._filtered_relations[name]
if LOOKUP_SEP in filtered_relation.relation_name:
parts = filtered_relation.relation_name.split(LOOKUP_SEP)
filtered_relation_path, field, _, _ = self.names_to_path(
parts,
opts,
allow_many,
fail_on_missing,
)
path.extend(filtered_relation_path[:-1])
else:
field = opts.get_field(filtered_relation.relation_name)
if field is not None:
# Fields that contain one-to-many relations with a generic
# model (like a GenericForeignKey) cannot generate reverse
# relations and therefore cannot be used for reverse querying.
if field.is_relation and not field.related_model:
raise FieldError(
"Field %r does not generate an automatic reverse "
"relation and therefore cannot be used for reverse "
"querying. If it is a GenericForeignKey, consider "
"adding a GenericRelation." % name
)
try:
model = field.model._meta.concrete_model
except AttributeError:
# QuerySet.annotate() may introduce fields that aren't
# attached to a model.
model = None
else:
# We didn't find the current field, so move position back
# one step.
pos -= 1
if pos == -1 or fail_on_missing:
available = sorted(
[
*get_field_names_from_opts(opts),
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword '%s' into field. "
"Choices are: %s" % (name, ", ".join(available))
)
break
# Check if we need any joins for concrete inheritance cases (the
# field lives in parent, but we are currently in one of its
# children)
if opts is not None and model is not opts.model:
path_to_parent = opts.get_path_to_parent(model)
if path_to_parent:
path.extend(path_to_parent)
cur_names_with_path[1].extend(path_to_parent)
opts = path_to_parent[-1].to_opts
if hasattr(field, "path_infos"):
if filtered_relation:
pathinfos = field.get_path_info(filtered_relation)
else:
pathinfos = field.path_infos
if not allow_many:
for inner_pos, p in enumerate(pathinfos):
if p.m2m:
cur_names_with_path[1].extend(pathinfos[0 : inner_pos + 1])
names_with_path.append(cur_names_with_path)
raise MultiJoin(pos + 1, names_with_path)
last = pathinfos[-1]
path.extend(pathinfos)
final_field = last.join_field
opts = last.to_opts
targets = last.target_fields
cur_names_with_path[1].extend(pathinfos)
names_with_path.append(cur_names_with_path)
else:
# Local non-relational field.
final_field = field
targets = (field,)
if fail_on_missing and pos + 1 != len(names):
raise FieldError(
"Cannot resolve keyword %r into field. Join on '%s'"
" not permitted." % (names[pos + 1], name)
)
break
return path, final_field, targets, names[pos + 1 :]
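    # Example (illustrative models): resolving ["author", "name"] from
    # Book's Options yields the PathInfo for the author join, final_field =
    # Author.name, targets = (Author.name,), and an empty remainder; a
    # trailing unresolvable name such as "icontains" comes back in
    # names[pos + 1:].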
def setup_joins(
self,
names,
opts,
alias,
can_reuse=None,
allow_many=True,
reuse_with_filtered_relation=False,
):
"""
Compute the necessary table joins for the passage through the fields
given in 'names'. 'opts' is the Options class for the current model
(which gives the table we are starting from), 'alias' is the alias for
the table to start the joining from.
The 'can_reuse' defines the reverse foreign key joins we can reuse. It
can be None in which case all joins are reusable or a set of aliases
that can be reused. Note that non-reverse foreign keys are always
reusable when using setup_joins().
        If 'reuse_with_filtered_relation' is True, only the joins in
        'can_reuse' are reused, forcing the relation onto those connections.
If 'allow_many' is False, then any reverse foreign key seen will
generate a MultiJoin exception.
Return the final field involved in the joins, the target field (used
for any 'where' constraint), the final 'opts' value, the joins, the
field path traveled to generate the joins, and a transform function
that takes a field and alias and is equivalent to `field.get_col(alias)`
in the simple case but wraps field transforms if they were included in
names.
        The target field is the field containing the concrete value. The final
        field can be something different, for example a foreign key pointing to
        that value. The final field is needed, for example, in some value
        conversions (converting 'obj' in fk__id=obj to a pk value using the
        foreign key field).
"""
joins = [alias]
# The transform can't be applied yet, as joins must be trimmed later.
# To avoid making every caller of this method look up transforms
# directly, compute transforms here and create a partial that converts
# fields to the appropriate wrapped version.
def final_transformer(field, alias):
if not self.alias_cols:
alias = None
return field.get_col(alias)
# Try resolving all the names as fields first. If there's an error,
# treat trailing names as lookups until a field can be resolved.
last_field_exception = None
for pivot in range(len(names), 0, -1):
try:
path, final_field, targets, rest = self.names_to_path(
names[:pivot],
opts,
allow_many,
fail_on_missing=True,
)
except FieldError as exc:
if pivot == 1:
# The first item cannot be a lookup, so it's safe
# to raise the field error here.
raise
else:
last_field_exception = exc
else:
# The transforms are the remaining items that couldn't be
# resolved into fields.
transforms = names[pivot:]
break
for name in transforms:
def transform(field, alias, *, name, previous):
try:
wrapped = previous(field, alias)
return self.try_transform(wrapped, name)
except FieldError:
# FieldError is raised if the transform doesn't exist.
if isinstance(final_field, Field) and last_field_exception:
raise last_field_exception
else:
raise
final_transformer = functools.partial(
transform, name=name, previous=final_transformer
)
# Then, add the path to the query's joins. Note that we can't trim
# joins at this stage - we will need the information about join type
# of the trimmed joins.
for join in path:
if join.filtered_relation:
filtered_relation = join.filtered_relation.clone()
table_alias = filtered_relation.alias
else:
filtered_relation = None
table_alias = None
opts = join.to_opts
if join.direct:
nullable = self.is_nullable(join.join_field)
else:
nullable = True
connection = self.join_class(
opts.db_table,
alias,
table_alias,
INNER,
join.join_field,
nullable,
filtered_relation=filtered_relation,
)
reuse = can_reuse if join.m2m or reuse_with_filtered_relation else None
alias = self.join(
connection,
reuse=reuse,
reuse_with_filtered_relation=reuse_with_filtered_relation,
)
joins.append(alias)
if filtered_relation:
filtered_relation.path = joins[:]
return JoinInfo(final_field, targets, opts, joins, path, final_transformer)
def trim_joins(self, targets, joins, path):
"""
        The 'targets' parameter holds the final fields being joined to, 'joins'
        is the full list of join aliases. The 'path' contains the PathInfos
used to create the joins.
Return the final target field and table alias and the new active
joins.
Always trim any direct join if the target column is already in the
previous table. Can't trim reverse joins as it's unknown if there's
anything on the other side of the join.
"""
joins = joins[:]
for pos, info in enumerate(reversed(path)):
if len(joins) == 1 or not info.direct:
break
if info.filtered_relation:
break
join_targets = {t.column for t in info.join_field.foreign_related_fields}
cur_targets = {t.column for t in targets}
if not cur_targets.issubset(join_targets):
break
targets_dict = {
r[1].column: r[0]
for r in info.join_field.related_fields
if r[1].column in cur_targets
}
targets = tuple(targets_dict[t.column] for t in targets)
self.unref_alias(joins.pop())
return targets, joins[-1], joins
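    # Illustration (field names assumed): filter(author__id=5) needs no join
    # because the target column already lives on the lhs as book.author_id;
    # the join is trimmed, its alias unreffed, and the lookup targets the
    # local column instead.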
@classmethod
def _gen_cols(cls, exprs, include_external=False):
for expr in exprs:
if isinstance(expr, Col):
yield expr
elif include_external and callable(
getattr(expr, "get_external_cols", None)
):
yield from expr.get_external_cols()
elif hasattr(expr, "get_source_expressions"):
yield from cls._gen_cols(
expr.get_source_expressions(),
include_external=include_external,
)
@classmethod
def _gen_col_aliases(cls, exprs):
yield from (expr.alias for expr in cls._gen_cols(exprs))
def resolve_ref(self, name, allow_joins=True, reuse=None, summarize=False):
annotation = self.annotations.get(name)
if annotation is not None:
if not allow_joins:
for alias in self._gen_col_aliases([annotation]):
if isinstance(self.alias_map[alias], Join):
raise FieldError(
"Joined field references are not permitted in this query"
)
if summarize:
# Summarize currently means we are doing an aggregate() query
# which is executed as a wrapped subquery if any of the
# aggregate() elements reference an existing annotation. In
# that case we need to return a Ref to the subquery's annotation.
if name not in self.annotation_select:
raise FieldError(
"Cannot aggregate over the '%s' alias. Use annotate() "
"to promote it." % name
)
return Ref(name, self.annotation_select[name])
else:
return annotation
else:
field_list = name.split(LOOKUP_SEP)
annotation = self.annotations.get(field_list[0])
if annotation is not None:
for transform in field_list[1:]:
annotation = self.try_transform(annotation, transform)
return annotation
join_info = self.setup_joins(
field_list, self.get_meta(), self.get_initial_alias(), can_reuse=reuse
)
targets, final_alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if not allow_joins and len(join_list) > 1:
raise FieldError(
"Joined field references are not permitted in this query"
)
if len(targets) > 1:
raise FieldError(
"Referencing multicolumn fields with F() objects isn't supported"
)
# Verify that the last lookup in name is a field or a transform:
# transform_function() raises FieldError if not.
transform = join_info.transform_function(targets[0], final_alias)
if reuse is not None:
reuse.update(join_list)
return transform
def split_exclude(self, filter_expr, can_reuse, names_with_path):
"""
When doing an exclude against any kind of N-to-many relation, we need
to use a subquery. This method constructs the nested query, given the
original exclude filter (filter_expr) and the portion up to the first
N-to-many relation field.
        For example, if the original filter is ~Q(child__name='foo'), filter_expr
is ('child__name', 'foo') and can_reuse is a set of joins usable for
filters in the original query.
        We will turn this into the equivalent of:
WHERE NOT EXISTS(
SELECT 1
FROM child
WHERE name = 'foo' AND child.parent_id = parent.id
LIMIT 1
)
"""
# Generate the inner query.
query = self.__class__(self.model)
query._filtered_relations = self._filtered_relations
filter_lhs, filter_rhs = filter_expr
if isinstance(filter_rhs, OuterRef):
filter_rhs = OuterRef(filter_rhs)
elif isinstance(filter_rhs, F):
filter_rhs = OuterRef(filter_rhs.name)
query.add_filter(filter_lhs, filter_rhs)
query.clear_ordering(force=True)
        # Try to have as simple a subquery as possible -> trim leading joins
        # from the subquery.
trimmed_prefix, contains_louter = query.trim_start(names_with_path)
col = query.select[0]
select_field = col.target
alias = col.alias
if alias in can_reuse:
pk = select_field.model._meta.pk
            # Need to add a restriction so that the outer query's filters are
            # in effect for the subquery, too.
query.bump_prefix(self)
lookup_class = select_field.get_lookup("exact")
# Note that the query.select[0].alias is different from alias
# due to bump_prefix above.
lookup = lookup_class(pk.get_col(query.select[0].alias), pk.get_col(alias))
query.where.add(lookup, AND)
query.external_aliases[alias] = True
lookup_class = select_field.get_lookup("exact")
lookup = lookup_class(col, ResolvedOuterRef(trimmed_prefix))
query.where.add(lookup, AND)
condition, needed_inner = self.build_filter(Exists(query))
if contains_louter:
or_null_condition, _ = self.build_filter(
("%s__isnull" % trimmed_prefix, True),
current_negated=True,
branch_negated=True,
can_reuse=can_reuse,
)
condition.add(or_null_condition, OR)
# Note that the end result will be:
# (outercol NOT IN innerq AND outercol IS NOT NULL) OR outercol IS NULL.
# This might look crazy but due to how IN works, this seems to be
# correct. If the IS NOT NULL check is removed then outercol NOT
# IN will return UNKNOWN. If the IS NULL check is removed, then if
# outercol IS NULL we will not match the row.
return condition, needed_inner
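    # Trigger example (editor's note, models assumed):
    # Parent.objects.exclude(child__name="foo") routes through here because
    # "child" is a reverse one-to-many relation, producing the NOT EXISTS
    # query sketched in the docstring.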
def set_empty(self):
self.where.add(NothingNode(), AND)
for query in self.combined_queries:
query.set_empty()
def is_empty(self):
return any(isinstance(c, NothingNode) for c in self.where.children)
def set_limits(self, low=None, high=None):
"""
Adjust the limits on the rows retrieved. Use low/high to set these,
as it makes it more Pythonic to read and write. When the SQL query is
created, convert them to the appropriate offset and limit values.
Apply any limits passed in here to the existing constraints. Add low
to the current low value and clamp both to any existing high value.
"""
if high is not None:
if self.high_mark is not None:
self.high_mark = min(self.high_mark, self.low_mark + high)
else:
self.high_mark = self.low_mark + high
if low is not None:
if self.high_mark is not None:
self.low_mark = min(self.high_mark, self.low_mark + low)
else:
self.low_mark = self.low_mark + low
if self.low_mark == self.high_mark:
self.set_empty()
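    # Worked example: qs[5:15] sets low_mark=5, high_mark=15; slicing that
    # result again with [2:4] yields high_mark=min(15, 5 + 4)=9 and then
    # low_mark=min(9, 5 + 2)=7, i.e. later slices compose relative to the
    # existing window.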
def clear_limits(self):
"""Clear any existing limits."""
self.low_mark, self.high_mark = 0, None
@property
def is_sliced(self):
return self.low_mark != 0 or self.high_mark is not None
def has_limit_one(self):
return self.high_mark is not None and (self.high_mark - self.low_mark) == 1
def can_filter(self):
"""
Return True if adding filters to this instance is still possible.
Typically, this means no limits or offsets have been put on the results.
"""
return not self.is_sliced
def clear_select_clause(self):
"""Remove all fields from SELECT clause."""
self.select = ()
self.default_cols = False
self.select_related = False
self.set_extra_mask(())
self.set_annotation_mask(())
def clear_select_fields(self):
"""
Clear the list of fields to select (but not extra_select columns).
Some queryset types completely replace any existing list of select
columns.
"""
self.select = ()
self.values_select = ()
def add_select_col(self, col, name):
self.select += (col,)
self.values_select += (name,)
def set_select(self, cols):
self.default_cols = False
self.select = tuple(cols)
def add_distinct_fields(self, *field_names):
"""
Add and resolve the given fields to the query's "distinct on" clause.
"""
self.distinct_fields = field_names
self.distinct = True
def add_fields(self, field_names, allow_m2m=True):
"""
Add the given (model) fields to the select set. Add the field names in
the order specified.
"""
alias = self.get_initial_alias()
opts = self.get_meta()
try:
cols = []
for name in field_names:
                # Join promotion note - we must not remove any rows here, so
                # if there are no existing joins, use an outer join.
join_info = self.setup_joins(
name.split(LOOKUP_SEP), opts, alias, allow_many=allow_m2m
)
targets, final_alias, joins = self.trim_joins(
join_info.targets,
join_info.joins,
join_info.path,
)
for target in targets:
cols.append(join_info.transform_function(target, final_alias))
if cols:
self.set_select(cols)
except MultiJoin:
raise FieldError("Invalid field name: '%s'" % name)
except FieldError:
if LOOKUP_SEP in name:
# For lookups spanning over relationships, show the error
# from the model on which the lookup failed.
raise
elif name in self.annotations:
raise FieldError(
"Cannot select the '%s' alias. Use annotate() to promote "
"it." % name
)
else:
names = sorted(
[
*get_field_names_from_opts(opts),
*self.extra,
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword %r into field. "
"Choices are: %s" % (name, ", ".join(names))
)
def add_ordering(self, *ordering):
"""
Add items from the 'ordering' sequence to the query's "order by"
clause. These items are either field names (not column names) --
possibly with a '-' direction prefix or '?' for random ordering -- or
OrderBy expressions.
If 'ordering' is empty, clear all ordering from the query.
"""
errors = []
for item in ordering:
if isinstance(item, str):
if item == "?":
continue
if item.startswith("-"):
item = item[1:]
if item in self.annotations:
continue
if self.extra and item in self.extra:
continue
# names_to_path() validates the lookup. A descriptive
# FieldError will be raised if it's not.
self.names_to_path(item.split(LOOKUP_SEP), self.model._meta)
elif not hasattr(item, "resolve_expression"):
errors.append(item)
if getattr(item, "contains_aggregate", False):
raise FieldError(
"Using an aggregate in order_by() without also including "
"it in annotate() is not allowed: %s" % item
)
if errors:
raise FieldError("Invalid order_by arguments: %s" % errors)
if ordering:
self.order_by += ordering
else:
self.default_ordering = False
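# Hypothetical usage sketch (not from the source): each item may be a
# (possibly '-'-prefixed) field name, '?' for random ordering, or a
# resolvable expression, e.g.
#   query.add_ordering('name', '-created')
#   query.add_ordering(Length('name').asc())  # any OrderBy-capable expression
# Calling add_ordering() with no items disables the model's default ordering.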
def clear_ordering(self, force=False, clear_default=True):
"""
Remove any ordering settings if the current query allows it without
side effects; set 'force' to True to clear the ordering regardless.
If 'clear_default' is True, there will be no ordering in the resulting
query (not even the model's default).
"""
if not force and (
self.is_sliced or self.distinct_fields or self.select_for_update
):
return
self.order_by = ()
self.extra_order_by = ()
if clear_default:
self.default_ordering = False
def set_group_by(self, allow_aliases=True):
"""
Expand the GROUP BY clause required by the query.
This will usually be the set of all non-aggregate fields in the
return data. If the database backend supports grouping by the
primary key, and the query would be equivalent, the optimization
will be made automatically.
"""
# Column names from JOINs to check collisions with aliases.
if allow_aliases:
column_names = set()
seen_models = set()
for join in list(self.alias_map.values())[1:]: # Skip base table.
model = join.join_field.related_model
if model not in seen_models:
column_names.update(
{field.column for field in model._meta.local_concrete_fields}
)
seen_models.add(model)
group_by = list(self.select)
if self.annotation_select:
for alias, annotation in self.annotation_select.items():
if not allow_aliases or alias in column_names:
alias = None
group_by_cols = annotation.get_group_by_cols(alias=alias)
group_by.extend(group_by_cols)
self.group_by = tuple(group_by)
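# Illustrative example (assumed): for
#   Book.objects.values('genre').annotate(n=Count('pk'))
# group_by starts from the selected 'genre' column; the Count() aggregate
# contributes no columns of its own (its get_group_by_cols() is empty), and
# an annotation alias is dropped whenever it collides with a column name
# from a joined table.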
def add_select_related(self, fields):
"""
Set up the select_related data structure so that we only select
certain related models (as opposed to all models, when
self.select_related=True).
"""
if isinstance(self.select_related, bool):
field_dict = {}
else:
field_dict = self.select_related
for field in fields:
d = field_dict
for part in field.split(LOOKUP_SEP):
d = d.setdefault(part, {})
self.select_related = field_dict
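# Sketch (assumed example): add_select_related(['author__publisher', 'editor'])
# builds the nested dict {'author': {'publisher': {}}, 'editor': {}}, the same
# structure produced by select_related('author__publisher', 'editor').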
def add_extra(self, select, select_params, where, params, tables, order_by):
"""
Add data to the various extra_* attributes for user-created additions
to the query.
"""
if select:
# We need to pair any placeholder markers in the 'select'
# dictionary with their parameters in 'select_params' so that
# subsequent updates to the select dictionary also adjust the
# parameters appropriately.
select_pairs = {}
if select_params:
param_iter = iter(select_params)
else:
param_iter = iter([])
for name, entry in select.items():
self.check_alias(name)
entry = str(entry)
entry_params = []
pos = entry.find("%s")
while pos != -1:
if pos == 0 or entry[pos - 1] != "%":
entry_params.append(next(param_iter))
pos = entry.find("%s", pos + 2)
select_pairs[name] = (entry, entry_params)
self.extra.update(select_pairs)
if where or params:
self.where.add(ExtraWhere(where, params), AND)
if tables:
self.extra_tables += tuple(tables)
if order_by:
self.extra_order_by = order_by
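# Sketch (assumed example): extra(select={'is_recent': 'pub_date > %s'},
# select_params=[cutoff]) pairs the single '%s' with cutoff, while an escaped
# '%%s' in an entry is skipped by the preceding-'%' check and consumes no
# parameter.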
def clear_deferred_loading(self):
"""Remove any fields from the deferred loading set."""
self.deferred_loading = (frozenset(), True)
def add_deferred_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
exclude from loading from the database when automatic column selection
is done. Add the new field names to any existing field names that
are deferred (or removed from any existing field names that are marked
as the only ones for immediate loading).
"""
# Fields on related models are stored in the literal double-underscore
# format, so that we can use a set data structure. We do the foo__bar
# splitting and handling when computing the SQL column names (as part of
# get_columns()).
existing, defer = self.deferred_loading
if defer:
# Add to existing deferred names.
self.deferred_loading = existing.union(field_names), True
else:
# Remove names from the set of any existing "immediate load" names.
if new_existing := existing.difference(field_names):
self.deferred_loading = new_existing, False
else:
self.clear_deferred_loading()
if new_only := set(field_names).difference(existing):
self.deferred_loading = new_only, True
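# Illustrative state transitions (assumed examples):
#   .defer('a') then .defer('b'):     ({'a'}, True)       -> ({'a', 'b'}, True)
#   .only('a', 'b') then .defer('b'): ({'a', 'b'}, False) -> ({'a'}, False)
# In the second case 'b' is simply removed from the immediate-load set, so
# 'a' remains the only field loaded up front.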
def add_immediate_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
retrieve when the SQL is executed ("immediate loading" fields). The
field names replace any existing immediate loading field names. If
there are field names already specified for deferred loading, remove
those names from the new field_names before storing the new names
for immediate loading. (That is, immediate loading overrides any
existing immediate values, but respects existing deferrals.)
"""
existing, defer = self.deferred_loading
field_names = set(field_names)
if "pk" in field_names:
field_names.remove("pk")
field_names.add(self.get_meta().pk.name)
if defer:
# Remove any existing deferred names from the current set before
# setting the new names.
self.deferred_loading = field_names.difference(existing), False
else:
# Replace any existing "immediate load" field names.
self.deferred_loading = frozenset(field_names), False
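# Sketch (assumed example): 'pk' is translated to the concrete primary key
# name, so on a model whose pk field is named 'id',
# add_immediate_loading(['pk', 'name']) stores ({'id', 'name'}, False),
# replacing any previous immediate-load set.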
def set_annotation_mask(self, names):
"""Set the mask of annotations that will be returned by the SELECT."""
if names is None:
self.annotation_select_mask = None
else:
self.annotation_select_mask = set(names)
self._annotation_select_cache = None
def append_annotation_mask(self, names):
if self.annotation_select_mask is not None:
self.set_annotation_mask(self.annotation_select_mask.union(names))
def set_extra_mask(self, names):
"""
Set the mask of extra select items that will be returned by SELECT.
Don't remove them from the Query since they might be used later.
"""
if names is None:
self.extra_select_mask = None
else:
self.extra_select_mask = set(names)
self._extra_select_cache = None
def set_values(self, fields):
self.select_related = False
self.clear_deferred_loading()
self.clear_select_fields()
if fields:
field_names = []
extra_names = []
annotation_names = []
if not self.extra and not self.annotations:
# Shortcut - if there are no extra or annotations, then
# the values() clause must be just field names.
field_names = list(fields)
else:
self.default_cols = False
for f in fields:
if f in self.extra_select:
extra_names.append(f)
elif f in self.annotation_select:
annotation_names.append(f)
else:
field_names.append(f)
self.set_extra_mask(extra_names)
self.set_annotation_mask(annotation_names)
selected = frozenset(field_names + extra_names + annotation_names)
else:
field_names = [f.attname for f in self.model._meta.concrete_fields]
selected = frozenset(field_names)
# Selected annotations must be known before setting the GROUP BY
# clause.
if self.group_by is True:
self.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
self.set_group_by(allow_aliases=False)
self.clear_select_fields()
elif self.group_by:
# Resolve GROUP BY annotation references if they are not part of
# the selected fields anymore.
group_by = []
for expr in self.group_by:
if isinstance(expr, Ref) and expr.refs not in selected:
expr = self.annotations[expr.refs]
group_by.append(expr)
self.group_by = tuple(group_by)
self.values_select = tuple(field_names)
self.add_fields(field_names, True)
@property
def annotation_select(self):
"""
Return the dictionary of aggregate columns that are not masked and
should be used in the SELECT clause. Cache this result for performance.
"""
if self._annotation_select_cache is not None:
return self._annotation_select_cache
elif not self.annotations:
return {}
elif self.annotation_select_mask is not None:
self._annotation_select_cache = {
k: v
for k, v in self.annotations.items()
if k in self.annotation_select_mask
}
return self._annotation_select_cache
else:
return self.annotations
@property
def extra_select(self):
if self._extra_select_cache is not None:
return self._extra_select_cache
if not self.extra:
return {}
elif self.extra_select_mask is not None:
self._extra_select_cache = {
k: v for k, v in self.extra.items() if k in self.extra_select_mask
}
return self._extra_select_cache
else:
return self.extra
def trim_start(self, names_with_path):
"""
Trim joins from the start of the join path. The candidates for trim
are the PathInfos in names_with_path structure that are m2m joins.
Also set the select column so the start matches the join.
This method is meant to be used for generating the subquery joins &
cols in split_exclude().
Return a lookup usable for doing outerq.filter(lookup=self) and a
boolean indicating if the joins in the prefix contain a LEFT OUTER join.
_"""
all_paths = []
for _, paths in names_with_path:
all_paths.extend(paths)
contains_louter = False
# Trim and operate only on tables that were generated for
# the lookup part of the query. That is, avoid trimming
# joins generated for F() expressions.
lookup_tables = [
t for t in self.alias_map if t in self._lookup_joins or t == self.base_table
]
for trimmed_paths, path in enumerate(all_paths):
if path.m2m:
break
if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type == LOUTER:
contains_louter = True
alias = lookup_tables[trimmed_paths]
self.unref_alias(alias)
# The path.join_field is a Rel; let's get the other side's field.
join_field = path.join_field.field
# Build the filter prefix.
paths_in_prefix = trimmed_paths
trimmed_prefix = []
for name, path in names_with_path:
if paths_in_prefix - len(path) < 0:
break
trimmed_prefix.append(name)
paths_in_prefix -= len(path)
trimmed_prefix.append(join_field.foreign_related_fields[0].name)
trimmed_prefix = LOOKUP_SEP.join(trimmed_prefix)
# Let's still see if we can trim the first join from the inner query
# (that is, self). We can't do this for:
# - LEFT JOINs because we would miss those rows that have nothing on
# the outer side,
# - INNER JOINs from filtered relations because we would miss their
# filters.
first_join = self.alias_map[lookup_tables[trimmed_paths + 1]]
if first_join.join_type != LOUTER and not first_join.filtered_relation:
select_fields = [r[0] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths + 1]
self.unref_alias(lookup_tables[trimmed_paths])
extra_restriction = join_field.get_extra_restriction(
None, lookup_tables[trimmed_paths + 1]
)
if extra_restriction:
self.where.add(extra_restriction, AND)
else:
# TODO: It might be possible to trim more joins from the start of the
# inner query if it happens to have a longer join chain containing the
# values in select_fields. Let's punt this one for now.
select_fields = [r[1] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths]
# The found starting point is likely a join_class instead of a
# base_table_class reference. But the first entry in the query's FROM
# clause must not be a JOIN.
for table in self.alias_map:
if self.alias_refcount[table] > 0:
self.alias_map[table] = self.base_table_class(
self.alias_map[table].table_name,
table,
)
break
self.set_select([f.get_col(select_alias) for f in select_fields])
return trimmed_prefix, contains_louter
def is_nullable(self, field):
"""
Check if the given field should be treated as nullable.
Some backends treat '' as null and Django treats such fields as
nullable for those backends. In such situations field.null can be
False even if we should treat the field as nullable.
"""
# We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have
# (nor should it have) knowledge of which connection is going to be
# used. The proper fix would be to defer all decisions where
# is_nullable() is needed to the compiler stage, but that is not easy
# to do currently.
return field.null or (
field.empty_strings_allowed
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
)
def get_order_dir(field, default="ASC"):
"""
Return the field name and direction for an order specification. For
example, '-foo' is returned as ('foo', 'DESC').
The 'default' param is used to indicate which way no prefix (or a '+'
prefix) should sort. The '-' prefix always sorts the opposite way.
"""
dirn = ORDER_DIR[default]
if field[0] == "-":
return field[1:], dirn[1]
return field, dirn[0]
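# Examples (illustrative):
#   get_order_dir('foo')          == ('foo', 'ASC')
#   get_order_dir('-foo')         == ('foo', 'DESC')
#   get_order_dir('foo', 'DESC')  == ('foo', 'DESC')  # unprefixed follows default
#   get_order_dir('-foo', 'DESC') == ('foo', 'ASC')   # '-' flips the default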
def add_to_dict(data, key, value):
"""
Add "value" to the set of values for "key", whether or not "key" already
exists.
"""
if key in data:
data[key].add(value)
else:
data[key] = {value}
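# Example (illustrative): starting from data = {},
#   add_to_dict(data, 'k', 1)  ->  {'k': {1}}
#   add_to_dict(data, 'k', 2)  ->  {'k': {1, 2}}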
def is_reverse_o2o(field):
"""
Check if the given field is reverse-o2o. The field is expected to be some
sort of relation field or related object.
"""
return field.is_relation and field.one_to_one and not field.concrete
class JoinPromoter:
"""
A class to abstract away join promotion problems for complex filter
conditions.
"""
def __init__(self, connector, num_children, negated):
self.connector = connector
self.negated = negated
if self.negated:
if connector == AND:
self.effective_connector = OR
else:
self.effective_connector = AND
else:
self.effective_connector = self.connector
self.num_children = num_children
# Maps of table alias to how many times it is seen as required for
# inner and/or outer joins.
self.votes = Counter()
def __repr__(self):
return (
f"{self.__class__.__qualname__}(connector={self.connector!r}, "
f"num_children={self.num_children!r}, negated={self.negated!r})"
)
def add_votes(self, votes):
"""
Add a single vote per item to self.votes. The parameter can be any
iterable.
"""
self.votes.update(votes)
def update_join_types(self, query):
"""
Change join types so that the generated query is as efficient as
possible, but still correct. So, change as many joins as possible
to INNER, but don't make OUTER joins INNER if that could remove
results from the query.
"""
to_promote = set()
to_demote = set()
# The effective_connector is used so that NOT (a AND b) is treated
# similarly to (a OR b) for join promotion.
for table, votes in self.votes.items():
# We must use outer joins in OR case when the join isn't contained
# in all of the joins. Otherwise the INNER JOIN itself could remove
# valid results. Consider the case where a model with rel_a and
# rel_b relations is queried with rel_a__col=1 | rel_b__col=2. Now,
# if the rel_a join doesn't produce any results (its output is null, for
# example with a reverse foreign key or a null value in a direct foreign
# key), and
# there is a matching row in rel_b with col=2, then an INNER join
# to rel_a would remove a valid match from the query. So, we need
# to promote any existing INNER to LOUTER (it is possible this
# promotion in turn will be demoted later on).
if self.effective_connector == "OR" and votes < self.num_children:
to_promote.add(table)
# If connector is AND and there is a filter that can match only
# when there is a joinable row, then use INNER. For example, in
# rel_a__col=1 & rel_b__col=2, if either of the rels produce NULL
# as join output, then the col=1 or col=2 can't match (as
# NULL=anything is always false).
# For the OR case, if all children voted for a join to be inner,
# then we can use INNER for the join. For example:
# (rel_a__col__icontains=Alex | rel_a__col__icontains=Russell)
# then if rel_a doesn't produce any rows, the whole condition
# can't match. Hence we can safely use INNER join.
if self.effective_connector == "AND" or (
self.effective_connector == "OR" and votes == self.num_children
):
to_demote.add(table)
# Finally, what happens in cases where we have:
# (rel_a__col=1|rel_b__col=2) & rel_a__col__gte=0
# Now, we first generate the OR clause, and promote joins for it
# in the first if branch above. Both rel_a and rel_b are promoted
# to LOUTER joins. After that we do the AND case. The OR case
# voted no inner joins but the rel_a__col__gte=0 votes inner join
# for rel_a. We demote it back to INNER join (in AND case a single
# vote is enough). The demotion is OK, if rel_a doesn't produce
# rows, then the rel_a__col__gte=0 clause can't be true, and thus
# the whole clause must be false. So, it is safe to use INNER
# join.
# Note that in this example we could just as well have the __gte
# clause and the OR clause swapped. Or we could replace the __gte
# clause with an OR clause containing rel_a__col=1|rel_a__col=2,
# and again we could safely demote to INNER.
query.promote_joins(to_promote)
query.demote_joins(to_demote)
return to_demote
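# Usage sketch (assumed, mirroring how Query builds filters): for
# Q(rel_a__col=1) | Q(rel_b__col=2), the promoter would be constructed as
# JoinPromoter(OR, num_children=2, negated=False); each child then votes for
# the joins it needs via add_votes(), and update_join_types() promotes both
# rel_a and rel_b to LOUTER because neither join got votes from all children.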
2275dfabb4ea9200d946e6ea2debcfc18766b5b736421a166eb7f2d278c79bfe | import collections
import json
import re
from functools import partial
from itertools import chain
from django.core.exceptions import EmptyResultSet, FieldError
from django.db import DatabaseError, NotSupportedError
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import F, OrderBy, RawSQL, Ref, Value
from django.db.models.functions import Cast, Random
from django.db.models.query_utils import select_related_descend
from django.db.models.sql.constants import (
CURSOR,
GET_ITERATOR_CHUNK_SIZE,
MULTI,
NO_RESULTS,
ORDER_DIR,
SINGLE,
)
from django.db.models.sql.query import Query, get_order_dir
from django.db.transaction import TransactionManagementError
from django.utils.functional import cached_property
from django.utils.hashable import make_hashable
from django.utils.regex_helper import _lazy_re_compile
class SQLCompiler:
# Multiline ordering SQL clause may appear from RawSQL.
ordering_parts = _lazy_re_compile(
r"^(.*)\s(?:ASC|DESC).*",
re.MULTILINE | re.DOTALL,
)
def __init__(self, query, connection, using, elide_empty=True):
self.query = query
self.connection = connection
self.using = using
# Some queries, e.g. coalesced aggregation, need to be executed even if
# they would return an empty result set.
self.elide_empty = elide_empty
self.quote_cache = {"*": "*"}
# The select, klass_info, and annotations are needed by QuerySet.iterator();
# they are set as a side effect of executing the query. Note that we
# separately calculate a list of extra select columns needed for grammatical
# correctness of the query, but these columns are not included in self.select.
self.select = None
self.annotation_col_map = None
self.klass_info = None
self._meta_ordering = None
def __repr__(self):
return (
f"<{self.__class__.__qualname__} "
f"model={self.query.model.__qualname__} "
f"connection={self.connection!r} using={self.using!r}>"
)
def setup_query(self):
if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map):
self.query.get_initial_alias()
self.select, self.klass_info, self.annotation_col_map = self.get_select()
self.col_count = len(self.select)
def pre_sql_setup(self):
"""
Do any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
"""
self.setup_query()
order_by = self.get_order_by()
self.where, self.having = self.query.where.split_having()
extra_select = self.get_extra_select(order_by, self.select)
self.has_extra_select = bool(extra_select)
group_by = self.get_group_by(self.select + extra_select, order_by)
return extra_select, order_by, group_by
def get_group_by(self, select, order_by):
"""
Return a list of 2-tuples of form (sql, params).
The logic of what exactly the GROUP BY clause contains is hard
to describe in other words than "if it passes the test suite,
then it is correct".
"""
# Some examples:
# SomeModel.objects.annotate(Count('somecol'))
# GROUP BY: all fields of the model
#
# SomeModel.objects.values('name').annotate(Count('somecol'))
# GROUP BY: name
#
# SomeModel.objects.annotate(Count('somecol')).values('name')
# GROUP BY: all cols of the model
#
# SomeModel.objects.values('name', 'pk')
# .annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# In fact, the self.query.group_by is the minimal set to GROUP BY. It
# can't be ever restricted to a smaller set, but additional columns in
# HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately
# the end result is that it is impossible to force the query to have
# a chosen GROUP BY clause - you can almost do this by using the form:
# .values(*wanted_cols).annotate(AnAggregate())
# but any later annotations, extra selects, values calls that
# refer some column outside of the wanted_cols, order_by, or even
# filter calls can alter the GROUP BY clause.
# The query.group_by is either None (no GROUP BY at all), True
# (group by select fields), or a list of expressions to be added
# to the group by.
if self.query.group_by is None:
return []
expressions = []
if self.query.group_by is not True:
# If the group by is set to a list (by .values() call most likely),
# then we need to add everything in it to the GROUP BY clause.
# Backwards compatibility hack for setting query.group_by. Remove
# when we have public API way of forcing the GROUP BY clause.
# Converts string references to expressions.
for expr in self.query.group_by:
if not hasattr(expr, "as_sql"):
expressions.append(self.query.resolve_ref(expr))
else:
expressions.append(expr)
# Note that even if the group_by is set, it is only the minimal
# set to group by. So, we need to add cols in select, order_by, and
# having into the select in any case.
ref_sources = {expr.source for expr in expressions if isinstance(expr, Ref)}
for expr, _, _ in select:
# Skip members of the select clause that are already included
# by reference.
if expr in ref_sources:
continue
cols = expr.get_group_by_cols()
for col in cols:
expressions.append(col)
if not self._meta_ordering:
for expr, (sql, params, is_ref) in order_by:
# Skip references to the SELECT clause, as all expressions in
# the SELECT clause are already part of the GROUP BY.
if not is_ref:
expressions.extend(expr.get_group_by_cols())
having_group_by = self.having.get_group_by_cols() if self.having else ()
for expr in having_group_by:
expressions.append(expr)
result = []
seen = set()
expressions = self.collapse_group_by(expressions, having_group_by)
for expr in expressions:
sql, params = self.compile(expr)
sql, params = expr.select_format(self, sql, params)
params_hash = make_hashable(params)
if (sql, params_hash) not in seen:
result.append((sql, params))
seen.add((sql, params_hash))
return result
def collapse_group_by(self, expressions, having):
# If the DB can group by primary key, then group by the primary key of
# query's main model. Note that for PostgreSQL the GROUP BY clause must
# include the primary key of every table, but for MySQL it is enough to
# have the main table's primary key.
if self.connection.features.allows_group_by_pk:
# Determine if the main model's primary key is in the query.
pk = None
for expr in expressions:
# Is this a reference to the query's base table primary key? If the
# expression isn't Col-like, then skip it.
if (
getattr(expr, "target", None) == self.query.model._meta.pk
and getattr(expr, "alias", None) == self.query.base_table
):
pk = expr
break
# If the main model's primary key is in the query, group by that
# field, HAVING expressions, and expressions associated with tables
# that don't have a primary key included in the grouped columns.
if pk:
pk_aliases = {
expr.alias
for expr in expressions
if hasattr(expr, "target") and expr.target.primary_key
}
expressions = [pk] + [
expr
for expr in expressions
if expr in having
or (
getattr(expr, "alias", None) is not None
and expr.alias not in pk_aliases
)
]
elif self.connection.features.allows_group_by_selected_pks:
# Filter out all expressions associated with a table's primary key
# present in the grouped columns. This is done by identifying all
# tables that have their primary key included in the grouped
# columns and removing non-primary key columns referring to them.
# Unmanaged models are excluded because they could be representing
# database views on which the optimization might not be allowed.
pks = {
expr
for expr in expressions
if (
hasattr(expr, "target")
and expr.target.primary_key
and self.connection.features.allows_group_by_selected_pks_on_model(
expr.target.model
)
)
}
aliases = {expr.alias for expr in pks}
expressions = [
expr
for expr in expressions
if expr in pks or getattr(expr, "alias", None) not in aliases
]
return expressions
def get_select(self):
"""
Return three values:
- a list of 3-tuples of (expression, (sql, params), alias)
- a klass_info structure,
- a dictionary of annotations
The (sql, params) is what the expression will produce, and alias is the
"AS alias" for the column (possibly None).
The klass_info structure contains the following information:
- The base model of the query.
- Which columns for that model are present in the query (by
position in the select clause).
- related_klass_infos: [f, klass_info] to descend into.
The annotations value is a dictionary of {'attname': column position} items.
"""
select = []
klass_info = None
annotations = {}
select_idx = 0
for alias, (sql, params) in self.query.extra_select.items():
annotations[alias] = select_idx
select.append((RawSQL(sql, params), alias))
select_idx += 1
assert not (self.query.select and self.query.default_cols)
if self.query.default_cols:
cols = self.get_default_columns()
else:
# self.query.select is a special case. These columns never go to
# any model.
cols = self.query.select
if cols:
select_list = []
for col in cols:
select_list.append(select_idx)
select.append((col, None))
select_idx += 1
klass_info = {
"model": self.query.model,
"select_fields": select_list,
}
for alias, annotation in self.query.annotation_select.items():
annotations[alias] = select_idx
select.append((annotation, alias))
select_idx += 1
if self.query.select_related:
related_klass_infos = self.get_related_selections(select)
klass_info["related_klass_infos"] = related_klass_infos
def get_select_from_parent(klass_info):
for ki in klass_info["related_klass_infos"]:
if ki["from_parent"]:
ki["select_fields"] = (
klass_info["select_fields"] + ki["select_fields"]
)
get_select_from_parent(ki)
get_select_from_parent(klass_info)
ret = []
for col, alias in select:
try:
sql, params = self.compile(col)
except EmptyResultSet:
empty_result_set_value = getattr(
col, "empty_result_set_value", NotImplemented
)
if empty_result_set_value is NotImplemented:
# Select a predicate that's always False.
sql, params = "0", ()
else:
sql, params = self.compile(Value(empty_result_set_value))
else:
sql, params = col.select_format(self, sql, params)
ret.append((col, (sql, params), alias))
return ret, klass_info, annotations
def _order_by_pairs(self):
if self.query.extra_order_by:
ordering = self.query.extra_order_by
elif not self.query.default_ordering:
ordering = self.query.order_by
elif self.query.order_by:
ordering = self.query.order_by
elif (meta := self.query.get_meta()) and meta.ordering:
ordering = meta.ordering
self._meta_ordering = ordering
else:
ordering = []
if self.query.standard_ordering:
default_order, _ = ORDER_DIR["ASC"]
else:
default_order, _ = ORDER_DIR["DESC"]
for field in ordering:
if hasattr(field, "resolve_expression"):
if isinstance(field, Value):
# output_field must be resolved for constants.
field = Cast(field, field.output_field)
if not isinstance(field, OrderBy):
field = field.asc()
if not self.query.standard_ordering:
field = field.copy()
field.reverse_ordering()
yield field, False
continue
if field == "?": # random
yield OrderBy(Random()), False
continue
col, order = get_order_dir(field, default_order)
descending = order == "DESC"
if col in self.query.annotation_select:
# Reference to expression in SELECT clause
yield (
OrderBy(
Ref(col, self.query.annotation_select[col]),
descending=descending,
),
True,
)
continue
if col in self.query.annotations:
# References to an expression which is masked out of the SELECT
# clause.
if self.query.combinator and self.select:
# Don't use the resolved annotation because other
# combinated queries might define it differently.
expr = F(col)
else:
expr = self.query.annotations[col]
if isinstance(expr, Value):
# output_field must be resolved for constants.
expr = Cast(expr, expr.output_field)
yield OrderBy(expr, descending=descending), False
continue
if "." in field:
# This came in through an extra(order_by=...) addition. Pass it
# on verbatim.
table, col = col.split(".", 1)
yield (
OrderBy(
RawSQL(
"%s.%s" % (self.quote_name_unless_alias(table), col), []
),
descending=descending,
),
False,
)
continue
if self.query.extra and col in self.query.extra:
if col in self.query.extra_select:
yield (
OrderBy(
Ref(col, RawSQL(*self.query.extra[col])),
descending=descending,
),
True,
)
else:
yield (
OrderBy(RawSQL(*self.query.extra[col]), descending=descending),
False,
)
else:
if self.query.combinator and self.select:
# Don't use the first model's field because other
# combinated queries might define it differently.
yield OrderBy(F(col), descending=descending), False
else:
# 'col' is of the form 'field' or 'field1__field2' or
# '-field1__field2__field', etc.
yield from self.find_ordering_name(
field,
self.query.get_meta(),
default_order=default_order,
)
def get_order_by(self):
"""
Return a list of 2-tuples of the form (expr, (sql, params, is_ref)) for
the ORDER BY clause.
The order_by clause can alter the select clause (for example it can add
aliases to clauses that do not yet have one, or it can add totally new
select clauses).
"""
result = []
seen = set()
for expr, is_ref in self._order_by_pairs():
resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None)
if self.query.combinator and self.select:
src = resolved.get_source_expressions()[0]
expr_src = expr.get_source_expressions()[0]
# Relabel order by columns to raw numbers if this is a combined
# query; necessary since the columns can't be referenced by the
# fully qualified name and the simple column names may collide.
for idx, (sel_expr, _, col_alias) in enumerate(self.select):
if is_ref and col_alias == src.refs:
src = src.source
elif col_alias and not (
isinstance(expr_src, F) and col_alias == expr_src.name
):
continue
if src == sel_expr:
resolved.set_source_expressions([RawSQL("%d" % (idx + 1), ())])
break
else:
if col_alias:
raise DatabaseError(
"ORDER BY term does not match any column in the result set."
)
# Add column used in ORDER BY clause to the selected
# columns and to each combined query.
order_by_idx = len(self.query.select) + 1
col_name = f"__orderbycol{order_by_idx}"
for q in self.query.combined_queries:
q.add_annotation(expr_src, col_name)
self.query.add_select_col(resolved, col_name)
resolved.set_source_expressions([RawSQL(f"{order_by_idx}", ())])
sql, params = self.compile(resolved)
# Don't add the same column twice, but the order direction is
# not taken into account so we strip it. When this entire method
# is refactored into expressions, then we can check each part as we
# generate it.
without_ordering = self.ordering_parts.search(sql)[1]
params_hash = make_hashable(params)
if (without_ordering, params_hash) in seen:
continue
seen.add((without_ordering, params_hash))
result.append((resolved, (sql, params, is_ref)))
return result
def get_extra_select(self, order_by, select):
extra_select = []
if self.query.distinct and not self.query.distinct_fields:
select_sql = [t[1] for t in select]
for expr, (sql, params, is_ref) in order_by:
without_ordering = self.ordering_parts.search(sql)[1]
if not is_ref and (without_ordering, params) not in select_sql:
extra_select.append((expr, (without_ordering, params), None))
return extra_select
def quote_name_unless_alias(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
quoted strings specially (e.g. PostgreSQL).
"""
if name in self.quote_cache:
return self.quote_cache[name]
if (
(name in self.query.alias_map and name not in self.query.table_map)
or name in self.query.extra_select
or (
self.query.external_aliases.get(name)
and name not in self.query.table_map
)
):
self.quote_cache[name] = name
return name
r = self.connection.ops.quote_name(name)
self.quote_cache[name] = r
return r
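# Illustrative behavior (assumed): a real table name such as 'auth_user' is
# passed through connection.ops.quote_name() (e.g. '"auth_user"' on
# PostgreSQL), while a generated join alias like 'T3' -- present in
# alias_map but not in table_map -- is returned unquoted and unchanged.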
def compile(self, node):
vendor_impl = getattr(node, "as_" + self.connection.vendor, None)
if vendor_impl:
sql, params = vendor_impl(self, self.connection)
else:
sql, params = node.as_sql(self, self.connection)
return sql, params
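# Sketch of the vendor-dispatch convention (assumed example): an expression
# can define as_postgresql(self, compiler, connection) (or as_mysql, etc.);
# compile() prefers that vendor-specific method over the generic as_sql().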
def get_combinator_sql(self, combinator, all):
features = self.connection.features
compilers = [
query.get_compiler(self.using, self.connection, self.elide_empty)
for query in self.query.combined_queries
if not query.is_empty()
]
if not features.supports_slicing_ordering_in_compound:
for query, compiler in zip(self.query.combined_queries, compilers):
if query.low_mark or query.high_mark:
raise DatabaseError(
"LIMIT/OFFSET not allowed in subqueries of compound statements."
)
if compiler.get_order_by():
raise DatabaseError(
"ORDER BY not allowed in subqueries of compound statements."
)
parts = ()
for compiler in compilers:
try:
# If the columns list is limited, then all combined queries
# must have the same columns list. Set the selects defined on
# the query on all combined queries, if not already set.
if not compiler.query.values_select and self.query.values_select:
compiler.query = compiler.query.clone()
compiler.query.set_values(
(
*self.query.extra_select,
*self.query.values_select,
*self.query.annotation_select,
)
)
part_sql, part_args = compiler.as_sql()
if compiler.query.combinator:
# Wrap in a subquery if wrapping in parentheses isn't
# supported.
if not features.supports_parentheses_in_compound:
part_sql = "SELECT * FROM ({})".format(part_sql)
# Add parentheses when combining with compound query if not
# already added for all compound queries.
elif (
self.query.subquery
or not features.supports_slicing_ordering_in_compound
):
part_sql = "({})".format(part_sql)
parts += ((part_sql, part_args),)
except EmptyResultSet:
# Omit the empty queryset with UNION and with DIFFERENCE if the
# first queryset is nonempty.
if combinator == "union" or (combinator == "difference" and parts):
continue
raise
if not parts:
raise EmptyResultSet
combinator_sql = self.connection.ops.set_operators[combinator]
if all and combinator == "union":
combinator_sql += " ALL"
braces = "{}"
if not self.query.subquery and features.supports_slicing_ordering_in_compound:
braces = "({})"
sql_parts, args_parts = zip(
*((braces.format(sql), args) for sql, args in parts)
)
result = [" {} ".format(combinator_sql).join(sql_parts)]
params = []
for part in args_parts:
params.extend(part)
return result, params
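# Illustrative output (assumed): for qs1.union(qs2) this yields roughly
#   ['SELECT ... UNION SELECT ...']
# (each side parenthesized when the backend supports slicing/ordering in
# compound statements), with 'UNION ALL' substituted when all=True.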
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
If 'with_limits' is False, any limit/offset information is not included
in the query.
"""
refcounts_before = self.query.alias_refcount.copy()
try:
extra_select, order_by, group_by = self.pre_sql_setup()
for_update_part = None
# Is a LIMIT/OFFSET clause needed?
with_limit_offset = with_limits and (
self.query.high_mark is not None or self.query.low_mark
)
combinator = self.query.combinator
features = self.connection.features
if combinator:
if not getattr(features, "supports_select_{}".format(combinator)):
raise NotSupportedError(
"{} is not supported on this database backend.".format(
combinator
)
)
result, params = self.get_combinator_sql(
combinator, self.query.combinator_all
)
else:
distinct_fields, distinct_params = self.get_distinct()
# This must come after 'select', 'ordering', and 'distinct'
# (see docstring of get_from_clause() for details).
from_, f_params = self.get_from_clause()
try:
where, w_params = (
self.compile(self.where) if self.where is not None else ("", [])
)
except EmptyResultSet:
if self.elide_empty:
raise
# Use a predicate that's always False.
where, w_params = "0 = 1", []
having, h_params = (
self.compile(self.having) if self.having is not None else ("", [])
)
result = ["SELECT"]
params = []
if self.query.distinct:
distinct_result, distinct_params = self.connection.ops.distinct_sql(
distinct_fields,
distinct_params,
)
result += distinct_result
params += distinct_params
out_cols = []
col_idx = 1
for _, (s_sql, s_params), alias in self.select + extra_select:
if alias:
s_sql = "%s AS %s" % (
s_sql,
self.connection.ops.quote_name(alias),
)
elif with_col_aliases:
s_sql = "%s AS %s" % (
s_sql,
self.connection.ops.quote_name("col%d" % col_idx),
)
col_idx += 1
params.extend(s_params)
out_cols.append(s_sql)
result += [", ".join(out_cols)]
if from_:
result += ["FROM", *from_]
elif self.connection.features.bare_select_suffix:
result += [self.connection.features.bare_select_suffix]
params.extend(f_params)
if self.query.select_for_update and features.has_select_for_update:
if (
self.connection.get_autocommit()
# Don't raise an exception when database doesn't
# support transactions, as it's a noop.
and features.supports_transactions
):
raise TransactionManagementError(
"select_for_update cannot be used outside of a transaction."
)
if (
with_limit_offset
and not features.supports_select_for_update_with_limit
):
raise NotSupportedError(
"LIMIT/OFFSET is not supported with "
"select_for_update on this database backend."
)
nowait = self.query.select_for_update_nowait
skip_locked = self.query.select_for_update_skip_locked
of = self.query.select_for_update_of
no_key = self.query.select_for_no_key_update
# If it's a NOWAIT/SKIP LOCKED/OF/NO KEY query but the
# backend doesn't support it, raise NotSupportedError to
# prevent a possible deadlock.
if nowait and not features.has_select_for_update_nowait:
raise NotSupportedError(
"NOWAIT is not supported on this database backend."
)
elif skip_locked and not features.has_select_for_update_skip_locked:
raise NotSupportedError(
"SKIP LOCKED is not supported on this database backend."
)
elif of and not features.has_select_for_update_of:
raise NotSupportedError(
"FOR UPDATE OF is not supported on this database backend."
)
elif no_key and not features.has_select_for_no_key_update:
raise NotSupportedError(
"FOR NO KEY UPDATE is not supported on this "
"database backend."
)
for_update_part = self.connection.ops.for_update_sql(
nowait=nowait,
skip_locked=skip_locked,
of=self.get_select_for_update_of_arguments(),
no_key=no_key,
)
if for_update_part and features.for_update_after_from:
result.append(for_update_part)
if where:
result.append("WHERE %s" % where)
params.extend(w_params)
grouping = []
for g_sql, g_params in group_by:
grouping.append(g_sql)
params.extend(g_params)
if grouping:
if distinct_fields:
raise NotImplementedError(
"annotate() + distinct(fields) is not implemented."
)
order_by = order_by or self.connection.ops.force_no_ordering()
result.append("GROUP BY %s" % ", ".join(grouping))
if self._meta_ordering:
order_by = None
if having:
result.append("HAVING %s" % having)
params.extend(h_params)
if self.query.explain_info:
result.insert(
0,
self.connection.ops.explain_query_prefix(
self.query.explain_info.format,
**self.query.explain_info.options,
),
)
if order_by:
ordering = []
for _, (o_sql, o_params, _) in order_by:
ordering.append(o_sql)
params.extend(o_params)
result.append("ORDER BY %s" % ", ".join(ordering))
if with_limit_offset:
result.append(
self.connection.ops.limit_offset_sql(
self.query.low_mark, self.query.high_mark
)
)
if for_update_part and not features.for_update_after_from:
result.append(for_update_part)
if self.query.subquery and extra_select:
# If the query is used as a subquery, the extra selects would
# result in more columns than the left-hand side expression is
# expecting. This can happen when a subquery uses a combination
# of order_by() and distinct(), forcing the ordering expressions
# to be selected as well. Wrap the query in another subquery
# to exclude extraneous selects.
sub_selects = []
sub_params = []
for index, (select, _, alias) in enumerate(self.select, start=1):
if not alias and with_col_aliases:
alias = "col%d" % index
if alias:
sub_selects.append(
"%s.%s"
% (
self.connection.ops.quote_name("subquery"),
self.connection.ops.quote_name(alias),
)
)
else:
select_clone = select.relabeled_clone(
{select.alias: "subquery"}
)
subselect, subparams = select_clone.as_sql(
self, self.connection
)
sub_selects.append(subselect)
sub_params.extend(subparams)
return "SELECT %s FROM (%s) subquery" % (
", ".join(sub_selects),
" ".join(result),
), tuple(sub_params + params)
return " ".join(result), tuple(params)
finally:
# Finally do cleanup - get rid of the joins we created above.
self.query.reset_refcounts(refcounts_before)
def get_default_columns(self, start_alias=None, opts=None, from_parent=None):
"""
Compute the default columns for selecting every field in the base
model. Will sometimes be called to pull in related models (e.g. via
select_related), in which case "opts" and "start_alias" will be given
to provide a starting point for the traversal.
Return a list of column expressions suitable for inclusion in the
query's SELECT clause, one for each concrete field that should be
loaded.
"""
result = []
if opts is None:
if (opts := self.query.get_meta()) is None:
return result
only_load = self.deferred_to_columns()
start_alias = start_alias or self.query.get_initial_alias()
# The 'seen_models' is used to optimize checking the needed parent
# alias for a given field. This also includes None -> start_alias to
# be used by local fields.
seen_models = {None: start_alias}
for field in opts.concrete_fields:
model = field.model._meta.concrete_model
# A proxy model will have a different model and concrete_model. We
# will assign None if the field belongs to this model.
if model == opts.model:
model = None
if (
from_parent
and model is not None
and issubclass(
from_parent._meta.concrete_model, model._meta.concrete_model
)
):
# Avoid loading data for already loaded parents.
# We end up here in the case select_related() resolution
# proceeds from parent model to child model. In that case the
# parent model data is already present in the SELECT clause,
# and we want to avoid reloading the same data again.
continue
if field.model in only_load and field.attname not in only_load[field.model]:
continue
alias = self.query.join_parent_model(opts, model, start_alias, seen_models)
column = field.get_col(alias)
result.append(column)
return result
def get_distinct(self):
"""
Return a quoted list of fields to use in DISTINCT ON part of the query.
This method can alter the tables in the query, and thus it must be
called before get_from_clause().
"""
result = []
params = []
opts = self.query.get_meta()
for name in self.query.distinct_fields:
parts = name.split(LOOKUP_SEP)
_, targets, alias, joins, path, _, transform_function = self._setup_joins(
parts, opts, None
)
targets, alias, _ = self.query.trim_joins(targets, joins, path)
for target in targets:
if name in self.query.annotation_select:
result.append(self.connection.ops.quote_name(name))
else:
r, p = self.compile(transform_function(target, alias))
result.append(r)
params.append(p)
return result, params
def find_ordering_name(
self, name, opts, alias=None, default_order="ASC", already_seen=None
):
"""
Return the table alias (the name might be ambiguous, the alias will
not be) and column name for ordering by the given 'name' parameter.
The 'name' is of the form 'field1__field2__...__fieldN'.
"""
name, order = get_order_dir(name, default_order)
descending = order == "DESC"
pieces = name.split(LOOKUP_SEP)
(
field,
targets,
alias,
joins,
path,
opts,
transform_function,
) = self._setup_joins(pieces, opts, alias)
# If we get to this point and the field is a relation to another model,
# append the default ordering for that model unless it is the pk
# shortcut or the attribute name of the field that is specified.
if (
field.is_relation
and opts.ordering
and getattr(field, "attname", None) != pieces[-1]
and name != "pk"
):
# Firstly, avoid infinite loops.
already_seen = already_seen or set()
join_tuple = tuple(
getattr(self.query.alias_map[j], "join_cols", None) for j in joins
)
if join_tuple in already_seen:
raise FieldError("Infinite loop caused by ordering.")
already_seen.add(join_tuple)
results = []
for item in opts.ordering:
if hasattr(item, "resolve_expression") and not isinstance(
item, OrderBy
):
item = item.desc() if descending else item.asc()
if isinstance(item, OrderBy):
results.append((item, False))
continue
results.extend(
self.find_ordering_name(item, opts, alias, order, already_seen)
)
return results
targets, alias, _ = self.query.trim_joins(targets, joins, path)
return [
(OrderBy(transform_function(t, alias), descending=descending), False)
for t in targets
]
def _setup_joins(self, pieces, opts, alias):
"""
Helper method for get_order_by() and get_distinct().
get_order_by() and get_distinct() must produce the same target columns
on the same input, as the prefixes of get_order_by() and get_distinct()
must match. Executing SQL where this is not true is an error.
"""
alias = alias or self.query.get_initial_alias()
field, targets, opts, joins, path, transform_function = self.query.setup_joins(
pieces, opts, alias
)
alias = joins[-1]
return field, targets, alias, joins, path, opts, transform_function
def get_from_clause(self):
"""
Return a list of strings that are joined together to go after the
"FROM" part of the query, as well as a list any extra parameters that
need to be included. Subclasses, can override this to create a
from-clause via a "select".
This should only be called after any SQL construction methods that
might change the tables that are needed. This means the select columns,
ordering, and distinct must be done first.
"""
result = []
params = []
for alias in tuple(self.query.alias_map):
if not self.query.alias_refcount[alias]:
continue
try:
from_clause = self.query.alias_map[alias]
except KeyError:
# Extra tables can end up in self.tables, but not in the
# alias_map if they aren't in a join. That's OK. We skip them.
continue
clause_sql, clause_params = self.compile(from_clause)
result.append(clause_sql)
params.extend(clause_params)
for t in self.query.extra_tables:
alias, _ = self.query.table_alias(t)
# Only add the alias if it's not already present (the table_alias()
# call increments the refcount, so an alias refcount of one means
# this is the only reference).
if (
alias not in self.query.alias_map
or self.query.alias_refcount[alias] == 1
):
result.append(", %s" % self.quote_name_unless_alias(alias))
return result, params
def get_related_selections(
self,
select,
opts=None,
root_alias=None,
cur_depth=1,
requested=None,
restricted=None,
):
"""
Fill in the information needed for a select_related query. The current
depth is measured as the number of connections away from the root model
(for example, cur_depth=1 means we are looking at models with direct
connections to the root model).
"""
def _get_field_choices():
direct_choices = (f.name for f in opts.fields if f.is_relation)
reverse_choices = (
f.field.related_query_name()
for f in opts.related_objects
if f.field.unique
)
return chain(
direct_choices, reverse_choices, self.query._filtered_relations
)
related_klass_infos = []
if not restricted and cur_depth > self.query.max_depth:
# We've recursed far enough; bail out.
return related_klass_infos
if not opts:
opts = self.query.get_meta()
root_alias = self.query.get_initial_alias()
only_load = self.deferred_to_columns()
# Setup for the case when only particular related fields should be
# included in the related selection.
fields_found = set()
if requested is None:
restricted = isinstance(self.query.select_related, dict)
if restricted:
requested = self.query.select_related
def get_related_klass_infos(klass_info, related_klass_infos):
klass_info["related_klass_infos"] = related_klass_infos
for f in opts.fields:
field_model = f.model._meta.concrete_model
fields_found.add(f.name)
if restricted:
next = requested.get(f.name, {})
if not f.is_relation:
# If a non-related field is used like a relation,
# or if a single non-relational field is given.
if next or f.name in requested:
raise FieldError(
"Non-relational field given in select_related: '%s'. "
"Choices are: %s"
% (
f.name,
", ".join(_get_field_choices()) or "(none)",
)
)
else:
next = False
if not select_related_descend(
f, restricted, requested, only_load.get(field_model)
):
continue
klass_info = {
"model": f.remote_field.model,
"field": f,
"reverse": False,
"local_setter": f.set_cached_value,
"remote_setter": f.remote_field.set_cached_value
if f.unique
else lambda x, y: None,
"from_parent": False,
}
related_klass_infos.append(klass_info)
select_fields = []
_, _, _, joins, _, _ = self.query.setup_joins([f.name], opts, root_alias)
alias = joins[-1]
columns = self.get_default_columns(
start_alias=alias, opts=f.remote_field.model._meta
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info["select_fields"] = select_fields
next_klass_infos = self.get_related_selections(
select,
f.remote_field.model._meta,
alias,
cur_depth + 1,
next,
restricted,
)
get_related_klass_infos(klass_info, next_klass_infos)
if restricted:
related_fields = [
(o.field, o.related_model)
for o in opts.related_objects
if o.field.unique and not o.many_to_many
]
for f, model in related_fields:
if not select_related_descend(
f, restricted, requested, only_load.get(model), reverse=True
):
continue
related_field_name = f.related_query_name()
fields_found.add(related_field_name)
join_info = self.query.setup_joins(
[related_field_name], opts, root_alias
)
alias = join_info.joins[-1]
from_parent = issubclass(model, opts.model) and model is not opts.model
klass_info = {
"model": model,
"field": f,
"reverse": True,
"local_setter": f.remote_field.set_cached_value,
"remote_setter": f.set_cached_value,
"from_parent": from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
start_alias=alias, opts=model._meta, from_parent=opts.model
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info["select_fields"] = select_fields
next = requested.get(f.related_query_name(), {})
next_klass_infos = self.get_related_selections(
select, model._meta, alias, cur_depth + 1, next, restricted
)
get_related_klass_infos(klass_info, next_klass_infos)
def local_setter(obj, from_obj):
# Set a reverse fk object when relation is non-empty.
if from_obj:
f.remote_field.set_cached_value(from_obj, obj)
def remote_setter(name, obj, from_obj):
setattr(from_obj, name, obj)
for name in list(requested):
# Filtered relations work only on the topmost level.
if cur_depth > 1:
break
if name in self.query._filtered_relations:
fields_found.add(name)
f, _, join_opts, joins, _, _ = self.query.setup_joins(
[name], opts, root_alias
)
model = join_opts.model
alias = joins[-1]
from_parent = (
issubclass(model, opts.model) and model is not opts.model
)
klass_info = {
"model": model,
"field": f,
"reverse": True,
"local_setter": local_setter,
"remote_setter": partial(remote_setter, name),
"from_parent": from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
start_alias=alias,
opts=model._meta,
from_parent=opts.model,
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info["select_fields"] = select_fields
next_requested = requested.get(name, {})
next_klass_infos = self.get_related_selections(
select,
opts=model._meta,
root_alias=alias,
cur_depth=cur_depth + 1,
requested=next_requested,
restricted=restricted,
)
get_related_klass_infos(klass_info, next_klass_infos)
fields_not_found = set(requested).difference(fields_found)
if fields_not_found:
invalid_fields = ("'%s'" % s for s in fields_not_found)
raise FieldError(
"Invalid field name(s) given in select_related: %s. "
"Choices are: %s"
% (
", ".join(invalid_fields),
", ".join(_get_field_choices()) or "(none)",
)
)
return related_klass_infos
def get_select_for_update_of_arguments(self):
"""
Return a quoted list of arguments for the SELECT FOR UPDATE OF part of
the query.
"""
def _get_parent_klass_info(klass_info):
concrete_model = klass_info["model"]._meta.concrete_model
for parent_model, parent_link in concrete_model._meta.parents.items():
parent_list = parent_model._meta.get_parent_list()
yield {
"model": parent_model,
"field": parent_link,
"reverse": False,
"select_fields": [
select_index
for select_index in klass_info["select_fields"]
# Selected columns from a model or its parents.
if (
self.select[select_index][0].target.model == parent_model
or self.select[select_index][0].target.model in parent_list
)
],
}
def _get_first_selected_col_from_model(klass_info):
"""
Find the first selected column from a model. If it doesn't exist,
don't lock the model.
select_fields is filled recursively, so it also contains fields
from the parent models.
"""
concrete_model = klass_info["model"]._meta.concrete_model
for select_index in klass_info["select_fields"]:
if self.select[select_index][0].target.model == concrete_model:
return self.select[select_index][0]
def _get_field_choices():
"""Yield all allowed field paths in breadth-first search order."""
queue = collections.deque([(None, self.klass_info)])
while queue:
parent_path, klass_info = queue.popleft()
if parent_path is None:
path = []
yield "self"
else:
field = klass_info["field"]
if klass_info["reverse"]:
field = field.remote_field
path = parent_path + [field.name]
yield LOOKUP_SEP.join(path)
queue.extend(
(path, klass_info)
for klass_info in _get_parent_klass_info(klass_info)
)
queue.extend(
(path, klass_info)
for klass_info in klass_info.get("related_klass_infos", [])
)
if not self.klass_info:
return []
result = []
invalid_names = []
for name in self.query.select_for_update_of:
klass_info = self.klass_info
if name == "self":
col = _get_first_selected_col_from_model(klass_info)
else:
for part in name.split(LOOKUP_SEP):
klass_infos = (
*klass_info.get("related_klass_infos", []),
*_get_parent_klass_info(klass_info),
)
for related_klass_info in klass_infos:
field = related_klass_info["field"]
if related_klass_info["reverse"]:
field = field.remote_field
if field.name == part:
klass_info = related_klass_info
break
else:
klass_info = None
break
if klass_info is None:
invalid_names.append(name)
continue
col = _get_first_selected_col_from_model(klass_info)
if col is not None:
if self.connection.features.select_for_update_of_column:
result.append(self.compile(col)[0])
else:
result.append(self.quote_name_unless_alias(col.alias))
if invalid_names:
raise FieldError(
"Invalid field name(s) given in select_for_update(of=(...)): %s. "
"Only relational fields followed in the query are allowed. "
"Choices are: %s."
% (
", ".join(invalid_names),
", ".join(_get_field_choices()),
)
)
return result
def deferred_to_columns(self):
"""
Convert the self.deferred_loading data structure to mapping of table
names to sets of column names which are to be loaded. Return the
dictionary.
"""
columns = {}
self.query.deferred_to_data(columns)
return columns
def get_converters(self, expressions):
converters = {}
for i, expression in enumerate(expressions):
if expression:
backend_converters = self.connection.ops.get_db_converters(expression)
field_converters = expression.get_db_converters(self.connection)
if backend_converters or field_converters:
converters[i] = (backend_converters + field_converters, expression)
return converters
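# Sketch (assumed example): converters adapt raw database values per column;
# e.g. on SQLite, where booleans are stored as 0/1, a BooleanField's
# converter maps them back to True/False. apply_converters() below runs each
# column's converter chain positionally over every row.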
def apply_converters(self, rows, converters):
connection = self.connection
converters = list(converters.items())
for row in map(list, rows):
for pos, (convs, expression) in converters:
value = row[pos]
for converter in convs:
value = converter(value, expression, connection)
row[pos] = value
yield row
def results_iter(
self,
results=None,
tuple_expected=False,
chunked_fetch=False,
chunk_size=GET_ITERATOR_CHUNK_SIZE,
):
"""Return an iterator over the results from executing this query."""
if results is None:
results = self.execute_sql(
MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size
)
fields = [s[0] for s in self.select[0 : self.col_count]]
converters = self.get_converters(fields)
rows = chain.from_iterable(results)
if converters:
rows = self.apply_converters(rows, converters)
if tuple_expected:
rows = map(tuple, rows)
return rows
def has_results(self):
"""
Backends (e.g. NoSQL) can override this in order to use optimized
versions of "query has any results."
"""
return bool(self.execute_sql(SINGLE))
def execute_sql(
self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
):
"""
Run the query against the database and return the result(s). The
return value is a single data item if result_type is SINGLE, or an
iterator over the results if the result_type is MULTI.
result_type is either MULTI (use fetchmany() to retrieve all rows),
SINGLE (only retrieve a single row), or None. In this last case, the
cursor is returned if any query is executed, since it's used by
        subclasses such as InsertQuery. It's possible, however, that no query
is needed, as the filters describe an empty set. In that case, None is
returned, to avoid any unnecessary database interaction.
"""
result_type = result_type or NO_RESULTS
try:
sql, params = self.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return iter([])
else:
return
if chunked_fetch:
cursor = self.connection.chunked_cursor()
else:
cursor = self.connection.cursor()
try:
cursor.execute(sql, params)
except Exception:
# Might fail for server-side cursors (e.g. connection closed)
cursor.close()
raise
if result_type == CURSOR:
# Give the caller the cursor to process and close.
return cursor
if result_type == SINGLE:
try:
val = cursor.fetchone()
if val:
return val[0 : self.col_count]
return val
finally:
# done with the cursor
cursor.close()
if result_type == NO_RESULTS:
cursor.close()
return
result = cursor_iter(
cursor,
self.connection.features.empty_fetchmany_value,
self.col_count if self.has_extra_select else None,
chunk_size,
)
if not chunked_fetch or not self.connection.features.can_use_chunked_reads:
# If we are using non-chunked reads, we return the same data
# structure as normally, but ensure it is all read into memory
# before going any further. Use chunked_fetch if requested,
# unless the database doesn't support it.
return list(result)
return result
def as_subquery_condition(self, alias, columns, compiler):
qn = compiler.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
for index, select_col in enumerate(self.query.select):
lhs_sql, lhs_params = self.compile(select_col)
rhs = "%s.%s" % (qn(alias), qn2(columns[index]))
self.query.where.add(RawSQL("%s = %s" % (lhs_sql, rhs), lhs_params), "AND")
sql, params = self.as_sql()
return "EXISTS (%s)" % sql, params
def explain_query(self):
result = list(self.execute_sql())
        # Some backends return 1-item tuples with strings, and others return
# tuples with integers and strings. Flatten them out into strings.
format_ = self.query.explain_info.format
output_formatter = json.dumps if format_ and format_.lower() == "json" else str
for row in result[0]:
if not isinstance(row, str):
yield " ".join(output_formatter(c) for c in row)
else:
yield row
class SQLInsertCompiler(SQLCompiler):
returning_fields = None
returning_params = ()
def field_as_sql(self, field, val):
"""
Take a field and a value intended to be saved on that field, and
return placeholder SQL and accompanying params. Check for raw values,
expressions, and fields with get_placeholder() defined in that order.
When field is None, consider the value raw and use it as the
placeholder, with no corresponding parameters returned.
"""
if field is None:
# A field value of None means the value is raw.
sql, params = val, []
elif hasattr(val, "as_sql"):
# This is an expression, let's compile it.
sql, params = self.compile(val)
elif hasattr(field, "get_placeholder"):
# Some fields (e.g. geo fields) need special munging before
# they can be inserted.
sql, params = field.get_placeholder(val, self, self.connection), [val]
else:
# Return the common case for the placeholder
sql, params = "%s", [val]
# The following hook is only used by Oracle Spatial, which sometimes
# needs to yield 'NULL' and [] as its placeholder and params instead
# of '%s' and [None]. The 'NULL' placeholder is produced earlier by
# OracleOperations.get_geom_placeholder(). The following line removes
# the corresponding None parameter. See ticket #10888.
params = self.connection.ops.modify_insert_params(sql, params)
return sql, params
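    # Illustrative return values (int_field and the inputs are hypothetical),
    # assuming a default backend where modify_insert_params() is a no-op:
    #     field_as_sql(None, "NOW()")  -> ("NOW()", [])  # raw value
    #     field_as_sql(int_field, 42)  -> ("%s", [42])   # common case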
def prepare_value(self, field, value):
"""
Prepare a value to be used in a query by resolving it if it is an
expression and otherwise calling the field's get_db_prep_save().
"""
if hasattr(value, "resolve_expression"):
value = value.resolve_expression(
self.query, allow_joins=False, for_save=True
)
# Don't allow values containing Col expressions. They refer to
# existing columns on a row, but in the case of insert the row
# doesn't exist yet.
if value.contains_column_references:
raise ValueError(
'Failed to insert expression "%s" on %s. F() expressions '
"can only be used to update, not to insert." % (value, field)
)
if value.contains_aggregate:
raise FieldError(
"Aggregate functions are not allowed in this query "
"(%s=%r)." % (field.name, value)
)
if value.contains_over_clause:
raise FieldError(
"Window expressions are not allowed in this query (%s=%r)."
% (field.name, value)
)
else:
value = field.get_db_prep_save(value, connection=self.connection)
return value
def pre_save_val(self, field, obj):
"""
Get the given field's value off the given obj. pre_save() is used for
things like auto_now on DateTimeField. Skip it if this is a raw query.
"""
if self.query.raw:
return getattr(obj, field.attname)
return field.pre_save(obj, add=True)
def assemble_as_sql(self, fields, value_rows):
"""
Take a sequence of N fields and a sequence of M rows of values, and
generate placeholder SQL and parameters for each field and value.
Return a pair containing:
* a sequence of M rows of N SQL placeholder strings, and
* a sequence of M rows of corresponding parameter values.
Each placeholder string may contain any number of '%s' interpolation
strings, and each parameter row will contain exactly as many params
as the total number of '%s's in the corresponding placeholder row.
"""
if not value_rows:
return [], []
# list of (sql, [params]) tuples for each object to be saved
# Shape: [n_objs][n_fields][2]
rows_of_fields_as_sql = (
(self.field_as_sql(field, v) for field, v in zip(fields, row))
for row in value_rows
)
# tuple like ([sqls], [[params]s]) for each object to be saved
# Shape: [n_objs][2][n_fields]
sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql)
# Extract separate lists for placeholders and params.
# Each of these has shape [n_objs][n_fields]
placeholder_rows, param_rows = zip(*sql_and_param_pair_rows)
# Params for each field are still lists, and need to be flattened.
param_rows = [[p for ps in row for p in ps] for row in param_rows]
return placeholder_rows, param_rows
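    # Shape sketch with hypothetical values: for two objects and two fields
    # where each field compiles to ("%s", [val]), the return value is
    #     placeholder_rows == (("%s", "%s"), ("%s", "%s"))
    #     param_rows == [["a", 1], ["b", 2]]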
def as_sql(self):
# We don't need quote_name_unless_alias() here, since these are all
# going to be column names (so we can avoid the extra overhead).
qn = self.connection.ops.quote_name
opts = self.query.get_meta()
insert_statement = self.connection.ops.insert_statement(
on_conflict=self.query.on_conflict,
)
result = ["%s %s" % (insert_statement, qn(opts.db_table))]
fields = self.query.fields or [opts.pk]
result.append("(%s)" % ", ".join(qn(f.column) for f in fields))
if self.query.fields:
value_rows = [
[
self.prepare_value(field, self.pre_save_val(field, obj))
for field in fields
]
for obj in self.query.objs
]
else:
# An empty object.
value_rows = [
[self.connection.ops.pk_default_value()] for _ in self.query.objs
]
fields = [None]
# Currently the backends just accept values when generating bulk
# queries and generate their own placeholders. Doing that isn't
# necessary and it should be possible to use placeholders and
# expressions in bulk inserts too.
can_bulk = (
not self.returning_fields and self.connection.features.has_bulk_insert
)
placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)
on_conflict_suffix_sql = self.connection.ops.on_conflict_suffix_sql(
fields,
self.query.on_conflict,
self.query.update_fields,
self.query.unique_fields,
)
if (
self.returning_fields
and self.connection.features.can_return_columns_from_insert
):
if self.connection.features.can_return_rows_from_bulk_insert:
result.append(
self.connection.ops.bulk_insert_sql(fields, placeholder_rows)
)
params = param_rows
else:
result.append("VALUES (%s)" % ", ".join(placeholder_rows[0]))
params = [param_rows[0]]
if on_conflict_suffix_sql:
result.append(on_conflict_suffix_sql)
# Skip empty r_sql to allow subclasses to customize behavior for
# 3rd party backends. Refs #19096.
r_sql, self.returning_params = self.connection.ops.return_insert_columns(
self.returning_fields
)
if r_sql:
result.append(r_sql)
params += [self.returning_params]
return [(" ".join(result), tuple(chain.from_iterable(params)))]
if can_bulk:
result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
if on_conflict_suffix_sql:
result.append(on_conflict_suffix_sql)
return [(" ".join(result), tuple(p for ps in param_rows for p in ps))]
else:
if on_conflict_suffix_sql:
result.append(on_conflict_suffix_sql)
return [
(" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
for p, vals in zip(placeholder_rows, param_rows)
]
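    # Illustrative result for a single object on a backend without bulk
    # insert or RETURNING support (table and values are hypothetical):
    #     [('INSERT INTO "app_book" ("name") VALUES (%s)', ['Django'])]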
def execute_sql(self, returning_fields=None):
assert not (
returning_fields
and len(self.query.objs) != 1
and not self.connection.features.can_return_rows_from_bulk_insert
)
opts = self.query.get_meta()
self.returning_fields = returning_fields
with self.connection.cursor() as cursor:
for sql, params in self.as_sql():
cursor.execute(sql, params)
if not self.returning_fields:
return []
if (
self.connection.features.can_return_rows_from_bulk_insert
and len(self.query.objs) > 1
):
rows = self.connection.ops.fetch_returned_insert_rows(cursor)
elif self.connection.features.can_return_columns_from_insert:
assert len(self.query.objs) == 1
rows = [
self.connection.ops.fetch_returned_insert_columns(
cursor,
self.returning_params,
)
]
else:
rows = [
(
self.connection.ops.last_insert_id(
cursor,
opts.db_table,
opts.pk.column,
),
)
]
cols = [field.get_col(opts.db_table) for field in self.returning_fields]
converters = self.get_converters(cols)
if converters:
rows = list(self.apply_converters(rows, converters))
return rows
class SQLDeleteCompiler(SQLCompiler):
@cached_property
def single_alias(self):
# Ensure base table is in aliases.
self.query.get_initial_alias()
return sum(self.query.alias_refcount[t] > 0 for t in self.query.alias_map) == 1
@classmethod
def _expr_refs_base_model(cls, expr, base_model):
if isinstance(expr, Query):
return expr.model == base_model
if not hasattr(expr, "get_source_expressions"):
return False
return any(
cls._expr_refs_base_model(source_expr, base_model)
for source_expr in expr.get_source_expressions()
)
@cached_property
def contains_self_reference_subquery(self):
return any(
self._expr_refs_base_model(expr, self.query.model)
for expr in chain(
self.query.annotations.values(), self.query.where.children
)
)
def _as_sql(self, query):
result = ["DELETE FROM %s" % self.quote_name_unless_alias(query.base_table)]
where, params = self.compile(query.where)
if where:
result.append("WHERE %s" % where)
return " ".join(result), tuple(params)
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
if self.single_alias and not self.contains_self_reference_subquery:
return self._as_sql(self.query)
innerq = self.query.clone()
innerq.__class__ = Query
innerq.clear_select_clause()
pk = self.query.model._meta.pk
innerq.select = [pk.get_col(self.query.get_initial_alias())]
outerq = Query(self.query.model)
if not self.connection.features.update_can_self_select:
# Force the materialization of the inner query to allow reference
# to the target table on MySQL.
sql, params = innerq.get_compiler(connection=self.connection).as_sql()
innerq = RawSQL("SELECT * FROM (%s) subquery" % sql, params)
outerq.add_filter("pk__in", innerq)
return self._as_sql(outerq)
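    # Fast-path sketch (hypothetical table and filter): a single-alias
    # delete without a self-referencing subquery compiles directly to
    #     ('DELETE FROM "app_book" WHERE "app_book"."id" = %s', (1,))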
class SQLUpdateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
self.pre_sql_setup()
if not self.query.values:
return "", ()
qn = self.quote_name_unless_alias
values, update_params = [], []
for field, model, val in self.query.values:
if hasattr(val, "resolve_expression"):
val = val.resolve_expression(
self.query, allow_joins=False, for_save=True
)
if val.contains_aggregate:
raise FieldError(
"Aggregate functions are not allowed in this query "
"(%s=%r)." % (field.name, val)
)
if val.contains_over_clause:
raise FieldError(
"Window expressions are not allowed in this query "
"(%s=%r)." % (field.name, val)
)
elif hasattr(val, "prepare_database_save"):
if field.remote_field:
val = field.get_db_prep_save(
val.prepare_database_save(field),
connection=self.connection,
)
else:
raise TypeError(
"Tried to update field %s with a model instance, %r. "
"Use a value compatible with %s."
% (field, val, field.__class__.__name__)
)
else:
val = field.get_db_prep_save(val, connection=self.connection)
# Getting the placeholder for the field.
if hasattr(field, "get_placeholder"):
placeholder = field.get_placeholder(val, self, self.connection)
else:
placeholder = "%s"
name = field.column
if hasattr(val, "as_sql"):
sql, params = self.compile(val)
values.append("%s = %s" % (qn(name), placeholder % sql))
update_params.extend(params)
elif val is not None:
values.append("%s = %s" % (qn(name), placeholder))
update_params.append(val)
else:
values.append("%s = NULL" % qn(name))
table = self.query.base_table
result = [
"UPDATE %s SET" % qn(table),
", ".join(values),
]
where, params = self.compile(self.query.where)
if where:
result.append("WHERE %s" % where)
return " ".join(result), tuple(update_params + params)
def execute_sql(self, result_type):
"""
Execute the specified update. Return the number of rows affected by
the primary update query. The "primary update query" is the first
non-empty query that is executed. Row counts for any subsequent,
related queries are not available.
"""
cursor = super().execute_sql(result_type)
try:
rows = cursor.rowcount if cursor else 0
is_empty = cursor is None
finally:
if cursor:
cursor.close()
for query in self.query.get_related_updates():
aux_rows = query.get_compiler(self.using).execute_sql(result_type)
if is_empty and aux_rows:
rows = aux_rows
is_empty = False
return rows
def pre_sql_setup(self):
"""
If the update depends on results from other tables, munge the "where"
conditions to match the format required for (portable) SQL updates.
If multiple updates are required, pull out the id values to update at
this point so that they don't change as a result of the progressive
updates.
"""
refcounts_before = self.query.alias_refcount.copy()
# Ensure base table is in the query
self.query.get_initial_alias()
count = self.query.count_active_tables()
if not self.query.related_updates and count == 1:
return
query = self.query.chain(klass=Query)
query.select_related = False
query.clear_ordering(force=True)
query.extra = {}
query.select = []
meta = query.get_meta()
fields = [meta.pk.name]
related_ids_index = []
for related in self.query.related_updates:
if all(
path.join_field.primary_key for path in meta.get_path_to_parent(related)
):
# If a primary key chain exists to the targeted related update,
# then the meta.pk value can be used for it.
related_ids_index.append((related, 0))
else:
# This branch will only be reached when updating a field of an
# ancestor that is not part of the primary key chain of a MTI
# tree.
related_ids_index.append((related, len(fields)))
fields.append(related._meta.pk.name)
query.add_fields(fields)
super().pre_sql_setup()
must_pre_select = (
count > 1 and not self.connection.features.update_can_self_select
)
# Now we adjust the current query: reset the where clause and get rid
# of all the tables we don't need (since they're in the sub-select).
self.query.clear_where()
if self.query.related_updates or must_pre_select:
# Either we're using the idents in multiple update queries (so
# don't want them to change), or the db backend doesn't support
# selecting from the updating table (e.g. MySQL).
idents = []
related_ids = collections.defaultdict(list)
for rows in query.get_compiler(self.using).execute_sql(MULTI):
idents.extend(r[0] for r in rows)
for parent, index in related_ids_index:
related_ids[parent].extend(r[index] for r in rows)
self.query.add_filter("pk__in", idents)
self.query.related_ids = related_ids
else:
# The fast path. Filters and updates in one query.
self.query.add_filter("pk__in", query)
self.query.reset_refcounts(refcounts_before)
class SQLAggregateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
sql, params = [], []
for annotation in self.query.annotation_select.values():
ann_sql, ann_params = self.compile(annotation)
ann_sql, ann_params = annotation.select_format(self, ann_sql, ann_params)
sql.append(ann_sql)
params.extend(ann_params)
self.col_count = len(self.query.annotation_select)
sql = ", ".join(sql)
params = tuple(params)
inner_query_sql, inner_query_params = self.query.inner_query.get_compiler(
self.using,
elide_empty=self.elide_empty,
).as_sql(with_col_aliases=True)
sql = "SELECT %s FROM (%s) subquery" % (sql, inner_query_sql)
params = params + inner_query_params
return sql, params
def cursor_iter(cursor, sentinel, col_count, itersize):
"""
Yield blocks of rows from a cursor and ensure the cursor is closed when
done.
"""
try:
for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
yield rows if col_count is None else [r[:col_count] for r in rows]
finally:
cursor.close()
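# Minimal usage sketch (hypothetical cursor whose fetchmany() returns the
# backend's empty_fetchmany_value, e.g. [], when exhausted):
#
#     for chunk in cursor_iter(cursor, sentinel=[], col_count=None, itersize=100):
#         for row in chunk:
#             ...  # rows are already trimmed to col_count when one is given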
|
1e1ac88c24a553620d8de4d290ccf88d1d9588bf3babadfa210582308c0631bd | import copy
import datetime
import re
from django.db import DatabaseError
from django.db.backends.base.schema import (
BaseDatabaseSchemaEditor,
_related_non_m2m_objects,
)
from django.utils.duration import duration_iso_string
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_create_column = "ALTER TABLE %(table)s ADD %(column)s %(definition)s"
sql_alter_column_type = "MODIFY %(column)s %(type)s"
sql_alter_column_null = "MODIFY %(column)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s NOT NULL"
sql_alter_column_default = "MODIFY %(column)s DEFAULT %(default)s"
sql_alter_column_no_default = "MODIFY %(column)s DEFAULT NULL"
sql_alter_column_no_default_null = sql_alter_column_no_default
sql_alter_column_collate = "MODIFY %(column)s %(type)s%(collation)s"
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
sql_create_column_inline_fk = (
"CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s"
)
sql_delete_table = "DROP TABLE %(table)s CASCADE CONSTRAINTS"
sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"
def quote_value(self, value):
if isinstance(value, (datetime.date, datetime.time, datetime.datetime)):
return "'%s'" % value
elif isinstance(value, datetime.timedelta):
return "'%s'" % duration_iso_string(value)
elif isinstance(value, str):
return "'%s'" % value.replace("'", "''").replace("%", "%%")
elif isinstance(value, (bytes, bytearray, memoryview)):
return "'%s'" % value.hex()
elif isinstance(value, bool):
return "1" if value else "0"
else:
return str(value)
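    # Example outputs (illustrative):
    #     quote_value("O'Reilly")                -> "'O''Reilly'"
    #     quote_value(True)                      -> "1"
    #     quote_value(datetime.date(2020, 1, 1)) -> "'2020-01-01'"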
def remove_field(self, model, field):
# If the column is an identity column, drop the identity before
# removing the field.
if self._is_identity_column(model._meta.db_table, field.column):
self._drop_identity(model._meta.db_table, field.column)
super().remove_field(model, field)
def delete_model(self, model):
# Run superclass action
super().delete_model(model)
# Clean up manually created sequence.
self.execute(
"""
DECLARE
i INTEGER;
BEGIN
SELECT COUNT(1) INTO i FROM USER_SEQUENCES
WHERE SEQUENCE_NAME = '%(sq_name)s';
IF i = 1 THEN
EXECUTE IMMEDIATE 'DROP SEQUENCE "%(sq_name)s"';
END IF;
END;
/"""
% {
"sq_name": self.connection.ops._get_no_autofield_sequence_name(
model._meta.db_table
)
}
)
def alter_field(self, model, old_field, new_field, strict=False):
try:
super().alter_field(model, old_field, new_field, strict)
except DatabaseError as e:
description = str(e)
            # If we're changing type to an unsupported type, we need a
# SQLite-ish workaround
if "ORA-22858" in description or "ORA-22859" in description:
self._alter_field_type_workaround(model, old_field, new_field)
# If an identity column is changing to a non-numeric type, drop the
# identity first.
elif "ORA-30675" in description:
self._drop_identity(model._meta.db_table, old_field.column)
self.alter_field(model, old_field, new_field, strict)
# If a primary key column is changing to an identity column, drop
# the primary key first.
elif "ORA-30673" in description and old_field.primary_key:
self._delete_primary_key(model, strict=True)
self._alter_field_type_workaround(model, old_field, new_field)
# If a collation is changing on a primary key, drop the primary key
# first.
elif "ORA-43923" in description and old_field.primary_key:
self._delete_primary_key(model, strict=True)
self.alter_field(model, old_field, new_field, strict)
# Restore a primary key, if needed.
if new_field.primary_key:
self.execute(self._create_primary_key_sql(model, new_field))
else:
raise
def _alter_field_type_workaround(self, model, old_field, new_field):
"""
        Oracle refuses to change a column from certain types to certain
        other types.
What we need to do instead is:
- Add a nullable version of the desired field with a temporary name. If
the new column is an auto field, then the temporary column can't be
nullable.
- Update the table to transfer values from old to new
- Drop old column
- Rename the new column and possibly drop the nullable property
"""
# Make a new field that's like the new one but with a temporary
# column name.
new_temp_field = copy.deepcopy(new_field)
new_temp_field.null = new_field.get_internal_type() not in (
"AutoField",
"BigAutoField",
"SmallAutoField",
)
new_temp_field.column = self._generate_temp_name(new_field.column)
# Add it
self.add_field(model, new_temp_field)
# Explicit data type conversion
# https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf
# /Data-Type-Comparison-Rules.html#GUID-D0C5A47E-6F93-4C2D-9E49-4F2B86B359DD
new_value = self.quote_name(old_field.column)
old_type = old_field.db_type(self.connection)
if re.match("^N?CLOB", old_type):
new_value = "TO_CHAR(%s)" % new_value
old_type = "VARCHAR2"
if re.match("^N?VARCHAR2", old_type):
new_internal_type = new_field.get_internal_type()
if new_internal_type == "DateField":
new_value = "TO_DATE(%s, 'YYYY-MM-DD')" % new_value
elif new_internal_type == "DateTimeField":
new_value = "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value
elif new_internal_type == "TimeField":
                # TimeField values are stored as TIMESTAMP with a 1900-01-01
                # date part.
new_value = "CONCAT('1900-01-01 ', %s)" % new_value
new_value = "TO_TIMESTAMP(%s, 'YYYY-MM-DD HH24:MI:SS.FF')" % new_value
# Transfer values across
self.execute(
"UPDATE %s set %s=%s"
% (
self.quote_name(model._meta.db_table),
self.quote_name(new_temp_field.column),
new_value,
)
)
# Drop the old field
self.remove_field(model, old_field)
# Rename and possibly make the new field NOT NULL
super().alter_field(model, new_temp_field, new_field)
# Recreate foreign key (if necessary) because the old field is not
# passed to the alter_field() and data types of new_temp_field and
# new_field always match.
new_type = new_field.db_type(self.connection)
if (
(old_field.primary_key and new_field.primary_key)
or (old_field.unique and new_field.unique)
) and old_type != new_type:
for _, rel in _related_non_m2m_objects(new_temp_field, new_field):
if rel.field.db_constraint:
self.execute(
self._create_fk_sql(rel.related_model, rel.field, "_fk")
)
def _alter_column_type_sql(self, model, old_field, new_field, new_type):
auto_field_types = {"AutoField", "BigAutoField", "SmallAutoField"}
# Drop the identity if migrating away from AutoField.
if (
old_field.get_internal_type() in auto_field_types
and new_field.get_internal_type() not in auto_field_types
and self._is_identity_column(model._meta.db_table, new_field.column)
):
self._drop_identity(model._meta.db_table, new_field.column)
return super()._alter_column_type_sql(model, old_field, new_field, new_type)
def normalize_name(self, name):
"""
Get the properly shortened and uppercased identifier as returned by
quote_name() but without the quotes.
"""
nn = self.quote_name(name)
if nn[0] == '"' and nn[-1] == '"':
nn = nn[1:-1]
return nn
def _generate_temp_name(self, for_name):
"""Generate temporary names for workarounds that need temp columns."""
suffix = hex(hash(for_name)).upper()[1:]
return self.normalize_name(for_name + "_" + suffix)
def prepare_default(self, value):
return self.quote_value(value)
def _field_should_be_indexed(self, model, field):
create_index = super()._field_should_be_indexed(model, field)
db_type = field.db_type(self.connection)
if (
db_type is not None
and db_type.lower() in self.connection._limited_data_types
):
return False
return create_index
def _is_identity_column(self, table_name, column_name):
with self.connection.cursor() as cursor:
cursor.execute(
"""
SELECT
CASE WHEN identity_column = 'YES' THEN 1 ELSE 0 END
FROM user_tab_cols
WHERE table_name = %s AND
column_name = %s
""",
[self.normalize_name(table_name), self.normalize_name(column_name)],
)
row = cursor.fetchone()
return row[0] if row else False
def _drop_identity(self, table_name, column_name):
self.execute(
"ALTER TABLE %(table)s MODIFY %(column)s DROP IDENTITY"
% {
"table": self.quote_name(table_name),
"column": self.quote_name(column_name),
}
)
def _get_default_collation(self, table_name):
with self.connection.cursor() as cursor:
cursor.execute(
"""
SELECT default_collation FROM user_tables WHERE table_name = %s
""",
[self.normalize_name(table_name)],
)
return cursor.fetchone()[0]
def _alter_column_collation_sql(self, model, new_field, new_type, new_collation):
if new_collation is None:
new_collation = self._get_default_collation(model._meta.db_table)
return super()._alter_column_collation_sql(
model, new_field, new_type, new_collation
)
|
b16cadcba7d4b545930ef4c37113688cf661fcf0cbb8c78cf69f85166dac2588 | import datetime
import decimal
from importlib import import_module
import sqlparse
from django.conf import settings
from django.db import NotSupportedError, transaction
from django.db.backends import utils
from django.utils import timezone
from django.utils.encoding import force_str
class BaseDatabaseOperations:
"""
Encapsulate backend-specific differences, such as the way a backend
performs ordering or calculates the ID of a recently-inserted row.
"""
compiler_module = "django.db.models.sql.compiler"
# Integer field safe ranges by `internal_type` as documented
# in docs/ref/models/fields.txt.
integer_field_ranges = {
"SmallIntegerField": (-32768, 32767),
"IntegerField": (-2147483648, 2147483647),
"BigIntegerField": (-9223372036854775808, 9223372036854775807),
"PositiveBigIntegerField": (0, 9223372036854775807),
"PositiveSmallIntegerField": (0, 32767),
"PositiveIntegerField": (0, 2147483647),
"SmallAutoField": (-32768, 32767),
"AutoField": (-2147483648, 2147483647),
"BigAutoField": (-9223372036854775808, 9223372036854775807),
}
set_operators = {
"union": "UNION",
"intersection": "INTERSECT",
"difference": "EXCEPT",
}
# Mapping of Field.get_internal_type() (typically the model field's class
# name) to the data type to use for the Cast() function, if different from
# DatabaseWrapper.data_types.
cast_data_types = {}
# CharField data type if the max_length argument isn't provided.
cast_char_field_without_max_length = None
# Start and end points for window expressions.
PRECEDING = "PRECEDING"
FOLLOWING = "FOLLOWING"
UNBOUNDED_PRECEDING = "UNBOUNDED " + PRECEDING
UNBOUNDED_FOLLOWING = "UNBOUNDED " + FOLLOWING
CURRENT_ROW = "CURRENT ROW"
    # Prefix for EXPLAIN queries, or None if EXPLAIN isn't supported.
explain_prefix = None
def __init__(self, connection):
self.connection = connection
self._cache = None
def autoinc_sql(self, table, column):
"""
Return any SQL needed to support auto-incrementing primary keys, or
None if no SQL is necessary.
This SQL is executed when a table is created.
"""
return None
def bulk_batch_size(self, fields, objs):
"""
Return the maximum allowed batch size for the backend. The fields
are the fields going to be inserted in the batch, the objs contains
all the objects to be inserted.
"""
return len(objs)
def format_for_duration_arithmetic(self, sql):
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a "
"format_for_duration_arithmetic() method."
)
def cache_key_culling_sql(self):
"""
        Return an SQL query that retrieves the first cache key greater than
        the n smallest keys.
This is used by the 'db' cache backend to determine where to start
culling.
"""
cache_key = self.quote_name("cache_key")
return f"SELECT {cache_key} FROM %s ORDER BY {cache_key} LIMIT 1 OFFSET %%s"
def unification_cast_sql(self, output_field):
"""
Given a field instance, return the SQL that casts the result of a union
to that type. The resulting string should contain a '%s' placeholder
for the expression being cast.
"""
return "%s"
def date_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
extracts a value from the given date field field_name.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a date_extract_sql() "
"method"
)
def date_trunc_sql(self, lookup_type, field_name, tzname=None):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
truncates the given date or datetime field field_name to a date object
with only the given specificity.
If `tzname` is provided, the given value is truncated in a specific
timezone.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a date_trunc_sql() "
"method."
)
def datetime_cast_date_sql(self, field_name, tzname):
"""
        Return the SQL to cast a datetime value to a date value.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a "
"datetime_cast_date_sql() method."
)
def datetime_cast_time_sql(self, field_name, tzname):
"""
        Return the SQL to cast a datetime value to a time value.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a "
"datetime_cast_time_sql() method"
)
def datetime_extract_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that extracts a value from the given
datetime field field_name.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a datetime_extract_sql() "
"method"
)
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that truncates the given datetime field
field_name to a datetime object with only the given specificity.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() "
"method"
)
def time_trunc_sql(self, lookup_type, field_name, tzname=None):
"""
Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
that truncates the given time or datetime field field_name to a time
object with only the given specificity.
If `tzname` is provided, the given value is truncated in a specific
timezone.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a time_trunc_sql() method"
)
def time_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
that extracts a value from the given time field field_name.
"""
return self.date_extract_sql(lookup_type, field_name)
def deferrable_sql(self):
"""
Return the SQL to make a constraint "initially deferred" during a
CREATE TABLE statement.
"""
return ""
def distinct_sql(self, fields, params):
"""
Return an SQL DISTINCT clause which removes duplicate rows from the
result set. If any fields are given, only check the given fields for
duplicates.
"""
if fields:
raise NotSupportedError(
"DISTINCT ON fields is not supported by this database backend"
)
else:
return ["DISTINCT"], []
def fetch_returned_insert_columns(self, cursor, returning_params):
"""
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table, return the newly created data.
"""
return cursor.fetchone()
def field_cast_sql(self, db_type, internal_type):
"""
Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
(e.g. 'GenericIPAddressField'), return the SQL to cast it before using
it in a WHERE statement. The resulting string should contain a '%s'
placeholder for the column being searched against.
"""
return "%s"
def force_no_ordering(self):
"""
Return a list used in the "ORDER BY" clause to force no ordering at
all. Return an empty list to include nothing in the ordering.
"""
return []
def for_update_sql(self, nowait=False, skip_locked=False, of=(), no_key=False):
"""
Return the FOR UPDATE SQL clause to lock rows for an update operation.
"""
return "FOR%s UPDATE%s%s%s" % (
" NO KEY" if no_key else "",
" OF %s" % ", ".join(of) if of else "",
" NOWAIT" if nowait else "",
" SKIP LOCKED" if skip_locked else "",
)
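    # Example (illustrative):
    #     for_update_sql(nowait=True, of=('"app_book"',))
    #         -> 'FOR UPDATE OF "app_book" NOWAIT'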
def _get_limit_offset_params(self, low_mark, high_mark):
offset = low_mark or 0
if high_mark is not None:
return (high_mark - offset), offset
elif offset:
return self.connection.ops.no_limit_value(), offset
return None, offset
def limit_offset_sql(self, low_mark, high_mark):
"""Return LIMIT/OFFSET SQL clause."""
limit, offset = self._get_limit_offset_params(low_mark, high_mark)
return " ".join(
sql
for sql in (
("LIMIT %d" % limit) if limit else None,
("OFFSET %d" % offset) if offset else None,
)
if sql
)
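    # Worked examples (low_mark/high_mark are queryset slice bounds):
    #     limit_offset_sql(0, 10)   -> "LIMIT 10"
    #     limit_offset_sql(5, 15)   -> "LIMIT 10 OFFSET 5"
    #     limit_offset_sql(5, None) -> "OFFSET 5", possibly prefixed by a
    #         backend-specific LIMIT from no_limit_value()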
def last_executed_query(self, cursor, sql, params):
"""
Return a string of the query last executed by the given cursor, with
placeholders replaced with actual values.
`sql` is the raw query containing placeholders and `params` is the
sequence of parameters. These are used by default, but this method
exists for database backends to provide a better implementation
according to their own quoting schemes.
"""
# Convert params to contain string values.
def to_string(s):
return force_str(s, strings_only=True, errors="replace")
if isinstance(params, (list, tuple)):
u_params = tuple(to_string(val) for val in params)
elif params is None:
u_params = ()
else:
u_params = {to_string(k): to_string(v) for k, v in params.items()}
return "QUERY = %r - PARAMS = %r" % (sql, u_params)
def last_insert_id(self, cursor, table_name, pk_name):
"""
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, return the newly created ID.
`pk_name` is the name of the primary-key column.
"""
return cursor.lastrowid
def lookup_cast(self, lookup_type, internal_type=None):
"""
Return the string to use in a query when performing lookups
("contains", "like", etc.). It should contain a '%s' placeholder for
the column being searched against.
"""
return "%s"
def max_in_list_size(self):
"""
Return the maximum number of items that can be passed in a single 'IN'
list condition, or None if the backend does not impose a limit.
"""
return None
def max_name_length(self):
"""
Return the maximum length of table and column names, or None if there
is no limit.
"""
return None
def no_limit_value(self):
"""
        Return the value to use for the LIMIT when we want "LIMIT
infinity". Return None if the limit clause can be omitted in this case.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a no_limit_value() method"
)
def pk_default_value(self):
"""
Return the value to use during an INSERT statement to specify that
the field should use its default value.
"""
return "DEFAULT"
def prepare_sql_script(self, sql):
"""
Take an SQL script that may contain multiple lines and return a list
of statements to feed to successive cursor.execute() calls.
Since few databases are able to process raw SQL scripts in a single
cursor.execute() call and PEP 249 doesn't talk about this use case,
the default implementation is conservative.
"""
return [
sqlparse.format(statement, strip_comments=True)
for statement in sqlparse.split(sql)
if statement
]
def process_clob(self, value):
"""
Return the value of a CLOB column, for backends that return a locator
object that requires additional processing.
"""
return value
def return_insert_columns(self, fields):
"""
For backends that support returning columns as part of an insert query,
return the SQL and params to append to the INSERT query. The returned
fragment should contain a format string to hold the appropriate column.
"""
pass
def compiler(self, compiler_name):
"""
Return the SQLCompiler class corresponding to the given name,
in the namespace corresponding to the `compiler_module` attribute
on this backend.
"""
if self._cache is None:
self._cache = import_module(self.compiler_module)
return getattr(self._cache, compiler_name)
def quote_name(self, name):
"""
Return a quoted version of the given table, index, or column name. Do
not quote the given name if it's already been quoted.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a quote_name() method"
)
def regex_lookup(self, lookup_type):
"""
Return the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). It should contain a '%s'
placeholder for the column being searched against.
If the feature is not supported (or part of it is not supported), raise
NotImplementedError.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations may require a regex_lookup() method"
)
def savepoint_create_sql(self, sid):
"""
Return the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
"""
return "SAVEPOINT %s" % self.quote_name(sid)
def savepoint_commit_sql(self, sid):
"""
Return the SQL for committing the given savepoint.
"""
return "RELEASE SAVEPOINT %s" % self.quote_name(sid)
def savepoint_rollback_sql(self, sid):
"""
Return the SQL for rolling back the given savepoint.
"""
return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)
def set_time_zone_sql(self):
"""
Return the SQL that will set the connection's time zone.
Return '' if the backend doesn't support time zones.
"""
return ""
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
"""
Return a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves).
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
If `reset_sequences` is True, the list includes SQL statements required
to reset the sequences.
The `allow_cascade` argument determines whether truncation may cascade
        to tables with foreign keys pointing to the tables being truncated.
PostgreSQL requires a cascade even if these tables are empty.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseOperations must provide an sql_flush() method"
)
def execute_sql_flush(self, sql_list):
"""Execute a list of SQL statements to flush the database."""
with transaction.atomic(
using=self.connection.alias,
savepoint=self.connection.features.can_rollback_ddl,
):
with self.connection.cursor() as cursor:
for sql in sql_list:
cursor.execute(sql)
def sequence_reset_by_name_sql(self, style, sequences):
"""
Return a list of the SQL statements required to reset sequences
passed in `sequences`.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return []
def sequence_reset_sql(self, style, model_list):
"""
Return a list of the SQL statements required to reset sequences for
the given models.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return [] # No sequence reset required by default.
def start_transaction_sql(self):
"""Return the SQL statement required to start a transaction."""
return "BEGIN;"
def end_transaction_sql(self, success=True):
"""Return the SQL statement required to end a transaction."""
if not success:
return "ROLLBACK;"
return "COMMIT;"
def tablespace_sql(self, tablespace, inline=False):
"""
Return the SQL that will be used in a query to define the tablespace.
Return '' if the backend doesn't support tablespaces.
If `inline` is True, append the SQL to a row; otherwise append it to
the entire CREATE TABLE or CREATE INDEX statement.
"""
return ""
def prep_for_like_query(self, x):
"""Prepare a value for use in a LIKE query."""
return str(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")
# Same as prep_for_like_query(), but called for "iexact" matches, which
# need not necessarily be implemented using "LIKE" in the backend.
prep_for_iexact_query = prep_for_like_query
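    # Example: LIKE wildcards are escaped so they match literally
    # (illustrative):
    #     >>> ops.prep_for_like_query("100%_sale")
    #     '100\\%\\_sale'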
def validate_autopk_value(self, value):
"""
Certain backends do not accept some values for "serial" fields
(for example zero in MySQL). Raise a ValueError if the value is
invalid, otherwise return the validated value.
"""
return value
def adapt_unknown_value(self, value):
"""
Transform a value to something compatible with the backend driver.
This method only depends on the type of the value. It's designed for
cases where the target type isn't known, such as .raw() SQL queries.
As a consequence it may not work perfectly in all circumstances.
"""
if isinstance(value, datetime.datetime): # must be before date
return self.adapt_datetimefield_value(value)
elif isinstance(value, datetime.date):
return self.adapt_datefield_value(value)
elif isinstance(value, datetime.time):
return self.adapt_timefield_value(value)
elif isinstance(value, decimal.Decimal):
return self.adapt_decimalfield_value(value)
else:
return value
def adapt_datefield_value(self, value):
"""
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
"""
if value is None:
return None
return str(value)
def adapt_datetimefield_value(self, value):
"""
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
"""
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, "resolve_expression"):
return value
return str(value)
def adapt_timefield_value(self, value):
"""
Transform a time value to an object compatible with what is expected
by the backend driver for time columns.
"""
if value is None:
return None
# Expression values are adapted by the database.
if hasattr(value, "resolve_expression"):
return value
if timezone.is_aware(value):
raise ValueError("Django does not support timezone-aware times.")
return str(value)
def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
"""
Transform a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
"""
return utils.format_number(value, max_digits, decimal_places)
def adapt_ipaddressfield_value(self, value):
"""
Transform a string representation of an IP address into the expected
type for the backend driver.
"""
return value or None
def year_lookup_bounds_for_date_field(self, value, iso_year=False):
"""
        Return a two-element list with the lower and upper bounds to be used
with a BETWEEN operator to query a DateField value using a year
lookup.
`value` is an int, containing the looked-up year.
If `iso_year` is True, return bounds for ISO-8601 week-numbering years.
"""
if iso_year:
first = datetime.date.fromisocalendar(value, 1, 1)
second = datetime.date.fromisocalendar(
value + 1, 1, 1
) - datetime.timedelta(days=1)
else:
first = datetime.date(value, 1, 1)
second = datetime.date(value, 12, 31)
first = self.adapt_datefield_value(first)
second = self.adapt_datefield_value(second)
return [first, second]
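    # Illustrative bounds, assuming the base adapt_datefield_value(), which
    # stringifies dates:
    #     year_lookup_bounds_for_date_field(2021)
    #         -> ['2021-01-01', '2021-12-31']
    #     year_lookup_bounds_for_date_field(2021, iso_year=True)
    #         -> ['2021-01-04', '2022-01-02']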
def year_lookup_bounds_for_datetime_field(self, value, iso_year=False):
"""
        Return a two-element list with the lower and upper bounds to be used
with a BETWEEN operator to query a DateTimeField value using a year
lookup.
`value` is an int, containing the looked-up year.
If `iso_year` is True, return bounds for ISO-8601 week-numbering years.
"""
if iso_year:
first = datetime.datetime.fromisocalendar(value, 1, 1)
second = datetime.datetime.fromisocalendar(
value + 1, 1, 1
) - datetime.timedelta(microseconds=1)
else:
first = datetime.datetime(value, 1, 1)
second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
if settings.USE_TZ:
tz = timezone.get_current_timezone()
first = timezone.make_aware(first, tz)
second = timezone.make_aware(second, tz)
first = self.adapt_datetimefield_value(first)
second = self.adapt_datetimefield_value(second)
return [first, second]
def get_db_converters(self, expression):
"""
Return a list of functions needed to convert field data.
Some field types on some backends do not provide data in the correct
        format; this is the hook for converter functions.
"""
return []
def convert_durationfield_value(self, value, expression, connection):
if value is not None:
return datetime.timedelta(0, 0, value)
def check_expression_support(self, expression):
"""
Check that the backend supports the provided expression.
This is used on specific backends to rule out known expressions
that have problematic or nonexistent implementations. If the
expression has a known problem, the backend should raise
NotSupportedError.
"""
pass
def conditional_expression_supported_in_where_clause(self, expression):
"""
        Return True if the conditional expression is supported in the WHERE
clause.
"""
return True
def combine_expression(self, connector, sub_expressions):
"""
Combine a list of subexpressions into a single expression, using
the provided connecting operator. This is required because operators
can vary between backends (e.g., Oracle with %% and &) and between
subexpression types (e.g., date expressions).
"""
conn = " %s " % connector
return conn.join(sub_expressions)
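    # Example (illustrative): combine_expression("+", ["price", "tax"])
    # returns "price + tax"; backends override this where an operator needs
    # different SQL.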
def combine_duration_expression(self, connector, sub_expressions):
return self.combine_expression(connector, sub_expressions)
def binary_placeholder_sql(self, value):
"""
Some backends require special syntax to insert binary content (MySQL
for example uses '_binary %s').
"""
return "%s"
def modify_insert_params(self, placeholder, params):
"""
Allow modification of insert parameters. Needed for Oracle Spatial
backend due to #10888.
"""
return params
def integer_field_range(self, internal_type):
"""
Given an integer field internal type (e.g. 'PositiveIntegerField'),
return a tuple of the (min_value, max_value) form representing the
range of the column type bound to the field.
"""
return self.integer_field_ranges[internal_type]
def subtract_temporals(self, internal_type, lhs, rhs):
if self.connection.features.supports_temporal_subtraction:
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
return "(%s - %s)" % (lhs_sql, rhs_sql), (*lhs_params, *rhs_params)
raise NotSupportedError(
"This backend does not support %s subtraction." % internal_type
)
def window_frame_start(self, start):
if isinstance(start, int):
if start < 0:
return "%d %s" % (abs(start), self.PRECEDING)
elif start == 0:
return self.CURRENT_ROW
elif start is None:
return self.UNBOUNDED_PRECEDING
raise ValueError(
"start argument must be a negative integer, zero, or None, but got '%s'."
% start
)
def window_frame_end(self, end):
if isinstance(end, int):
if end == 0:
return self.CURRENT_ROW
elif end > 0:
return "%d %s" % (end, self.FOLLOWING)
elif end is None:
return self.UNBOUNDED_FOLLOWING
raise ValueError(
"end argument must be a positive integer, zero, or None, but got '%s'."
% end
)
def window_frame_rows_start_end(self, start=None, end=None):
"""
Return SQL for start and end points in an OVER clause window frame.
"""
if not self.connection.features.supports_over_clause:
raise NotSupportedError("This backend does not support window expressions.")
return self.window_frame_start(start), self.window_frame_end(end)
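    # Examples, assuming a backend with supports_over_clause=True:
    #     window_frame_rows_start_end(start=-3, end=0)
    #         -> ('3 PRECEDING', 'CURRENT ROW')
    #     window_frame_rows_start_end()
    #         -> ('UNBOUNDED PRECEDING', 'UNBOUNDED FOLLOWING')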
def window_frame_range_start_end(self, start=None, end=None):
start_, end_ = self.window_frame_rows_start_end(start, end)
features = self.connection.features
if features.only_supports_unbounded_with_preceding_and_following and (
(start and start < 0) or (end and end > 0)
):
raise NotSupportedError(
"%s only supports UNBOUNDED together with PRECEDING and "
"FOLLOWING." % self.connection.display_name
)
return start_, end_
def explain_query_prefix(self, format=None, **options):
if not self.connection.features.supports_explaining_query_execution:
raise NotSupportedError(
"This backend does not support explaining query execution."
)
if format:
supported_formats = self.connection.features.supported_explain_formats
normalized_format = format.upper()
if normalized_format not in supported_formats:
msg = "%s is not a recognized format." % normalized_format
if supported_formats:
msg += " Allowed formats: %s" % ", ".join(sorted(supported_formats))
else:
msg += (
f" {self.connection.display_name} does not support any formats."
)
raise ValueError(msg)
if options:
raise ValueError("Unknown options: %s" % ", ".join(sorted(options.keys())))
return self.explain_prefix
def insert_statement(self, on_conflict=None):
return "INSERT INTO"
def on_conflict_suffix_sql(self, fields, on_conflict, update_fields, unique_fields):
return ""
|
7d6130fa0cec2694915e27fb7c6e54db19edde01d1cae32e95f5e6fcf1502680 | import logging
from datetime import datetime
from django.db.backends.ddl_references import (
Columns,
Expressions,
ForeignKeyName,
IndexName,
Statement,
Table,
)
from django.db.backends.utils import names_digest, split_identifier
from django.db.models import Deferrable, Index
from django.db.models.sql import Query
from django.db.transaction import TransactionManagementError, atomic
from django.utils import timezone
logger = logging.getLogger("django.db.backends.schema")
def _is_relevant_relation(relation, altered_field):
"""
When altering the given field, must constraints on its model from the given
relation be temporarily dropped?
"""
field = relation.field
if field.many_to_many:
# M2M reverse field
return False
if altered_field.primary_key and field.to_fields == [None]:
# Foreign key constraint on the primary key, which is being altered.
return True
# Is the constraint targeting the field being altered?
return altered_field.name in field.to_fields
def _all_related_fields(model):
return model._meta._get_fields(
forward=False,
reverse=True,
include_hidden=True,
include_parents=False,
)
def _related_non_m2m_objects(old_field, new_field):
# Filter out m2m objects from reverse relations.
# Return (old_relation, new_relation) tuples.
related_fields = zip(
(
obj
for obj in _all_related_fields(old_field.model)
if _is_relevant_relation(obj, old_field)
),
(
obj
for obj in _all_related_fields(new_field.model)
if _is_relevant_relation(obj, new_field)
),
)
for old_rel, new_rel in related_fields:
yield old_rel, new_rel
yield from _related_non_m2m_objects(
old_rel.remote_field,
new_rel.remote_field,
)
class BaseDatabaseSchemaEditor:
"""
This class and its subclasses are responsible for emitting schema-changing
statements to the databases - model creation/removal/alteration, field
renaming, index fiddling, and so on.
"""
# Overrideable SQL templates
sql_create_table = "CREATE TABLE %(table)s (%(definition)s)"
sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s"
sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s"
sql_delete_table = "DROP TABLE %(table)s CASCADE"
sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s"
sql_alter_column = "ALTER TABLE %(table)s %(changes)s"
sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s"
sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL"
sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL"
sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s"
sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT"
sql_alter_column_no_default_null = sql_alter_column_no_default
sql_alter_column_collate = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s"
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE"
sql_rename_column = (
"ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s"
)
sql_update_with_default = (
"UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL"
)
sql_unique_constraint = "UNIQUE (%(columns)s)%(deferrable)s"
sql_check_constraint = "CHECK (%(check)s)"
sql_delete_constraint = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_constraint = "CONSTRAINT %(name)s %(constraint)s"
sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)"
sql_delete_check = sql_delete_constraint
sql_create_unique = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s "
"UNIQUE (%(columns)s)%(deferrable)s"
)
sql_delete_unique = sql_delete_constraint
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s"
)
sql_create_inline_fk = None
sql_create_column_inline_fk = None
sql_delete_fk = sql_delete_constraint
sql_create_index = (
"CREATE INDEX %(name)s ON %(table)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_create_unique_index = (
"CREATE UNIQUE INDEX %(name)s ON %(table)s "
"(%(columns)s)%(include)s%(condition)s"
)
sql_delete_index = "DROP INDEX %(name)s"
sql_create_pk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
)
sql_delete_pk = sql_delete_constraint
sql_delete_procedure = "DROP PROCEDURE %(procedure)s"
def __init__(self, connection, collect_sql=False, atomic=True):
self.connection = connection
self.collect_sql = collect_sql
if self.collect_sql:
self.collected_sql = []
self.atomic_migration = self.connection.features.can_rollback_ddl and atomic
# State-managing methods
def __enter__(self):
self.deferred_sql = []
if self.atomic_migration:
self.atomic = atomic(self.connection.alias)
self.atomic.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
for sql in self.deferred_sql:
self.execute(sql)
if self.atomic_migration:
self.atomic.__exit__(exc_type, exc_value, traceback)
# Core utility functions
def execute(self, sql, params=()):
"""Execute the given SQL statement, with optional parameters."""
# Don't perform the transactional DDL check if SQL is being collected
# as it's not going to be executed anyway.
if (
not self.collect_sql
and self.connection.in_atomic_block
and not self.connection.features.can_rollback_ddl
):
raise TransactionManagementError(
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
# Account for non-string statement objects.
sql = str(sql)
# Log the command we're running, then run it
logger.debug(
"%s; (params %r)", sql, params, extra={"params": params, "sql": sql}
)
if self.collect_sql:
ending = "" if sql.rstrip().endswith(";") else ";"
if params is not None:
self.collected_sql.append(
(sql % tuple(map(self.quote_value, params))) + ending
)
else:
self.collected_sql.append(sql + ending)
else:
with self.connection.cursor() as cursor:
cursor.execute(sql, params)
def quote_name(self, name):
return self.connection.ops.quote_name(name)
def table_sql(self, model):
"""Take a model and return its table definition."""
# Add any unique_togethers (always deferred, as some fields might be
# created afterward, like geometry fields with some backends).
for field_names in model._meta.unique_together:
fields = [model._meta.get_field(field) for field in field_names]
self.deferred_sql.append(self._create_unique_sql(model, fields))
# Create column SQL, add FK deferreds if needed.
column_sqls = []
params = []
for field in model._meta.local_fields:
# SQL.
definition, extra_params = self.column_sql(model, field)
if definition is None:
continue
# Check constraints can go on the column SQL here.
db_params = field.db_parameters(connection=self.connection)
if db_params["check"]:
definition += " " + self.sql_check_constraint % db_params
# Autoincrement SQL (for backends with inline variant).
col_type_suffix = field.db_type_suffix(connection=self.connection)
if col_type_suffix:
definition += " %s" % col_type_suffix
params.extend(extra_params)
# FK.
if field.remote_field and field.db_constraint:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(
field.remote_field.field_name
).column
if self.sql_create_inline_fk:
definition += " " + self.sql_create_inline_fk % {
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
}
elif self.connection.features.supports_foreign_keys:
self.deferred_sql.append(
self._create_fk_sql(
model, field, "_fk_%(to_table)s_%(to_column)s"
)
)
# Add the SQL to our big list.
column_sqls.append(
"%s %s"
% (
self.quote_name(field.column),
definition,
)
)
# Autoincrement SQL (for backends with post table definition
# variant).
if field.get_internal_type() in (
"AutoField",
"BigAutoField",
"SmallAutoField",
):
autoinc_sql = self.connection.ops.autoinc_sql(
model._meta.db_table, field.column
)
if autoinc_sql:
self.deferred_sql.extend(autoinc_sql)
constraints = [
constraint.constraint_sql(model, self)
for constraint in model._meta.constraints
]
sql = self.sql_create_table % {
"table": self.quote_name(model._meta.db_table),
"definition": ", ".join(
constraint for constraint in (*column_sqls, *constraints) if constraint
),
}
if model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(
model._meta.db_tablespace
)
if tablespace_sql:
sql += " " + tablespace_sql
return sql, params
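    # Illustrative sketch (an assumption, not from the original source): for
    # a simple model, table_sql() returns roughly the following; exact column
    # types vary by backend and Django version:
    #
    #   sql, params = editor.table_sql(Author)  # Author is hypothetical
    #   # sql ~= 'CREATE TABLE "app_author" ("id" ... PRIMARY KEY,
    #   #         "name" varchar(100) NOT NULL)'
    #   # params is typically []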
# Field <-> database mapping functions
def _iter_column_sql(
self, column_db_type, params, model, field, field_db_params, include_default
):
yield column_db_type
if collation := field_db_params.get("collation"):
yield self._collate_sql(collation)
# Work out nullability.
null = field.null
# Include a default value, if requested.
include_default = (
include_default
and not self.skip_default(field)
and
# Don't include a default value if it's a nullable field and the
# default cannot be dropped in the ALTER COLUMN statement (e.g.
# MySQL longtext and longblob).
not (null and self.skip_default_on_alter(field))
)
if include_default:
default_value = self.effective_default(field)
if default_value is not None:
column_default = "DEFAULT " + self._column_default_sql(field)
if self.connection.features.requires_literal_defaults:
# Some databases can't take defaults as a parameter (Oracle).
# If this is the case, the individual schema backend should
# implement prepare_default().
yield column_default % self.prepare_default(default_value)
else:
yield column_default
params.append(default_value)
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
if (
field.empty_strings_allowed
and not field.primary_key
and self.connection.features.interprets_empty_strings_as_nulls
):
null = True
if not null:
yield "NOT NULL"
elif not self.connection.features.implied_column_null:
yield "NULL"
if field.primary_key:
yield "PRIMARY KEY"
elif field.unique:
yield "UNIQUE"
# Optionally add the tablespace if it's an implicitly indexed column.
tablespace = field.db_tablespace or model._meta.db_tablespace
if (
tablespace
and self.connection.features.supports_tablespaces
and field.unique
):
yield self.connection.ops.tablespace_sql(tablespace, inline=True)
def column_sql(self, model, field, include_default=False):
"""
Return the column definition for a field. The field must already have
had set_attributes_from_name() called.
"""
# Get the column's type and use that as the basis of the SQL.
field_db_params = field.db_parameters(connection=self.connection)
column_db_type = field_db_params["type"]
# Check for fields that aren't actually columns (e.g. M2M).
if column_db_type is None:
return None, None
params = []
return (
" ".join(
# This appends to the params being returned.
self._iter_column_sql(
column_db_type,
params,
model,
field,
field_db_params,
include_default,
)
),
params,
)
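    # Illustrative sketch (assumption): column_sql() returns the definition
    # fragment plus any parameters it references. For a non-null
    # CharField(max_length=100) on PostgreSQL one would typically get:
    #
    #   definition, params = editor.column_sql(Author, name_field)
    #   # definition ~= 'varchar(100) NOT NULL', params == []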
def skip_default(self, field):
"""
        Some backends don't accept default values for certain column types
        (e.g. MySQL longtext and longblob).
"""
return False
def skip_default_on_alter(self, field):
"""
        Some backends don't accept default values for certain column types
        (e.g. MySQL longtext and longblob) in the ALTER COLUMN statement.
"""
return False
def prepare_default(self, value):
"""
        Only used for backends which have the requires_literal_defaults
        feature.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseSchemaEditor for backends which have "
"requires_literal_defaults must provide a prepare_default() method"
)
def _column_default_sql(self, field):
"""
Return the SQL to use in a DEFAULT clause. The resulting string should
contain a '%s' placeholder for a default value.
"""
return "%s"
@staticmethod
def _effective_default(field):
# This method allows testing its logic without a connection.
if field.has_default():
default = field.get_default()
elif not field.null and field.blank and field.empty_strings_allowed:
if field.get_internal_type() == "BinaryField":
default = b""
else:
default = ""
elif getattr(field, "auto_now", False) or getattr(field, "auto_now_add", False):
internal_type = field.get_internal_type()
if internal_type == "DateTimeField":
default = timezone.now()
else:
default = datetime.now()
if internal_type == "DateField":
default = default.date()
elif internal_type == "TimeField":
default = default.time()
else:
default = None
return default
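    # Illustrative examples (assumption): the fallback logic above means,
    # roughly:
    #
    #   CharField(blank=True)            -> ""   (empty string, not NULL)
    #   BinaryField(blank=True)          -> b""
    #   DateTimeField(auto_now_add=True) -> timezone.now()
    #   IntegerField(null=True)          -> None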
def effective_default(self, field):
"""Return a field's effective database default value."""
return field.get_db_prep_save(self._effective_default(field), self.connection)
def quote_value(self, value):
"""
Return a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
"""
raise NotImplementedError()
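    # For illustration only (check the backend you target): the PostgreSQL
    # schema editor implements quote_value() roughly via psycopg2's adapt():
    #
    #   def quote_value(self, value):
    #       if isinstance(value, str):
    #           value = value.replace("%", "%%")
    #       adapted = psycopg2.extensions.adapt(value)
    #       if hasattr(adapted, "encoding"):
    #           adapted.encoding = "utf-8"
    #       return adapted.getquoted().decode()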
# Actions
def create_model(self, model):
"""
Create a table and any accompanying indexes or unique constraints for
the given `model`.
"""
sql, params = self.table_sql(model)
        # Prevent using [] as params, in case a literal '%' is used in the
        # definition.
self.execute(sql, params or None)
        # Add any field indexes and index_together indexes (deferred, as
        # SQLite's _remake_table needs them).
self.deferred_sql.extend(self._model_indexes_sql(model))
# Make M2M tables
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.create_model(field.remote_field.through)
def delete_model(self, model):
"""Delete a model from the database."""
# Handle auto-created intermediary models
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# Delete the table
self.execute(
self.sql_delete_table
% {
"table": self.quote_name(model._meta.db_table),
}
)
# Remove all deferred statements referencing the deleted table.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_table(
model._meta.db_table
):
self.deferred_sql.remove(sql)
def add_index(self, model, index):
"""Add an index on a model."""
if (
index.contains_expressions
and not self.connection.features.supports_expression_indexes
):
return None
# Index.create_sql returns interpolated SQL which makes params=None a
# necessity to avoid escaping attempts on execution.
self.execute(index.create_sql(model, self), params=None)
def remove_index(self, model, index):
"""Remove an index from a model."""
if (
index.contains_expressions
and not self.connection.features.supports_expression_indexes
):
return None
self.execute(index.remove_sql(model, self))
def add_constraint(self, model, constraint):
"""Add a constraint to a model."""
sql = constraint.create_sql(model, self)
if sql:
# Constraint.create_sql returns interpolated SQL which makes
# params=None a necessity to avoid escaping attempts on execution.
self.execute(sql, params=None)
def remove_constraint(self, model, constraint):
"""Remove a constraint from a model."""
sql = constraint.remove_sql(model, self)
if sql:
self.execute(sql)
def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deal with a model changing its unique_together. The input
unique_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_unique_together}
news = {tuple(fields) for fields in new_unique_together}
# Deleted uniques
for fields in olds.difference(news):
self._delete_composed_index(
model, fields, {"unique": True}, self.sql_delete_unique
)
# Created uniques
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_unique_sql(model, fields))
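    # Illustrative call (assumption): the doubly-nested format mentioned in
    # the docstring looks like this -- note the tuples inside the outer list:
    #
    #   editor.alter_unique_together(
    #       Author,  # Author is a hypothetical model
    #       old_unique_together=[("first_name", "last_name")],
    #       new_unique_together=[("first_name", "last_name", "born")],
    #   )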
def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deal with a model changing its index_together. The input
index_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_index_together}
news = {tuple(fields) for fields in new_index_together}
# Deleted indexes
for fields in olds.difference(news):
self._delete_composed_index(
model,
fields,
{"index": True, "unique": False},
self.sql_delete_index,
)
# Created indexes
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_index_sql(model, fields=fields, suffix="_idx"))
def _delete_composed_index(self, model, fields, constraint_kwargs, sql):
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
meta_index_names = {constraint.name for constraint in model._meta.indexes}
columns = [model._meta.get_field(field).column for field in fields]
constraint_names = self._constraint_names(
model,
columns,
exclude=meta_constraint_names | meta_index_names,
**constraint_kwargs,
)
if len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of constraints for %s(%s)"
% (
len(constraint_names),
model._meta.db_table,
", ".join(columns),
)
)
self.execute(self._delete_constraint_sql(sql, model, constraint_names[0]))
def alter_db_table(self, model, old_db_table, new_db_table):
"""Rename the table a model points to."""
if old_db_table == new_db_table or (
self.connection.features.ignores_table_name_case
and old_db_table.lower() == new_db_table.lower()
):
return
self.execute(
self.sql_rename_table
% {
"old_table": self.quote_name(old_db_table),
"new_table": self.quote_name(new_db_table),
}
)
# Rename all references to the old table name.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_table_references(old_db_table, new_db_table)
def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
"""Move a model's table between tablespaces."""
self.execute(
self.sql_retablespace_table
% {
"table": self.quote_name(model._meta.db_table),
"old_tablespace": self.quote_name(old_db_tablespace),
"new_tablespace": self.quote_name(new_db_tablespace),
}
)
def add_field(self, model, field):
"""
Create a field on a model. Usually involves adding a column, but may
involve adding a table instead (for M2M fields).
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.create_model(field.remote_field.through)
# Get the column's definition
definition, params = self.column_sql(model, field, include_default=True)
# It might not actually have a column behind it
if definition is None:
return
# Check constraints can go on the column SQL here
db_params = field.db_parameters(connection=self.connection)
if db_params["check"]:
definition += " " + self.sql_check_constraint % db_params
if (
field.remote_field
and self.connection.features.supports_foreign_keys
and field.db_constraint
):
constraint_suffix = "_fk_%(to_table)s_%(to_column)s"
# Add FK constraint inline, if supported.
if self.sql_create_column_inline_fk:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(
field.remote_field.field_name
).column
namespace, _ = split_identifier(model._meta.db_table)
definition += " " + self.sql_create_column_inline_fk % {
"name": self._fk_constraint_name(model, field, constraint_suffix),
"namespace": "%s." % self.quote_name(namespace)
if namespace
else "",
"column": self.quote_name(field.column),
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
"deferrable": self.connection.ops.deferrable_sql(),
}
# Otherwise, add FK constraints later.
else:
self.deferred_sql.append(
self._create_fk_sql(model, field, constraint_suffix)
)
# Build the SQL and run it
sql = self.sql_create_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
"definition": definition,
}
self.execute(sql, params)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if (
not self.skip_default_on_alter(field)
and self.effective_default(field) is not None
):
changes_sql, params = self._alter_column_default_sql(
model, None, field, drop=True
)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Add an index, if required
self.deferred_sql.extend(self._field_indexes_sql(model, field))
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def remove_field(self, model, field):
"""
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.delete_model(field.remote_field.through)
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)["type"] is None:
return
        # Drop any FK constraints; MySQL requires explicit deletion
if field.remote_field:
fk_names = self._constraint_names(model, [field.column], foreign_key=True)
for fk_name in fk_names:
self.execute(self._delete_fk_sql(model, fk_name))
# Delete the column
sql = self.sql_delete_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
self.execute(sql)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
# Remove all deferred statements referencing the deleted column.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_column(
model._meta.db_table, field.column
):
self.deferred_sql.remove(sql)
def alter_field(self, model, old_field, new_field, strict=False):
"""
Allow a field's type, uniqueness, nullability, default, column,
constraints, etc. to be modified.
`old_field` is required to compute the necessary changes.
If `strict` is True, raise errors if the old column does not match
`old_field` precisely.
"""
if not self._field_should_be_altered(old_field, new_field):
return
# Ensure this field is even column-based
old_db_params = old_field.db_parameters(connection=self.connection)
old_type = old_db_params["type"]
new_db_params = new_field.db_parameters(connection=self.connection)
new_type = new_db_params["type"]
if (old_type is None and old_field.remote_field is None) or (
new_type is None and new_field.remote_field is None
):
raise ValueError(
"Cannot alter field %s into %s - they do not properly define "
"db_type (are you using a badly-written custom field?)"
% (old_field, new_field),
)
elif (
old_type is None
and new_type is None
and (
old_field.remote_field.through
and new_field.remote_field.through
and old_field.remote_field.through._meta.auto_created
and new_field.remote_field.through._meta.auto_created
)
):
return self._alter_many_to_many(model, old_field, new_field, strict)
elif (
old_type is None
and new_type is None
and (
old_field.remote_field.through
and new_field.remote_field.through
and not old_field.remote_field.through._meta.auto_created
and not new_field.remote_field.through._meta.auto_created
)
):
# Both sides have through models; this is a no-op.
return
elif old_type is None or new_type is None:
raise ValueError(
"Cannot alter field %s into %s - they are not compatible types "
"(you cannot alter to or from M2M fields, or add or remove "
"through= on M2M fields)" % (old_field, new_field)
)
self._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict,
)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
"""Perform a "physical" (non-ManyToMany) field update."""
        # Drop any FK constraints; we'll remake them later
fks_dropped = set()
if (
self.connection.features.supports_foreign_keys
and old_field.remote_field
and old_field.db_constraint
):
fk_names = self._constraint_names(
model, [old_field.column], foreign_key=True
)
if strict and len(fk_names) != 1:
raise ValueError(
"Found wrong number (%s) of foreign key constraints for %s.%s"
% (
len(fk_names),
model._meta.db_table,
old_field.column,
)
)
for fk_name in fk_names:
fks_dropped.add((old_field.column,))
self.execute(self._delete_fk_sql(model, fk_name))
# Has unique been removed?
if old_field.unique and (
not new_field.unique or self._field_became_primary_key(old_field, new_field)
):
# Find the unique constraint for this field
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
constraint_names = self._constraint_names(
model,
[old_field.column],
unique=True,
primary_key=False,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of unique constraints for %s.%s"
% (
len(constraint_names),
model._meta.db_table,
old_field.column,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_unique_sql(model, constraint_name))
# Drop incoming FK constraints if the field is a primary key or unique,
# which might be a to_field target, and things are going to change.
drop_foreign_keys = (
self.connection.features.supports_foreign_keys
and (
(old_field.primary_key and new_field.primary_key)
or (old_field.unique and new_field.unique)
)
and old_type != new_type
)
if drop_foreign_keys:
            # '_meta.related_field' also contains M2M reverse fields; these
            # will be filtered out.
for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
rel_fk_names = self._constraint_names(
new_rel.related_model, [new_rel.field.column], foreign_key=True
)
for fk_name in rel_fk_names:
self.execute(self._delete_fk_sql(new_rel.related_model, fk_name))
# Removed an index? (no strict check, as multiple indexes are possible)
# Remove indexes if db_index switched to False or a unique constraint
# will now be used in lieu of an index. The following lines from the
# truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# True | False | False | False
# True | False | False | True
# True | False | True | True
if (
old_field.db_index
and not old_field.unique
and (not new_field.db_index or new_field.unique)
):
# Find the index for this field
meta_index_names = {index.name for index in model._meta.indexes}
# Retrieve only BTREE indexes since this is what's created with
# db_index=True.
index_names = self._constraint_names(
model,
[old_field.column],
index=True,
type_=Index.suffix,
exclude=meta_index_names,
)
for index_name in index_names:
# The only way to check if an index was created with
# db_index=True or with Index(['field'], name='foo')
# is to look at its name (refs #28053).
self.execute(self._delete_index_sql(model, index_name))
# Change check constraints?
if old_db_params["check"] != new_db_params["check"] and old_db_params["check"]:
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
constraint_names = self._constraint_names(
model,
[old_field.column],
check=True,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of check constraints for %s.%s"
% (
len(constraint_names),
model._meta.db_table,
old_field.column,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_check_sql(model, constraint_name))
# Have they renamed the column?
if old_field.column != new_field.column:
self.execute(
self._rename_field_sql(
model._meta.db_table, old_field, new_field, new_type
)
)
# Rename all references to the renamed column.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_column_references(
model._meta.db_table, old_field.column, new_field.column
)
# Next, start accumulating actions to do
actions = []
null_actions = []
post_actions = []
# Type suffix change? (e.g. auto increment).
old_type_suffix = old_field.db_type_suffix(connection=self.connection)
new_type_suffix = new_field.db_type_suffix(connection=self.connection)
# Collation change?
old_collation = old_db_params.get("collation")
new_collation = new_db_params.get("collation")
if old_collation != new_collation:
# Collation change handles also a type change.
fragment = self._alter_column_collation_sql(
model, new_field, new_type, new_collation
)
actions.append(fragment)
# Type change?
elif (old_type, old_type_suffix) != (new_type, new_type_suffix):
fragment, other_actions = self._alter_column_type_sql(
model, old_field, new_field, new_type
)
actions.append(fragment)
post_actions.extend(other_actions)
# When changing a column NULL constraint to NOT NULL with a given
# default value, we need to perform 4 steps:
# 1. Add a default for new incoming writes
# 2. Update existing NULL rows with new default
# 3. Replace NULL constraint with NOT NULL
        # 4. Drop the default again.
        # (An illustrative SQL sequence for these four steps is sketched
        # after this method.)
# Default change?
needs_database_default = False
if old_field.null and not new_field.null:
old_default = self.effective_default(old_field)
new_default = self.effective_default(new_field)
if (
not self.skip_default_on_alter(new_field)
and old_default != new_default
and new_default is not None
):
needs_database_default = True
actions.append(
self._alter_column_default_sql(model, old_field, new_field)
)
# Nullability change?
if old_field.null != new_field.null:
fragment = self._alter_column_null_sql(model, old_field, new_field)
if fragment:
null_actions.append(fragment)
# Only if we have a default and there is a change from NULL to NOT NULL
four_way_default_alteration = new_field.has_default() and (
old_field.null and not new_field.null
)
if actions or null_actions:
if not four_way_default_alteration:
# If we don't have to do a 4-way default alteration we can
# directly run a (NOT) NULL alteration
actions = actions + null_actions
# Combine actions together if we can (e.g. postgres)
if self.connection.features.supports_combined_alters and actions:
sql, params = tuple(zip(*actions))
actions = [(", ".join(sql), sum(params, []))]
# Apply those actions
for sql, params in actions:
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if four_way_default_alteration:
# Update existing rows with default value
self.execute(
self.sql_update_with_default
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(new_field.column),
"default": "%s",
},
[new_default],
)
            # Since we didn't run a NOT NULL change before, we need to do it
            # now.
for sql, params in null_actions:
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if post_actions:
for sql, params in post_actions:
self.execute(sql, params)
# If primary_key changed to False, delete the primary key constraint.
if old_field.primary_key and not new_field.primary_key:
self._delete_primary_key(model, strict)
# Added a unique?
if self._unique_should_be_added(old_field, new_field):
self.execute(self._create_unique_sql(model, [new_field]))
# Added an index? Add an index if db_index switched to True or a unique
# constraint will no longer be used in lieu of an index. The following
# lines from the truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# False | False | True | False
# False | True | True | False
# True | True | True | False
if (
(not old_field.db_index or old_field.unique)
and new_field.db_index
and not new_field.unique
):
self.execute(self._create_index_sql(model, fields=[new_field]))
# Type alteration on primary key? Then we need to alter the column
# referring to us.
rels_to_update = []
if drop_foreign_keys:
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Changed to become primary key?
if self._field_became_primary_key(old_field, new_field):
# Make the new one
self.execute(self._create_primary_key_sql(model, new_field))
# Update all referencing columns
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Handle our type alters on the other end of rels from the PK stuff above
for old_rel, new_rel in rels_to_update:
rel_db_params = new_rel.field.db_parameters(connection=self.connection)
rel_type = rel_db_params["type"]
fragment, other_actions = self._alter_column_type_sql(
new_rel.related_model, old_rel.field, new_rel.field, rel_type
)
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(new_rel.related_model._meta.db_table),
"changes": fragment[0],
},
fragment[1],
)
for sql, params in other_actions:
self.execute(sql, params)
# Does it have a foreign key?
if (
self.connection.features.supports_foreign_keys
and new_field.remote_field
and (
fks_dropped or not old_field.remote_field or not old_field.db_constraint
)
and new_field.db_constraint
):
self.execute(
self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s")
)
# Rebuild FKs that pointed to us if we previously had to drop them
if drop_foreign_keys:
for _, rel in rels_to_update:
if rel.field.db_constraint:
self.execute(
self._create_fk_sql(rel.related_model, rel.field, "_fk")
)
# Does it have check constraints we need to add?
if old_db_params["check"] != new_db_params["check"] and new_db_params["check"]:
constraint_name = self._create_index_name(
model._meta.db_table, [new_field.column], suffix="_check"
)
self.execute(
self._create_check_sql(model, constraint_name, new_db_params["check"])
)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if needs_database_default:
changes_sql, params = self._alter_column_default_sql(
model, old_field, new_field, drop=True
)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
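    # Illustrative sketch (assumption): for a NULL -> NOT NULL change with a
    # default, the four steps described above translate on PostgreSQL to
    # roughly this statement sequence (table/column names are made up):
    #
    #   ALTER TABLE "app_book" ALTER COLUMN "title" SET DEFAULT '';
    #   UPDATE "app_book" SET "title" = '' WHERE "title" IS NULL;
    #   ALTER TABLE "app_book" ALTER COLUMN "title" SET NOT NULL;
    #   ALTER TABLE "app_book" ALTER COLUMN "title" DROP DEFAULT;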
def _alter_column_null_sql(self, model, old_field, new_field):
"""
Hook to specialize column null alteration.
Return a (sql, params) fragment to set a column to null or non-null
as required by new_field, or None if no changes are required.
"""
if (
self.connection.features.interprets_empty_strings_as_nulls
and new_field.empty_strings_allowed
):
# The field is nullable in the database anyway, leave it alone.
return
else:
new_db_params = new_field.db_parameters(connection=self.connection)
sql = (
self.sql_alter_column_null
if new_field.null
else self.sql_alter_column_not_null
)
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_db_params["type"],
},
[],
)
def _alter_column_default_sql(self, model, old_field, new_field, drop=False):
"""
Hook to specialize column default alteration.
Return a (sql, params) fragment to add or drop (depending on the drop
argument) a default to new_field's column.
"""
new_default = self.effective_default(new_field)
default = self._column_default_sql(new_field)
params = [new_default]
if drop:
params = []
elif self.connection.features.requires_literal_defaults:
# Some databases (Oracle) can't take defaults as a parameter
# If this is the case, the SchemaEditor for that database should
# implement prepare_default().
default = self.prepare_default(new_default)
params = []
new_db_params = new_field.db_parameters(connection=self.connection)
if drop:
if new_field.null:
sql = self.sql_alter_column_no_default_null
else:
sql = self.sql_alter_column_no_default
else:
sql = self.sql_alter_column_default
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_db_params["type"],
"default": default,
},
params,
)
def _alter_column_type_sql(self, model, old_field, new_field, new_type):
"""
Hook to specialize column type alteration for different backends,
for cases when a creation type is different to an alteration type
(e.g. SERIAL in PostgreSQL, PostGIS fields).
Return a two-tuple of: an SQL fragment of (sql, params) to insert into
an ALTER TABLE statement and a list of extra (sql, params) tuples to
run once the field is altered.
"""
return (
(
self.sql_alter_column_type
% {
"column": self.quote_name(new_field.column),
"type": new_type,
},
[],
),
[],
)
def _alter_column_collation_sql(self, model, new_field, new_type, new_collation):
return (
self.sql_alter_column_collate
% {
"column": self.quote_name(new_field.column),
"type": new_type,
"collation": " " + self._collate_sql(new_collation)
if new_collation
else "",
},
[],
)
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""Alter M2Ms to repoint their to= endpoints."""
# Rename the through table
if (
old_field.remote_field.through._meta.db_table
!= new_field.remote_field.through._meta.db_table
):
self.alter_db_table(
old_field.remote_field.through,
old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table,
)
# Repoint the FK to the other side
self.alter_field(
new_field.remote_field.through,
# The field that points to the target model is needed, so we can
# tell alter_field to change it - this is m2m_reverse_field_name()
# (as opposed to m2m_field_name(), which points to our model).
old_field.remote_field.through._meta.get_field(
old_field.m2m_reverse_field_name()
),
new_field.remote_field.through._meta.get_field(
new_field.m2m_reverse_field_name()
),
)
self.alter_field(
new_field.remote_field.through,
            # For self-referential models we need to alter the field from the
            # other end too.
old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()),
)
def _create_index_name(self, table_name, column_names, suffix=""):
"""
Generate a unique name for an index/unique constraint.
The name is divided into 3 parts: the table name, the column names,
and a unique digest and suffix.
"""
_, table_name = split_identifier(table_name)
hash_suffix_part = "%s%s" % (
names_digest(table_name, *column_names, length=8),
suffix,
)
max_length = self.connection.ops.max_name_length() or 200
# If everything fits into max_length, use that name.
index_name = "%s_%s_%s" % (table_name, "_".join(column_names), hash_suffix_part)
if len(index_name) <= max_length:
return index_name
# Shorten a long suffix.
if len(hash_suffix_part) > max_length / 3:
hash_suffix_part = hash_suffix_part[: max_length // 3]
other_length = (max_length - len(hash_suffix_part)) // 2 - 1
index_name = "%s_%s_%s" % (
table_name[:other_length],
"_".join(column_names)[:other_length],
hash_suffix_part,
)
# Prepend D if needed to prevent the name from starting with an
# underscore or a number (not permitted on Oracle).
if index_name[0] == "_" or index_name[0].isdigit():
index_name = "D%s" % index_name[:-1]
return index_name
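    # Illustrative example (the digest below is made up): for
    # table_name="app_author", column_names=["name"], suffix="_idx", the
    # generated name has the shape:
    #
    #   app_author_name_1a2b3c4d_idx
    #
    # and is truncated/rebalanced as above when it exceeds the backend's
    # max_name_length().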
def _get_index_tablespace_sql(self, model, fields, db_tablespace=None):
if db_tablespace is None:
if len(fields) == 1 and fields[0].db_tablespace:
db_tablespace = fields[0].db_tablespace
elif model._meta.db_tablespace:
db_tablespace = model._meta.db_tablespace
if db_tablespace is not None:
return " " + self.connection.ops.tablespace_sql(db_tablespace)
return ""
def _index_condition_sql(self, condition):
if condition:
return " WHERE " + condition
return ""
def _index_include_sql(self, model, columns):
if not columns or not self.connection.features.supports_covering_indexes:
return ""
return Statement(
" INCLUDE (%(columns)s)",
columns=Columns(model._meta.db_table, columns, self.quote_name),
)
def _create_index_sql(
self,
model,
*,
fields=None,
name=None,
suffix="",
using="",
db_tablespace=None,
col_suffixes=(),
sql=None,
opclasses=(),
condition=None,
include=None,
expressions=None,
):
"""
Return the SQL statement to create the index for one or several fields
or expressions. `sql` can be specified if the syntax differs from the
standard (GIS indexes, ...).
"""
fields = fields or []
expressions = expressions or []
compiler = Query(model, alias_cols=False).get_compiler(
connection=self.connection,
)
tablespace_sql = self._get_index_tablespace_sql(
model, fields, db_tablespace=db_tablespace
)
columns = [field.column for field in fields]
sql_create_index = sql or self.sql_create_index
table = model._meta.db_table
def create_index_name(*args, **kwargs):
nonlocal name
if name is None:
name = self._create_index_name(*args, **kwargs)
return self.quote_name(name)
return Statement(
sql_create_index,
table=Table(table, self.quote_name),
name=IndexName(table, columns, suffix, create_index_name),
using=using,
columns=(
self._index_columns(table, columns, col_suffixes, opclasses)
if columns
else Expressions(table, expressions, compiler, self.quote_value)
),
extra=tablespace_sql,
condition=self._index_condition_sql(condition),
include=self._index_include_sql(model, include),
)
def _delete_index_sql(self, model, name, sql=None):
return Statement(
sql or self.sql_delete_index,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
)
def _index_columns(self, table, columns, col_suffixes, opclasses):
return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes)
def _model_indexes_sql(self, model):
"""
Return a list of all index SQL statements (field indexes,
index_together, Meta.indexes) for the specified model.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for field in model._meta.local_fields:
output.extend(self._field_indexes_sql(model, field))
for field_names in model._meta.index_together:
fields = [model._meta.get_field(field) for field in field_names]
output.append(self._create_index_sql(model, fields=fields, suffix="_idx"))
for index in model._meta.indexes:
if (
not index.contains_expressions
or self.connection.features.supports_expression_indexes
):
output.append(index.create_sql(model, self))
return output
def _field_indexes_sql(self, model, field):
"""
Return a list of all index SQL statements for the specified field.
"""
output = []
if self._field_should_be_indexed(model, field):
output.append(self._create_index_sql(model, fields=[field]))
return output
def _field_should_be_altered(self, old_field, new_field):
_, old_path, old_args, old_kwargs = old_field.deconstruct()
_, new_path, new_args, new_kwargs = new_field.deconstruct()
# Don't alter when:
# - changing only a field name
# - changing an attribute that doesn't affect the schema
# - adding only a db_column and the column name is not changed
for attr in old_field.non_db_attrs:
old_kwargs.pop(attr, None)
for attr in new_field.non_db_attrs:
new_kwargs.pop(attr, None)
return self.quote_name(old_field.column) != self.quote_name(
new_field.column
) or (old_path, old_args, old_kwargs) != (new_path, new_args, new_kwargs)
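    # Illustrative consequence (assumption): attributes listed in a field's
    # non_db_attrs (e.g. verbose_name, help_text, validators) are stripped
    # from the deconstructed kwargs above, so a change such as
    #
    #   CharField(max_length=50) -> CharField(max_length=50, help_text="x")
    #
    # compares equal and triggers no ALTER, while max_length=50 -> 100 does.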
def _field_should_be_indexed(self, model, field):
return field.db_index and not field.unique
def _field_became_primary_key(self, old_field, new_field):
return not old_field.primary_key and new_field.primary_key
def _unique_should_be_added(self, old_field, new_field):
return (
not new_field.primary_key
and new_field.unique
and (not old_field.unique or old_field.primary_key)
)
def _rename_field_sql(self, table, old_field, new_field, new_type):
return self.sql_rename_column % {
"table": self.quote_name(table),
"old_column": self.quote_name(old_field.column),
"new_column": self.quote_name(new_field.column),
"type": new_type,
}
def _create_fk_sql(self, model, field, suffix):
table = Table(model._meta.db_table, self.quote_name)
name = self._fk_constraint_name(model, field, suffix)
column = Columns(model._meta.db_table, [field.column], self.quote_name)
to_table = Table(field.target_field.model._meta.db_table, self.quote_name)
to_column = Columns(
field.target_field.model._meta.db_table,
[field.target_field.column],
self.quote_name,
)
deferrable = self.connection.ops.deferrable_sql()
return Statement(
self.sql_create_fk,
table=table,
name=name,
column=column,
to_table=to_table,
to_column=to_column,
deferrable=deferrable,
)
def _fk_constraint_name(self, model, field, suffix):
def create_fk_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
return ForeignKeyName(
model._meta.db_table,
[field.column],
split_identifier(field.target_field.model._meta.db_table)[1],
[field.target_field.column],
suffix,
create_fk_name,
)
def _delete_fk_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_fk, model, name)
def _deferrable_constraint_sql(self, deferrable):
if deferrable is None:
return ""
if deferrable == Deferrable.DEFERRED:
return " DEFERRABLE INITIALLY DEFERRED"
if deferrable == Deferrable.IMMEDIATE:
return " DEFERRABLE INITIALLY IMMEDIATE"
def _unique_sql(
self,
model,
fields,
name,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
):
return None
if condition or include or opclasses or expressions:
# Databases support conditional, covering, and functional unique
# constraints via a unique index.
sql = self._create_unique_sql(
model,
fields,
name=name,
condition=condition,
include=include,
opclasses=opclasses,
expressions=expressions,
)
if sql:
self.deferred_sql.append(sql)
return None
constraint = self.sql_unique_constraint % {
"columns": ", ".join([self.quote_name(field.column) for field in fields]),
"deferrable": self._deferrable_constraint_sql(deferrable),
}
return self.sql_constraint % {
"name": self.quote_name(name),
"constraint": constraint,
}
def _create_unique_sql(
self,
model,
fields,
name=None,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
(
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
)
or (condition and not self.connection.features.supports_partial_indexes)
or (include and not self.connection.features.supports_covering_indexes)
or (
expressions and not self.connection.features.supports_expression_indexes
)
):
return None
def create_unique_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
compiler = Query(model, alias_cols=False).get_compiler(
connection=self.connection
)
table = model._meta.db_table
columns = [field.column for field in fields]
if name is None:
name = IndexName(table, columns, "_uniq", create_unique_name)
else:
name = self.quote_name(name)
if condition or include or opclasses or expressions:
sql = self.sql_create_unique_index
else:
sql = self.sql_create_unique
if columns:
columns = self._index_columns(
table, columns, col_suffixes=(), opclasses=opclasses
)
else:
columns = Expressions(table, expressions, compiler, self.quote_value)
return Statement(
sql,
table=Table(table, self.quote_name),
name=name,
columns=columns,
condition=self._index_condition_sql(condition),
deferrable=self._deferrable_constraint_sql(deferrable),
include=self._index_include_sql(model, include),
)
def _delete_unique_sql(
self,
model,
name,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
(
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
)
or (condition and not self.connection.features.supports_partial_indexes)
or (include and not self.connection.features.supports_covering_indexes)
or (
expressions and not self.connection.features.supports_expression_indexes
)
):
return None
if condition or include or opclasses or expressions:
sql = self.sql_delete_index
else:
sql = self.sql_delete_unique
return self._delete_constraint_sql(sql, model, name)
def _check_sql(self, name, check):
return self.sql_constraint % {
"name": self.quote_name(name),
"constraint": self.sql_check_constraint % {"check": check},
}
def _create_check_sql(self, model, name, check):
return Statement(
self.sql_create_check,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
check=check,
)
def _delete_check_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_check, model, name)
def _delete_constraint_sql(self, template, model, name):
return Statement(
template,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
)
def _constraint_names(
self,
model,
column_names=None,
unique=None,
primary_key=None,
index=None,
foreign_key=None,
check=None,
type_=None,
exclude=None,
):
"""Return all constraint names matching the columns and conditions."""
if column_names is not None:
column_names = [
self.connection.introspection.identifier_converter(name)
for name in column_names
]
with self.connection.cursor() as cursor:
constraints = self.connection.introspection.get_constraints(
cursor, model._meta.db_table
)
result = []
for name, infodict in constraints.items():
if column_names is None or column_names == infodict["columns"]:
if unique is not None and infodict["unique"] != unique:
continue
if primary_key is not None and infodict["primary_key"] != primary_key:
continue
if index is not None and infodict["index"] != index:
continue
if check is not None and infodict["check"] != check:
continue
if foreign_key is not None and not infodict["foreign_key"]:
continue
if type_ is not None and infodict["type"] != type_:
continue
if not exclude or name not in exclude:
result.append(name)
return result
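    # Illustrative usage (mirroring the callers above): find the unique
    # constraint Django created for a column, excluding any constraints
    # declared in Meta.constraints:
    #
    #   names = self._constraint_names(
    #       model, ["name"], unique=True, primary_key=False,
    #       exclude=meta_constraint_names,
    #   )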
def _delete_primary_key(self, model, strict=False):
constraint_names = self._constraint_names(model, primary_key=True)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of PK constraints for %s"
% (
len(constraint_names),
model._meta.db_table,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_primary_key_sql(model, constraint_name))
def _create_primary_key_sql(self, model, field):
return Statement(
self.sql_create_pk,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(
self._create_index_name(
model._meta.db_table, [field.column], suffix="_pk"
)
),
columns=Columns(model._meta.db_table, [field.column], self.quote_name),
)
def _delete_primary_key_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_pk, model, name)
def _collate_sql(self, collation):
return "COLLATE " + self.quote_name(collation)
def remove_procedure(self, procedure_name, param_types=()):
sql = self.sql_delete_procedure % {
"procedure": self.quote_name(procedure_name),
"param_types": ",".join(param_types),
}
self.execute(sql)
|
c965f550168e450de8f8302a1691aaf21bf35a91caf19070e39f115479010b04 | import operator
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
empty_fetchmany_value = ()
allows_group_by_pk = True
related_fields_match_type = True
# MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME.
allow_sliced_subqueries_with_in = False
has_select_for_update = True
supports_forward_references = False
supports_regex_backreferencing = False
supports_date_lookup_using_string = False
supports_timezones = False
requires_explicit_null_ordering_when_grouping = True
can_release_savepoints = True
atomic_transactions = False
can_clone_databases = True
supports_temporal_subtraction = True
supports_select_intersection = False
supports_select_difference = False
supports_slicing_ordering_in_compound = True
supports_index_on_text_field = False
supports_update_conflicts = True
create_test_procedure_without_params_sql = """
CREATE PROCEDURE test_procedure ()
BEGIN
DECLARE V_I INTEGER;
SET V_I = 1;
END;
"""
create_test_procedure_with_int_param_sql = """
CREATE PROCEDURE test_procedure (P_I INTEGER)
BEGIN
DECLARE V_I INTEGER;
SET V_I = P_I;
END;
"""
# Neither MySQL nor MariaDB support partial indexes.
supports_partial_indexes = False
# COLLATE must be wrapped in parentheses because MySQL treats COLLATE as an
# indexed expression.
collate_as_index_expression = True
supports_order_by_nulls_modifier = False
order_by_nulls_first = True
supports_logical_xor = True
@cached_property
def minimum_database_version(self):
if self.connection.mysql_is_mariadb:
return (10, 2)
else:
return (5, 7)
@cached_property
def bare_select_suffix(self):
if (
self.connection.mysql_is_mariadb and self.connection.mysql_version < (10, 4)
) or (
not self.connection.mysql_is_mariadb
and self.connection.mysql_version < (8,)
):
return " FROM DUAL"
return ""
@cached_property
def test_collations(self):
charset = "utf8"
if self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
10,
6,
):
# utf8 is an alias for utf8mb3 in MariaDB 10.6+.
charset = "utf8mb3"
return {
"ci": f"{charset}_general_ci",
"non_default": f"{charset}_esperanto_ci",
"swedish_ci": f"{charset}_swedish_ci",
}
test_now_utc_template = "UTC_TIMESTAMP"
@cached_property
def django_test_skips(self):
skips = {
"This doesn't work on MySQL.": {
"db_functions.comparison.test_greatest.GreatestTests."
"test_coalesce_workaround",
"db_functions.comparison.test_least.LeastTests."
"test_coalesce_workaround",
},
"Running on MySQL requires utf8mb4 encoding (#18392).": {
"model_fields.test_textfield.TextFieldTests.test_emoji",
"model_fields.test_charfield.TestCharField.test_emoji",
},
"MySQL doesn't support functional indexes on a function that "
"returns JSON": {
"schema.tests.SchemaTests.test_func_index_json_key_transform",
},
"MySQL supports multiplying and dividing DurationFields by a "
"scalar value but it's not implemented (#25287).": {
"expressions.tests.FTimeDeltaTests.test_durationfield_multiply_divide",
},
}
if "ONLY_FULL_GROUP_BY" in self.connection.sql_mode:
skips.update(
{
"GROUP BY optimization does not work properly when "
"ONLY_FULL_GROUP_BY mode is enabled on MySQL, see #31331.": {
"aggregation.tests.AggregateTestCase."
"test_aggregation_subquery_annotation_multivalued",
"annotations.tests.NonAggregateAnnotationTestCase."
"test_annotation_aggregate_with_m2o",
},
}
)
if not self.connection.mysql_is_mariadb and self.connection.mysql_version < (
8,
):
skips.update(
{
"Casting to datetime/time is not supported by MySQL < 8.0. "
"(#30224)": {
"aggregation.tests.AggregateTestCase."
"test_aggregation_default_using_time_from_python",
"aggregation.tests.AggregateTestCase."
"test_aggregation_default_using_datetime_from_python",
},
"MySQL < 8.0 returns string type instead of datetime/time. "
"(#30224)": {
"aggregation.tests.AggregateTestCase."
"test_aggregation_default_using_time_from_database",
"aggregation.tests.AggregateTestCase."
"test_aggregation_default_using_datetime_from_database",
},
}
)
if self.connection.mysql_is_mariadb and (
10,
4,
3,
) < self.connection.mysql_version < (10, 5, 2):
skips.update(
{
"https://jira.mariadb.org/browse/MDEV-19598": {
"schema.tests.SchemaTests."
"test_alter_not_unique_field_to_primary_key",
},
}
)
if self.connection.mysql_is_mariadb and (
10,
4,
12,
) < self.connection.mysql_version < (10, 5):
skips.update(
{
"https://jira.mariadb.org/browse/MDEV-22775": {
"schema.tests.SchemaTests."
"test_alter_pk_with_self_referential_field",
},
}
)
if not self.supports_explain_analyze:
skips.update(
{
"MariaDB and MySQL >= 8.0.18 specific.": {
"queries.test_explain.ExplainTests.test_mysql_analyze",
},
}
)
return skips
@cached_property
def _mysql_storage_engine(self):
"Internal method used in Django tests. Don't rely on this from your code"
return self.connection.mysql_server_data["default_storage_engine"]
@cached_property
def allows_auto_pk_0(self):
"""
Autoincrement primary key can be set to 0 if it doesn't generate new
autoincrement values.
"""
return "NO_AUTO_VALUE_ON_ZERO" in self.connection.sql_mode
@cached_property
def update_can_self_select(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
10,
3,
2,
)
@cached_property
def can_introspect_foreign_keys(self):
"Confirm support for introspected foreign keys"
return self._mysql_storage_engine != "MyISAM"
@cached_property
def introspected_field_types(self):
return {
**super().introspected_field_types,
"BinaryField": "TextField",
"BooleanField": "IntegerField",
"DurationField": "BigIntegerField",
"GenericIPAddressField": "CharField",
}
@cached_property
def can_return_columns_from_insert(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
10,
5,
0,
)
can_return_rows_from_bulk_insert = property(
operator.attrgetter("can_return_columns_from_insert")
)
@cached_property
def has_zoneinfo_database(self):
return self.connection.mysql_server_data["has_zoneinfo_database"]
@cached_property
def is_sql_auto_is_null_enabled(self):
return self.connection.mysql_server_data["sql_auto_is_null"]
@cached_property
def supports_over_clause(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 2)
supports_frame_range_fixed_distance = property(
operator.attrgetter("supports_over_clause")
)
@cached_property
def supports_column_check_constraints(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 16)
supports_table_check_constraints = property(
operator.attrgetter("supports_column_check_constraints")
)
@cached_property
def can_introspect_check_constraints(self):
if self.connection.mysql_is_mariadb:
version = self.connection.mysql_version
return version >= (10, 3, 10)
return self.connection.mysql_version >= (8, 0, 16)
@cached_property
def has_select_for_update_skip_locked(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 6)
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def has_select_for_update_nowait(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def has_select_for_update_of(self):
return (
not self.connection.mysql_is_mariadb
and self.connection.mysql_version >= (8, 0, 1)
)
@cached_property
def supports_explain_analyze(self):
return self.connection.mysql_is_mariadb or self.connection.mysql_version >= (
8,
0,
18,
)
@cached_property
def supported_explain_formats(self):
# Alias MySQL's TRADITIONAL to TEXT for consistency with other
# backends.
formats = {"JSON", "TEXT", "TRADITIONAL"}
if not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
8,
0,
16,
):
formats.add("TREE")
return formats
@cached_property
def supports_transactions(self):
"""
All storage engines except MyISAM support transactions.
"""
return self._mysql_storage_engine != "MyISAM"
uses_savepoints = property(operator.attrgetter("supports_transactions"))
can_release_savepoints = property(operator.attrgetter("supports_transactions"))
@cached_property
def ignores_table_name_case(self):
return self.connection.mysql_server_data["lower_case_table_names"]
@cached_property
def supports_default_in_lead_lag(self):
# To be added in https://jira.mariadb.org/browse/MDEV-12981.
return not self.connection.mysql_is_mariadb
@cached_property
def supports_json_field(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (5, 7, 8)
@cached_property
def can_introspect_json_field(self):
if self.connection.mysql_is_mariadb:
return self.supports_json_field and self.can_introspect_check_constraints
return self.supports_json_field
@cached_property
def supports_index_column_ordering(self):
if self._mysql_storage_engine != "InnoDB":
return False
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 8)
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def supports_expression_indexes(self):
return (
not self.connection.mysql_is_mariadb
and self._mysql_storage_engine != "MyISAM"
and self.connection.mysql_version >= (8, 0, 13)
)
|
f9643f3332ddf76bace5bff6d9d9fb8fffa1b50d20290655839dbf71cce491cc | from collections import namedtuple
from django.db.backends.base.introspection import BaseDatabaseIntrospection
from django.db.backends.base.introspection import FieldInfo as BaseFieldInfo
from django.db.backends.base.introspection import TableInfo
from django.db.models import Index
FieldInfo = namedtuple("FieldInfo", BaseFieldInfo._fields + ("is_autofield",))
class DatabaseIntrospection(BaseDatabaseIntrospection):
# Maps type codes to Django Field types.
data_types_reverse = {
16: "BooleanField",
17: "BinaryField",
20: "BigIntegerField",
21: "SmallIntegerField",
23: "IntegerField",
25: "TextField",
700: "FloatField",
701: "FloatField",
869: "GenericIPAddressField",
1042: "CharField", # blank-padded
1043: "CharField",
1082: "DateField",
1083: "TimeField",
1114: "DateTimeField",
1184: "DateTimeField",
1186: "DurationField",
1266: "TimeField",
1700: "DecimalField",
2950: "UUIDField",
3802: "JSONField",
}
# A hook for subclasses.
index_default_access_method = "btree"
ignored_tables = []
def get_field_type(self, data_type, description):
field_type = super().get_field_type(data_type, description)
if description.is_autofield or (
# Required for pre-Django 4.1 serial columns.
description.default
and "nextval" in description.default
):
if field_type == "IntegerField":
return "AutoField"
elif field_type == "BigIntegerField":
return "BigAutoField"
elif field_type == "SmallIntegerField":
return "SmallAutoField"
return field_type
def get_table_list(self, cursor):
"""Return a list of table and view names in the current database."""
cursor.execute(
"""
SELECT
c.relname,
CASE
WHEN c.relispartition THEN 'p'
WHEN c.relkind IN ('m', 'v') THEN 'v'
ELSE 't'
END
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v')
AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
AND pg_catalog.pg_table_is_visible(c.oid)
"""
)
return [
TableInfo(*row)
for row in cursor.fetchall()
if row[0] not in self.ignored_tables
]
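    # Illustrative return value (names are made up): each row becomes a
    # TableInfo(name, type) tuple where type is 't' (table), 'v' (view), or
    # 'p' (partition):
    #
    #   [TableInfo(name='django_migrations', type='t'),
    #    TableInfo(name='app_author', type='t'),
    #    TableInfo(name='app_author_view', type='v')]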
def get_table_description(self, cursor, table_name):
"""
Return a description of the table with the DB-API cursor.description
interface.
"""
# Query the pg_catalog tables as cursor.description does not reliably
# return the nullable property and information_schema.columns does not
# contain details of materialized views.
cursor.execute(
"""
SELECT
a.attname AS column_name,
NOT (a.attnotnull OR (t.typtype = 'd' AND t.typnotnull)) AS is_nullable,
pg_get_expr(ad.adbin, ad.adrelid) AS column_default,
CASE WHEN collname = 'default' THEN NULL ELSE collname END AS collation,
a.attidentity != '' AS is_autofield
FROM pg_attribute a
LEFT JOIN pg_attrdef ad ON a.attrelid = ad.adrelid AND a.attnum = ad.adnum
LEFT JOIN pg_collation co ON a.attcollation = co.oid
JOIN pg_type t ON a.atttypid = t.oid
JOIN pg_class c ON a.attrelid = c.oid
JOIN pg_namespace n ON c.relnamespace = n.oid
WHERE c.relkind IN ('f', 'm', 'p', 'r', 'v')
AND c.relname = %s
AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
AND pg_catalog.pg_table_is_visible(c.oid)
""",
[table_name],
)
field_map = {line[0]: line[1:] for line in cursor.fetchall()}
cursor.execute(
"SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)
)
return [
FieldInfo(
line.name,
line.type_code,
line.display_size,
line.internal_size,
line.precision,
line.scale,
*field_map[line.name],
)
for line in cursor.description
]
def get_sequences(self, cursor, table_name, table_fields=()):
cursor.execute(
"""
SELECT
s.relname AS sequence_name,
a.attname AS colname
FROM
pg_class s
JOIN pg_depend d ON d.objid = s.oid
AND d.classid = 'pg_class'::regclass
AND d.refclassid = 'pg_class'::regclass
JOIN pg_attribute a ON d.refobjid = a.attrelid
AND d.refobjsubid = a.attnum
JOIN pg_class tbl ON tbl.oid = d.refobjid
AND tbl.relname = %s
AND pg_catalog.pg_table_is_visible(tbl.oid)
WHERE
s.relkind = 'S';
""",
[table_name],
)
return [
{"name": row[0], "table": table_name, "column": row[1]}
for row in cursor.fetchall()
]
def get_relations(self, cursor, table_name):
"""
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all foreign keys in the given table.
"""
cursor.execute(
"""
SELECT a1.attname, c2.relname, a2.attname
FROM pg_constraint con
LEFT JOIN pg_class c1 ON con.conrelid = c1.oid
LEFT JOIN pg_class c2 ON con.confrelid = c2.oid
LEFT JOIN
pg_attribute a1 ON c1.oid = a1.attrelid AND a1.attnum = con.conkey[1]
LEFT JOIN
pg_attribute a2 ON c2.oid = a2.attrelid AND a2.attnum = con.confkey[1]
WHERE
c1.relname = %s AND
con.contype = 'f' AND
c1.relnamespace = c2.relnamespace AND
pg_catalog.pg_table_is_visible(c1.oid)
""",
[table_name],
)
return {row[0]: (row[2], row[1]) for row in cursor.fetchall()}
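    # Illustrative return value (assumption): for a table with a foreign key
    # column "author_id" pointing at app_author.id, this returns:
    #
    #   {"author_id": ("id", "app_author")}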
def get_constraints(self, cursor, table_name):
"""
Retrieve any constraints or keys (unique, pk, fk, check, index) across
one or more columns. Also retrieve the definition of expression-based
indexes.
"""
constraints = {}
# Loop over the key table, collecting things as constraints. The column
# array must return column names in the same order in which they were
# created.
cursor.execute(
"""
SELECT
c.conname,
array(
SELECT attname
FROM unnest(c.conkey) WITH ORDINALITY cols(colid, arridx)
JOIN pg_attribute AS ca ON cols.colid = ca.attnum
WHERE ca.attrelid = c.conrelid
ORDER BY cols.arridx
),
c.contype,
(SELECT fkc.relname || '.' || fka.attname
FROM pg_attribute AS fka
JOIN pg_class AS fkc ON fka.attrelid = fkc.oid
WHERE fka.attrelid = c.confrelid AND fka.attnum = c.confkey[1]),
cl.reloptions
FROM pg_constraint AS c
JOIN pg_class AS cl ON c.conrelid = cl.oid
WHERE cl.relname = %s AND pg_catalog.pg_table_is_visible(cl.oid)
""",
[table_name],
)
for constraint, columns, kind, used_cols, options in cursor.fetchall():
constraints[constraint] = {
"columns": columns,
"primary_key": kind == "p",
"unique": kind in ["p", "u"],
"foreign_key": tuple(used_cols.split(".", 1)) if kind == "f" else None,
"check": kind == "c",
"index": False,
"definition": None,
"options": options,
}
# Now get indexes
cursor.execute(
"""
SELECT
indexname,
array_agg(attname ORDER BY arridx),
indisunique,
indisprimary,
array_agg(ordering ORDER BY arridx),
amname,
exprdef,
s2.attoptions
FROM (
SELECT
c2.relname as indexname, idx.*, attr.attname, am.amname,
CASE
WHEN idx.indexprs IS NOT NULL THEN
pg_get_indexdef(idx.indexrelid)
END AS exprdef,
CASE am.amname
WHEN %s THEN
CASE (option & 1)
WHEN 1 THEN 'DESC' ELSE 'ASC'
END
END as ordering,
c2.reloptions as attoptions
FROM (
SELECT *
FROM
pg_index i,
unnest(i.indkey, i.indoption)
WITH ORDINALITY koi(key, option, arridx)
) idx
LEFT JOIN pg_class c ON idx.indrelid = c.oid
LEFT JOIN pg_class c2 ON idx.indexrelid = c2.oid
LEFT JOIN pg_am am ON c2.relam = am.oid
LEFT JOIN
pg_attribute attr ON attr.attrelid = c.oid AND attr.attnum = idx.key
WHERE c.relname = %s AND pg_catalog.pg_table_is_visible(c.oid)
) s2
GROUP BY indexname, indisunique, indisprimary, amname, exprdef, attoptions;
""",
[self.index_default_access_method, table_name],
)
for (
index,
columns,
unique,
primary,
orders,
type_,
definition,
options,
) in cursor.fetchall():
if index not in constraints:
basic_index = (
type_ == self.index_default_access_method
and
# '_btree' references
# django.contrib.postgres.indexes.BTreeIndex.suffix.
not index.endswith("_btree")
and options is None
)
constraints[index] = {
"columns": columns if columns != [None] else [],
"orders": orders if orders != [None] else [],
"primary_key": primary,
"unique": unique,
"foreign_key": None,
"check": False,
"index": True,
"type": Index.suffix if basic_index else type_,
"definition": definition,
"options": options,
}
return constraints
|
f174af87290899ef2fabc67d743d13de626f8632bb6bcaf787272927a422ae45 | """
PostgreSQL database backend for Django.
Requires psycopg 2: https://www.psycopg.org/
"""
import asyncio
import threading
import warnings
from contextlib import contextmanager
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import DatabaseError as WrappedDatabaseError
from django.db import connections
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorDebugWrapper as BaseCursorDebugWrapper
from django.utils.asyncio import async_unsafe
from django.utils.functional import cached_property
from django.utils.safestring import SafeString
from django.utils.version import get_version_tuple
try:
import psycopg2 as Database
import psycopg2.extensions
import psycopg2.extras
except ImportError as e:
raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
def psycopg2_version():
version = psycopg2.__version__.split(" ", 1)[0]
return get_version_tuple(version)
PSYCOPG2_VERSION = psycopg2_version()
if PSYCOPG2_VERSION < (2, 8, 4):
raise ImproperlyConfigured(
"psycopg2 version 2.8.4 or newer is required; you have %s"
% psycopg2.__version__
)
# Some of these import psycopg2, so import them after checking if it's installed.
from .client import DatabaseClient # NOQA
from .creation import DatabaseCreation # NOQA
from .features import DatabaseFeatures # NOQA
from .introspection import DatabaseIntrospection # NOQA
from .operations import DatabaseOperations # NOQA
from .schema import DatabaseSchemaEditor # NOQA
psycopg2.extensions.register_adapter(SafeString, psycopg2.extensions.QuotedString)
psycopg2.extras.register_uuid()
# Register support for inet[] manually so we don't have to handle the Inet()
# object on load all the time.
INETARRAY_OID = 1041
INETARRAY = psycopg2.extensions.new_array_type(
(INETARRAY_OID,),
"INETARRAY",
psycopg2.extensions.UNICODE,
)
psycopg2.extensions.register_type(INETARRAY)
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = "postgresql"
display_name = "PostgreSQL"
# This dictionary maps Field objects to their associated PostgreSQL column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
data_types = {
"AutoField": "integer",
"BigAutoField": "bigint",
"BinaryField": "bytea",
"BooleanField": "boolean",
"CharField": "varchar(%(max_length)s)",
"DateField": "date",
"DateTimeField": "timestamp with time zone",
"DecimalField": "numeric(%(max_digits)s, %(decimal_places)s)",
"DurationField": "interval",
"FileField": "varchar(%(max_length)s)",
"FilePathField": "varchar(%(max_length)s)",
"FloatField": "double precision",
"IntegerField": "integer",
"BigIntegerField": "bigint",
"IPAddressField": "inet",
"GenericIPAddressField": "inet",
"JSONField": "jsonb",
"OneToOneField": "integer",
"PositiveBigIntegerField": "bigint",
"PositiveIntegerField": "integer",
"PositiveSmallIntegerField": "smallint",
"SlugField": "varchar(%(max_length)s)",
"SmallAutoField": "smallint",
"SmallIntegerField": "smallint",
"TextField": "text",
"TimeField": "time",
"UUIDField": "uuid",
}
data_type_check_constraints = {
"PositiveBigIntegerField": '"%(column)s" >= 0',
"PositiveIntegerField": '"%(column)s" >= 0',
"PositiveSmallIntegerField": '"%(column)s" >= 0',
}
data_types_suffix = {
"AutoField": "GENERATED BY DEFAULT AS IDENTITY",
"BigAutoField": "GENERATED BY DEFAULT AS IDENTITY",
"SmallAutoField": "GENERATED BY DEFAULT AS IDENTITY",
}
operators = {
"exact": "= %s",
"iexact": "= UPPER(%s)",
"contains": "LIKE %s",
"icontains": "LIKE UPPER(%s)",
"regex": "~ %s",
"iregex": "~* %s",
"gt": "> %s",
"gte": ">= %s",
"lt": "< %s",
"lte": "<= %s",
"startswith": "LIKE %s",
"endswith": "LIKE %s",
"istartswith": "LIKE UPPER(%s)",
"iendswith": "LIKE UPPER(%s)",
}
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
    # In those cases, special characters for LIKE operators (e.g. \, %, _) should be
# escaped on database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = (
r"REPLACE(REPLACE(REPLACE({}, E'\\', E'\\\\'), E'%%', E'\\%%'), E'_', E'\\_')"
)
pattern_ops = {
"contains": "LIKE '%%' || {} || '%%'",
"icontains": "LIKE '%%' || UPPER({}) || '%%'",
"startswith": "LIKE {} || '%%'",
"istartswith": "LIKE UPPER({}) || '%%'",
"endswith": "LIKE '%%' || {}",
"iendswith": "LIKE '%%' || UPPER({})",
}
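    # Editor's note: a hedged illustration, not part of the original module.
    # Formatting pattern_esc with an SQL expression yields the REPLACE()
    # chain that escapes backslash, percent, and underscore so a dynamic
    # right-hand side matches literally under LIKE; the doubled '%%' stays
    # doubled here because the query still passes through parameter
    # interpolation later:
    #
    # >>> print(self.pattern_esc.format("x"))
    # REPLACE(REPLACE(REPLACE(x, E'\\', E'\\\\'), E'%%', E'\\%%'), E'_', E'\\_')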
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
# Classes instantiated in __init__().
client_class = DatabaseClient
creation_class = DatabaseCreation
features_class = DatabaseFeatures
introspection_class = DatabaseIntrospection
ops_class = DatabaseOperations
# PostgreSQL backend-specific attributes.
_named_cursor_idx = 0
def get_database_version(self):
"""
Return a tuple of the database's version.
E.g. for pg_version 120004, return (12, 4).
"""
return divmod(self.pg_version, 10000)
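    # Editor's note: an illustrative sketch of the mapping above, assuming a
    # hypothetical PostgreSQL 14.5 server:
    #
    # >>> self.pg_version
    # 140005
    # >>> self.get_database_version()
    # (14, 5)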
def get_connection_params(self):
settings_dict = self.settings_dict
# None may be used to connect to the default 'postgres' db
if settings_dict["NAME"] == "" and not settings_dict.get("OPTIONS", {}).get(
"service"
):
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME or OPTIONS['service'] value."
)
if len(settings_dict["NAME"] or "") > self.ops.max_name_length():
raise ImproperlyConfigured(
"The database name '%s' (%d characters) is longer than "
"PostgreSQL's limit of %d characters. Supply a shorter NAME "
"in settings.DATABASES."
% (
settings_dict["NAME"],
len(settings_dict["NAME"]),
self.ops.max_name_length(),
)
)
conn_params = {}
if settings_dict["NAME"]:
conn_params = {
"database": settings_dict["NAME"],
**settings_dict["OPTIONS"],
}
elif settings_dict["NAME"] is None:
# Connect to the default 'postgres' db.
settings_dict.get("OPTIONS", {}).pop("service", None)
conn_params = {"database": "postgres", **settings_dict["OPTIONS"]}
else:
conn_params = {**settings_dict["OPTIONS"]}
conn_params.pop("isolation_level", None)
if settings_dict["USER"]:
conn_params["user"] = settings_dict["USER"]
if settings_dict["PASSWORD"]:
conn_params["password"] = settings_dict["PASSWORD"]
if settings_dict["HOST"]:
conn_params["host"] = settings_dict["HOST"]
if settings_dict["PORT"]:
conn_params["port"] = settings_dict["PORT"]
return conn_params
@async_unsafe
def get_new_connection(self, conn_params):
connection = Database.connect(**conn_params)
# self.isolation_level must be set:
# - after connecting to the database in order to obtain the database's
# default when no value is explicitly specified in options.
# - before calling _set_autocommit() because if autocommit is on, that
# will set connection.isolation_level to ISOLATION_LEVEL_AUTOCOMMIT.
options = self.settings_dict["OPTIONS"]
try:
self.isolation_level = options["isolation_level"]
except KeyError:
self.isolation_level = connection.isolation_level
else:
# Set the isolation level to the value from OPTIONS.
if self.isolation_level != connection.isolation_level:
connection.set_session(isolation_level=self.isolation_level)
# Register dummy loads() to avoid a round trip from psycopg2's decode
# to json.dumps() to json.loads(), when using a custom decoder in
# JSONField.
psycopg2.extras.register_default_jsonb(
conn_or_curs=connection, loads=lambda x: x
)
return connection
def ensure_timezone(self):
if self.connection is None:
return False
conn_timezone_name = self.connection.get_parameter_status("TimeZone")
timezone_name = self.timezone_name
if timezone_name and conn_timezone_name != timezone_name:
with self.connection.cursor() as cursor:
cursor.execute(self.ops.set_time_zone_sql(), [timezone_name])
return True
return False
def init_connection_state(self):
super().init_connection_state()
self.connection.set_client_encoding("UTF8")
timezone_changed = self.ensure_timezone()
if timezone_changed:
# Commit after setting the time zone (see #17062)
if not self.get_autocommit():
self.connection.commit()
@async_unsafe
def create_cursor(self, name=None):
if name:
# In autocommit mode, the cursor will be used outside of a
# transaction, hence use a holdable cursor.
cursor = self.connection.cursor(
name, scrollable=False, withhold=self.connection.autocommit
)
else:
cursor = self.connection.cursor()
cursor.tzinfo_factory = self.tzinfo_factory if settings.USE_TZ else None
return cursor
def tzinfo_factory(self, offset):
return self.timezone
@async_unsafe
def chunked_cursor(self):
self._named_cursor_idx += 1
        # Get the current async task.
        # Note that right now this is behind @async_unsafe, so this is
        # unreachable, but in the future we'll start loosening this
        # restriction. For now, it's here so that every use of "threading"
        # is also async-compatible.
try:
current_task = asyncio.current_task()
except RuntimeError:
current_task = None
        # The current task can be None even if the current_task() call didn't error.
if current_task:
task_ident = str(id(current_task))
else:
task_ident = "sync"
# Use that and the thread ident to get a unique name
return self._cursor(
name="_django_curs_%d_%s_%d"
% (
# Avoid reusing name in other threads / tasks
threading.current_thread().ident,
task_ident,
self._named_cursor_idx,
)
)
def _set_autocommit(self, autocommit):
with self.wrap_database_errors:
self.connection.autocommit = autocommit
def check_constraints(self, table_names=None):
"""
Check constraints by setting them to immediate. Return them to deferred
afterward.
"""
with self.cursor() as cursor:
cursor.execute("SET CONSTRAINTS ALL IMMEDIATE")
cursor.execute("SET CONSTRAINTS ALL DEFERRED")
def is_usable(self):
try:
# Use a psycopg cursor directly, bypassing Django's utilities.
with self.connection.cursor() as cursor:
cursor.execute("SELECT 1")
except Database.Error:
return False
else:
return True
@contextmanager
def _nodb_cursor(self):
cursor = None
try:
with super()._nodb_cursor() as cursor:
yield cursor
except (Database.DatabaseError, WrappedDatabaseError):
if cursor is not None:
raise
warnings.warn(
"Normally Django will use a connection to the 'postgres' database "
"to avoid running initialization queries against the production "
"database when it's not needed (for example, when running tests). "
"Django was unable to create a connection to the 'postgres' database "
"and will use the first PostgreSQL database instead.",
RuntimeWarning,
)
for connection in connections.all():
if (
connection.vendor == "postgresql"
and connection.settings_dict["NAME"] != "postgres"
):
conn = self.__class__(
{
**self.settings_dict,
"NAME": connection.settings_dict["NAME"],
},
alias=self.alias,
)
try:
with conn.cursor() as cursor:
yield cursor
finally:
conn.close()
break
else:
raise
@cached_property
def pg_version(self):
with self.temporary_connection():
return self.connection.server_version
def make_debug_cursor(self, cursor):
return CursorDebugWrapper(cursor, self)
class CursorDebugWrapper(BaseCursorDebugWrapper):
def copy_expert(self, sql, file, *args):
with self.debug_sql(sql):
return self.cursor.copy_expert(sql, file, *args)
def copy_to(self, file, table, *args, **kwargs):
with self.debug_sql(sql="COPY %s TO STDOUT" % table):
return self.cursor.copy_to(file, table, *args, **kwargs)
|
f1154a42474d8994aeaa863e419444329395d84060352d80c1878087fdec286c | from psycopg2.extras import Inet
from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.backends.utils import split_tzname_delta
from django.db.models.constants import OnConflict
class DatabaseOperations(BaseDatabaseOperations):
cast_char_field_without_max_length = "varchar"
explain_prefix = "EXPLAIN"
explain_options = frozenset(
[
"ANALYZE",
"BUFFERS",
"COSTS",
"SETTINGS",
"SUMMARY",
"TIMING",
"VERBOSE",
"WAL",
]
)
cast_data_types = {
"AutoField": "integer",
"BigAutoField": "bigint",
"SmallAutoField": "smallint",
}
def unification_cast_sql(self, output_field):
internal_type = output_field.get_internal_type()
if internal_type in (
"GenericIPAddressField",
"IPAddressField",
"TimeField",
"UUIDField",
):
# PostgreSQL will resolve a union as type 'text' if input types are
# 'unknown'.
# https://www.postgresql.org/docs/current/typeconv-union-case.html
# These fields cannot be implicitly cast back in the default
# PostgreSQL configuration so we need to explicitly cast them.
# We must also remove components of the type within brackets:
# varchar(255) -> varchar.
return (
"CAST(%%s AS %s)" % output_field.db_type(self.connection).split("(")[0]
)
return "%s"
def date_extract_sql(self, lookup_type, field_name):
# https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
if lookup_type == "week_day":
# For consistency across backends, we return Sunday=1, Saturday=7.
return "EXTRACT('dow' FROM %s) + 1" % field_name
elif lookup_type == "iso_week_day":
return "EXTRACT('isodow' FROM %s)" % field_name
elif lookup_type == "iso_year":
return "EXTRACT('isoyear' FROM %s)" % field_name
else:
return "EXTRACT('%s' FROM %s)" % (lookup_type, field_name)
def date_trunc_sql(self, lookup_type, field_name, tzname=None):
field_name = self._convert_field_to_tz(field_name, tzname)
# https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
def _prepare_tzname_delta(self, tzname):
tzname, sign, offset = split_tzname_delta(tzname)
if offset:
sign = "-" if sign == "+" else "+"
return f"{tzname}{sign}{offset}"
return tzname
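    # Editor's note: a hedged example of the sign flip above. PostgreSQL's
    # POSIX-style time zone names invert the ISO offset sign, so a zone
    # given as 'UTC+05:30' must be sent as 'UTC-05:30':
    #
    # >>> self._prepare_tzname_delta("UTC+05:30")
    # 'UTC-05:30'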
def _convert_field_to_tz(self, field_name, tzname):
if tzname and settings.USE_TZ:
field_name = "%s AT TIME ZONE '%s'" % (
field_name,
self._prepare_tzname_delta(tzname),
)
return field_name
def datetime_cast_date_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return "(%s)::date" % field_name
def datetime_cast_time_sql(self, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
return "(%s)::time" % field_name
def datetime_extract_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
if lookup_type == "second":
# Truncate fractional seconds.
return f"EXTRACT('second' FROM DATE_TRUNC('second', {field_name}))"
return self.date_extract_sql(lookup_type, field_name)
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
field_name = self._convert_field_to_tz(field_name, tzname)
# https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-TRUNC
return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)
def time_extract_sql(self, lookup_type, field_name):
if lookup_type == "second":
# Truncate fractional seconds.
return f"EXTRACT('second' FROM DATE_TRUNC('second', {field_name}))"
return self.date_extract_sql(lookup_type, field_name)
def time_trunc_sql(self, lookup_type, field_name, tzname=None):
field_name = self._convert_field_to_tz(field_name, tzname)
return "DATE_TRUNC('%s', %s)::time" % (lookup_type, field_name)
def deferrable_sql(self):
return " DEFERRABLE INITIALLY DEFERRED"
def fetch_returned_insert_rows(self, cursor):
"""
Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table, return the list of returned rows.
"""
return cursor.fetchall()
def lookup_cast(self, lookup_type, internal_type=None):
lookup = "%s"
# Cast text lookups to text to allow things like filter(x__contains=4)
if lookup_type in (
"iexact",
"contains",
"icontains",
"startswith",
"istartswith",
"endswith",
"iendswith",
"regex",
"iregex",
):
if internal_type in ("IPAddressField", "GenericIPAddressField"):
lookup = "HOST(%s)"
elif internal_type in ("CICharField", "CIEmailField", "CITextField"):
lookup = "%s::citext"
else:
lookup = "%s::text"
# Use UPPER(x) for case-insensitive lookups; it's faster.
if lookup_type in ("iexact", "icontains", "istartswith", "iendswith"):
lookup = "UPPER(%s)" % lookup
return lookup
def no_limit_value(self):
return None
def prepare_sql_script(self, sql):
return [sql]
def quote_name(self, name):
if name.startswith('"') and name.endswith('"'):
return name # Quoting once is enough.
return '"%s"' % name
def set_time_zone_sql(self):
return "SET TIME ZONE %s"
def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
if not tables:
return []
# Perform a single SQL 'TRUNCATE x, y, z...;' statement. It allows us
# to truncate tables referenced by a foreign key in any other table.
sql_parts = [
style.SQL_KEYWORD("TRUNCATE"),
", ".join(style.SQL_FIELD(self.quote_name(table)) for table in tables),
]
if reset_sequences:
sql_parts.append(style.SQL_KEYWORD("RESTART IDENTITY"))
if allow_cascade:
sql_parts.append(style.SQL_KEYWORD("CASCADE"))
return ["%s;" % " ".join(sql_parts)]
def sequence_reset_by_name_sql(self, style, sequences):
# 'ALTER SEQUENCE sequence_name RESTART WITH 1;'... style SQL statements
# to reset sequence indices
sql = []
for sequence_info in sequences:
table_name = sequence_info["table"]
# 'id' will be the case if it's an m2m using an autogenerated
# intermediate table (see BaseDatabaseIntrospection.sequence_list).
column_name = sequence_info["column"] or "id"
sql.append(
"%s setval(pg_get_serial_sequence('%s','%s'), 1, false);"
% (
style.SQL_KEYWORD("SELECT"),
style.SQL_TABLE(self.quote_name(table_name)),
style.SQL_FIELD(column_name),
)
)
return sql
def tablespace_sql(self, tablespace, inline=False):
if inline:
return "USING INDEX TABLESPACE %s" % self.quote_name(tablespace)
else:
return "TABLESPACE %s" % self.quote_name(tablespace)
def sequence_reset_sql(self, style, model_list):
from django.db import models
output = []
qn = self.quote_name
for model in model_list:
# Use `coalesce` to set the sequence for each model to the max pk
# value if there are records, or 1 if there are none. Set the
# `is_called` property (the third argument to `setval`) to true if
# there are records (as the max pk value is already in use),
# otherwise set it to false. Use pg_get_serial_sequence to get the
# underlying sequence name from the table name and column name.
for f in model._meta.local_fields:
if isinstance(f, models.AutoField):
output.append(
"%s setval(pg_get_serial_sequence('%s','%s'), "
"coalesce(max(%s), 1), max(%s) %s null) %s %s;"
% (
style.SQL_KEYWORD("SELECT"),
style.SQL_TABLE(qn(model._meta.db_table)),
style.SQL_FIELD(f.column),
style.SQL_FIELD(qn(f.column)),
style.SQL_FIELD(qn(f.column)),
style.SQL_KEYWORD("IS NOT"),
style.SQL_KEYWORD("FROM"),
style.SQL_TABLE(qn(model._meta.db_table)),
)
)
# Only one AutoField is allowed per model, so don't bother
# continuing.
break
return output
def prep_for_iexact_query(self, x):
return x
def max_name_length(self):
"""
Return the maximum length of an identifier.
The maximum length of an identifier is 63 by default, but can be
changed by recompiling PostgreSQL after editing the NAMEDATALEN
macro in src/include/pg_config_manual.h.
This implementation returns 63, but can be overridden by a custom
database backend that inherits most of its behavior from this one.
"""
return 63
def distinct_sql(self, fields, params):
if fields:
params = [param for param_list in params for param in param_list]
return (["DISTINCT ON (%s)" % ", ".join(fields)], params)
else:
return ["DISTINCT"], []
def last_executed_query(self, cursor, sql, params):
# https://www.psycopg.org/docs/cursor.html#cursor.query
# The query attribute is a Psycopg extension to the DB API 2.0.
if cursor.query is not None:
return cursor.query.decode()
return None
def return_insert_columns(self, fields):
if not fields:
return "", ()
columns = [
"%s.%s"
% (
self.quote_name(field.model._meta.db_table),
self.quote_name(field.column),
)
for field in fields
]
return "RETURNING %s" % ", ".join(columns), ()
def bulk_insert_sql(self, fields, placeholder_rows):
placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
return "VALUES " + values_sql
def adapt_datefield_value(self, value):
return value
def adapt_datetimefield_value(self, value):
return value
def adapt_timefield_value(self, value):
return value
def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
return value
def adapt_ipaddressfield_value(self, value):
if value:
return Inet(value)
return None
def subtract_temporals(self, internal_type, lhs, rhs):
if internal_type == "DateField":
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
params = (*lhs_params, *rhs_params)
return "(interval '1 day' * (%s - %s))" % (lhs_sql, rhs_sql), params
return super().subtract_temporals(internal_type, lhs, rhs)
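    # Editor's note: a hedged sketch of the DateField branch above, with
    # hypothetical (sql, params) operands; the difference of two dates is
    # promoted to an interval so it behaves like a DurationField:
    #
    # >>> sql, params = self.subtract_temporals(
    # ...     "DateField", ('"end"', []), ('"start"', [])
    # ... )
    # >>> print(sql)
    # (interval '1 day' * ("end" - "start"))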
def explain_query_prefix(self, format=None, **options):
extra = {}
# Normalize options.
if options:
options = {
name.upper(): "true" if value else "false"
for name, value in options.items()
}
for valid_option in self.explain_options:
value = options.pop(valid_option, None)
if value is not None:
extra[valid_option] = value
prefix = super().explain_query_prefix(format, **options)
if format:
extra["FORMAT"] = format
if extra:
prefix += " (%s)" % ", ".join("%s %s" % i for i in extra.items())
return prefix
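    # Editor's note: illustrative output of explain_query_prefix(), using
    # options PostgreSQL accepts:
    #
    # >>> self.explain_query_prefix(format="JSON", analyze=True)
    # 'EXPLAIN (ANALYZE true, FORMAT JSON)'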
def on_conflict_suffix_sql(self, fields, on_conflict, update_fields, unique_fields):
if on_conflict == OnConflict.IGNORE:
return "ON CONFLICT DO NOTHING"
if on_conflict == OnConflict.UPDATE:
return "ON CONFLICT(%s) DO UPDATE SET %s" % (
", ".join(map(self.quote_name, unique_fields)),
", ".join(
[
f"{field} = EXCLUDED.{field}"
for field in map(self.quote_name, update_fields)
]
),
)
return super().on_conflict_suffix_sql(
fields,
on_conflict,
update_fields,
unique_fields,
)
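    # Editor's note: an illustrative sketch of the UPDATE branch above, with
    # hypothetical column names:
    #
    # >>> self.on_conflict_suffix_sql([], OnConflict.UPDATE, ["name"], ["id"])
    # 'ON CONFLICT("id") DO UPDATE SET "name" = EXCLUDED."name"'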
|
6628604a5aa7ff02e8b1daa57a0e037180a78968b2695bb178243cb0af882dc6 | import psycopg2
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.ddl_references import IndexColumns
from django.db.backends.utils import strip_quotes
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE"
sql_create_index = (
"CREATE INDEX %(name)s ON %(table)s%(using)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_create_index_concurrently = (
"CREATE INDEX CONCURRENTLY %(name)s ON %(table)s%(using)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_delete_index = "DROP INDEX IF EXISTS %(name)s"
sql_delete_index_concurrently = "DROP INDEX CONCURRENTLY IF EXISTS %(name)s"
# Setting the constraint to IMMEDIATE to allow changing data in the same
# transaction.
sql_create_column_inline_fk = (
"CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s"
"; SET CONSTRAINTS %(namespace)s%(name)s IMMEDIATE"
)
# Setting the constraint to IMMEDIATE runs any deferred checks to allow
# dropping it in the same transaction.
sql_delete_fk = (
"SET CONSTRAINTS %(name)s IMMEDIATE; "
"ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
)
sql_delete_procedure = "DROP FUNCTION %(procedure)s(%(param_types)s)"
sql_add_identity = (
"ALTER TABLE %(table)s ALTER COLUMN %(column)s ADD "
"GENERATED BY DEFAULT AS IDENTITY"
)
    sql_drop_identity = (
"ALTER TABLE %(table)s ALTER COLUMN %(column)s DROP IDENTITY IF EXISTS"
)
def quote_value(self, value):
if isinstance(value, str):
value = value.replace("%", "%%")
adapted = psycopg2.extensions.adapt(value)
if hasattr(adapted, "encoding"):
adapted.encoding = "utf8"
# getquoted() returns a quoted bytestring of the adapted value.
return adapted.getquoted().decode()
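    # Editor's note: a hedged example of quote_value(). Single quotes are
    # doubled by psycopg2's adapter, and '%' is pre-doubled so the literal
    # survives the later %-interpolation of schema SQL templates:
    #
    # >>> print(self.quote_value("O'Reilly 100%"))
    # 'O''Reilly 100%%'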
def _field_indexes_sql(self, model, field):
output = super()._field_indexes_sql(model, field)
like_index_statement = self._create_like_index_sql(model, field)
if like_index_statement is not None:
output.append(like_index_statement)
return output
def _field_data_type(self, field):
if field.is_relation:
return field.rel_db_type(self.connection)
return self.connection.data_types.get(
field.get_internal_type(),
field.db_type(self.connection),
)
def _field_base_data_types(self, field):
# Yield base data types for array fields.
if field.base_field.get_internal_type() == "ArrayField":
yield from self._field_base_data_types(field.base_field)
else:
yield self._field_data_type(field.base_field)
def _create_like_index_sql(self, model, field):
"""
        Return the statement to create an index with a pattern operator class
        when the column type is 'varchar' or 'text'; otherwise return None.
"""
db_type = field.db_type(connection=self.connection)
if db_type is not None and (field.db_index or field.unique):
# Fields with database column types of `varchar` and `text` need
# a second index that specifies their operator class, which is
# needed when performing correct LIKE queries outside the
# C locale. See #12234.
#
# The same doesn't apply to array fields such as varchar[size]
# and text[size], so skip them.
if "[" in db_type:
return None
if db_type.startswith("varchar"):
return self._create_index_sql(
model,
fields=[field],
suffix="_like",
opclasses=["varchar_pattern_ops"],
)
elif db_type.startswith("text"):
return self._create_index_sql(
model,
fields=[field],
suffix="_like",
opclasses=["text_pattern_ops"],
)
return None
def _alter_column_type_sql(self, model, old_field, new_field, new_type):
self.sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s"
# Cast when data type changed.
using_sql = " USING %(column)s::%(type)s"
new_internal_type = new_field.get_internal_type()
old_internal_type = old_field.get_internal_type()
if new_internal_type == "ArrayField" and new_internal_type == old_internal_type:
# Compare base data types for array fields.
if list(self._field_base_data_types(old_field)) != list(
self._field_base_data_types(new_field)
):
self.sql_alter_column_type += using_sql
elif self._field_data_type(old_field) != self._field_data_type(new_field):
self.sql_alter_column_type += using_sql
# Make ALTER TYPE with IDENTITY make sense.
table = strip_quotes(model._meta.db_table)
auto_field_types = {
"AutoField",
"BigAutoField",
"SmallAutoField",
}
old_is_auto = old_internal_type in auto_field_types
new_is_auto = new_internal_type in auto_field_types
if new_is_auto and not old_is_auto:
column = strip_quotes(new_field.column)
return (
(
self.sql_alter_column_type
% {
"column": self.quote_name(column),
"type": new_type,
},
[],
),
[
(
self.sql_add_identity
% {
"table": self.quote_name(table),
"column": self.quote_name(column),
},
[],
),
],
)
elif old_is_auto and not new_is_auto:
            # Drop the IDENTITY property if it exists (pre-Django 4.1 serial
            # columns don't have it).
            self.execute(
                self.sql_drop_identity
% {
"table": self.quote_name(table),
"column": self.quote_name(strip_quotes(old_field.column)),
}
)
column = strip_quotes(new_field.column)
sequence_name = "%s_%s_seq" % (table, column)
fragment, _ = super()._alter_column_type_sql(
model, old_field, new_field, new_type
)
return fragment, [
(
                    # Drop the sequence if it exists (Django 4.1+ identity
                    # columns don't have one).
self.sql_delete_sequence
% {
"sequence": self.quote_name(sequence_name),
},
[],
),
]
else:
return super()._alter_column_type_sql(model, old_field, new_field, new_type)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
# Drop indexes on varchar/text/citext columns that are changing to a
# different type.
if (old_field.db_index or old_field.unique) and (
(old_type.startswith("varchar") and not new_type.startswith("varchar"))
or (old_type.startswith("text") and not new_type.startswith("text"))
or (old_type.startswith("citext") and not new_type.startswith("citext"))
):
index_name = self._create_index_name(
model._meta.db_table, [old_field.column], suffix="_like"
)
self.execute(self._delete_index_sql(model, index_name))
super()._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict,
)
# Added an index? Create any PostgreSQL-specific indexes.
if (not (old_field.db_index or old_field.unique) and new_field.db_index) or (
not old_field.unique and new_field.unique
):
like_index_statement = self._create_like_index_sql(model, new_field)
if like_index_statement is not None:
self.execute(like_index_statement)
# Removed an index? Drop any PostgreSQL-specific indexes.
if old_field.unique and not (new_field.db_index or new_field.unique):
index_to_remove = self._create_index_name(
model._meta.db_table, [old_field.column], suffix="_like"
)
self.execute(self._delete_index_sql(model, index_to_remove))
def _index_columns(self, table, columns, col_suffixes, opclasses):
if opclasses:
return IndexColumns(
table,
columns,
self.quote_name,
col_suffixes=col_suffixes,
opclasses=opclasses,
)
return super()._index_columns(table, columns, col_suffixes, opclasses)
def add_index(self, model, index, concurrently=False):
self.execute(
index.create_sql(model, self, concurrently=concurrently), params=None
)
def remove_index(self, model, index, concurrently=False):
self.execute(index.remove_sql(model, self, concurrently=concurrently))
def _delete_index_sql(self, model, name, sql=None, concurrently=False):
sql = (
self.sql_delete_index_concurrently
if concurrently
else self.sql_delete_index
)
return super()._delete_index_sql(model, name, sql)
def _create_index_sql(
self,
model,
*,
fields=None,
name=None,
suffix="",
using="",
db_tablespace=None,
col_suffixes=(),
sql=None,
opclasses=(),
condition=None,
concurrently=False,
include=None,
expressions=None,
):
sql = (
self.sql_create_index
if not concurrently
else self.sql_create_index_concurrently
)
return super()._create_index_sql(
model,
fields=fields,
name=name,
suffix=suffix,
using=using,
db_tablespace=db_tablespace,
col_suffixes=col_suffixes,
sql=sql,
opclasses=opclasses,
condition=condition,
include=include,
expressions=expressions,
)
|
6ebbe1fb51de14f0142ca4dc2d26efaaf184bb7e48732d2483c1597ea222c736 | import copy
from decimal import Decimal
from django.apps.registry import Apps
from django.db import NotSupportedError
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.ddl_references import Statement
from django.db.backends.utils import strip_quotes
from django.db.models import UniqueConstraint
from django.db.transaction import atomic
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_delete_table = "DROP TABLE %(table)s"
sql_create_fk = None
sql_create_inline_fk = (
"REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
)
sql_create_column_inline_fk = sql_create_inline_fk
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
sql_create_unique = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)"
sql_delete_unique = "DROP INDEX %(name)s"
def __enter__(self):
# Some SQLite schema alterations need foreign key constraints to be
        # disabled. Enforce it here for the duration of the schema edit.
if not self.connection.disable_constraint_checking():
raise NotSupportedError(
"SQLite schema editor cannot be used while foreign key "
"constraint checks are enabled. Make sure to disable them "
"before entering a transaction.atomic() context because "
"SQLite does not support disabling them in the middle of "
"a multi-statement transaction."
)
return super().__enter__()
def __exit__(self, exc_type, exc_value, traceback):
self.connection.check_constraints()
super().__exit__(exc_type, exc_value, traceback)
self.connection.enable_constraint_checking()
def quote_value(self, value):
# The backend "mostly works" without this function and there are use
# cases for compiling Python without the sqlite3 libraries (e.g.
# security hardening).
try:
import sqlite3
value = sqlite3.adapt(value)
except ImportError:
pass
except sqlite3.ProgrammingError:
pass
# Manual emulation of SQLite parameter quoting
if isinstance(value, bool):
return str(int(value))
elif isinstance(value, (Decimal, float, int)):
return str(value)
elif isinstance(value, str):
return "'%s'" % value.replace("'", "''")
elif value is None:
return "NULL"
elif isinstance(value, (bytes, bytearray, memoryview)):
# Bytes are only allowed for BLOB fields, encoded as string
# literals containing hexadecimal data and preceded by a single "X"
# character.
return "X'%s'" % value.hex()
else:
raise ValueError(
"Cannot quote parameter value %r of type %s" % (value, type(value))
)
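    # Editor's note: illustrative outputs of the manual quoting above:
    #
    # >>> self.quote_value(True)
    # '1'
    # >>> self.quote_value("it's")
    # "'it''s'"
    # >>> self.quote_value(b"\x01\xff")
    # "X'01ff'"
    # >>> self.quote_value(None)
    # 'NULL'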
def prepare_default(self, value):
return self.quote_value(value)
def _is_referenced_by_fk_constraint(
self, table_name, column_name=None, ignore_self=False
):
"""
Return whether or not the provided table name is referenced by another
one. If `column_name` is specified, only references pointing to that
column are considered. If `ignore_self` is True, self-referential
constraints are ignored.
"""
with self.connection.cursor() as cursor:
for other_table in self.connection.introspection.get_table_list(cursor):
if ignore_self and other_table.name == table_name:
continue
relations = self.connection.introspection.get_relations(
cursor, other_table.name
)
for constraint_column, constraint_table in relations.values():
if constraint_table == table_name and (
column_name is None or constraint_column == column_name
):
return True
return False
def alter_db_table(
self, model, old_db_table, new_db_table, disable_constraints=True
):
if (
not self.connection.features.supports_atomic_references_rename
and disable_constraints
and self._is_referenced_by_fk_constraint(old_db_table)
):
if self.connection.in_atomic_block:
raise NotSupportedError(
(
"Renaming the %r table while in a transaction is not "
"supported on SQLite < 3.26 because it would break referential "
"integrity. Try adding `atomic = False` to the Migration class."
)
% old_db_table
)
self.connection.enable_constraint_checking()
super().alter_db_table(model, old_db_table, new_db_table)
self.connection.disable_constraint_checking()
else:
super().alter_db_table(model, old_db_table, new_db_table)
def alter_field(self, model, old_field, new_field, strict=False):
if not self._field_should_be_altered(old_field, new_field):
return
old_field_name = old_field.name
table_name = model._meta.db_table
_, old_column_name = old_field.get_attname_column()
if (
new_field.name != old_field_name
and not self.connection.features.supports_atomic_references_rename
and self._is_referenced_by_fk_constraint(
table_name, old_column_name, ignore_self=True
)
):
if self.connection.in_atomic_block:
raise NotSupportedError(
(
"Renaming the %r.%r column while in a transaction is not "
"supported on SQLite < 3.26 because it would break referential "
"integrity. Try adding `atomic = False` to the Migration class."
)
% (model._meta.db_table, old_field_name)
)
with atomic(self.connection.alias):
super().alter_field(model, old_field, new_field, strict=strict)
# Follow SQLite's documented procedure for performing changes
# that don't affect the on-disk content.
# https://sqlite.org/lang_altertable.html#otheralter
with self.connection.cursor() as cursor:
schema_version = cursor.execute("PRAGMA schema_version").fetchone()[
0
]
cursor.execute("PRAGMA writable_schema = 1")
references_template = ' REFERENCES "%s" ("%%s") ' % table_name
new_column_name = new_field.get_attname_column()[1]
search = references_template % old_column_name
replacement = references_template % new_column_name
cursor.execute(
"UPDATE sqlite_master SET sql = replace(sql, %s, %s)",
(search, replacement),
)
cursor.execute("PRAGMA schema_version = %d" % (schema_version + 1))
cursor.execute("PRAGMA writable_schema = 0")
# The integrity check will raise an exception and rollback
# the transaction if the sqlite_master updates corrupt the
# database.
cursor.execute("PRAGMA integrity_check")
# Perform a VACUUM to refresh the database representation from
# the sqlite_master table.
with self.connection.cursor() as cursor:
cursor.execute("VACUUM")
else:
super().alter_field(model, old_field, new_field, strict=strict)
def _remake_table(
self, model, create_field=None, delete_field=None, alter_field=None
):
"""
        Shortcut to transform a model from old_model into new_model.
        This follows the correct procedure to perform non-rename or column
        addition operations based on SQLite's documentation:
        https://www.sqlite.org/lang_altertable.html#caution
The essential steps are:
1. Create a table with the updated definition called "new__app_model"
2. Copy the data from the existing "app_model" table to the new table
3. Drop the "app_model" table
4. Rename the "new__app_model" table to "app_model"
5. Restore any index of the previous "app_model" table.
"""
# Self-referential fields must be recreated rather than copied from
# the old model to ensure their remote_field.field_name doesn't refer
# to an altered field.
def is_self_referential(f):
return f.is_relation and f.remote_field.model is model
# Work out the new fields dict / mapping
body = {
f.name: f.clone() if is_self_referential(f) else f
for f in model._meta.local_concrete_fields
}
# Since mapping might mix column names and default values,
# its values must be already quoted.
mapping = {
f.column: self.quote_name(f.column)
for f in model._meta.local_concrete_fields
}
# This maps field names (not columns) for things like unique_together
rename_mapping = {}
# If any of the new or altered fields is introducing a new PK,
# remove the old one
restore_pk_field = None
if getattr(create_field, "primary_key", False) or (
alter_field and getattr(alter_field[1], "primary_key", False)
):
for name, field in list(body.items()):
if field.primary_key and not (
# Do not remove the old primary key when an altered field
# that introduces a primary key is the same field.
alter_field
and name == alter_field[1].name
):
field.primary_key = False
restore_pk_field = field
if field.auto_created:
del body[name]
del mapping[field.column]
# Add in any created fields
if create_field:
body[create_field.name] = create_field
# Choose a default and insert it into the copy map
if not create_field.many_to_many and create_field.concrete:
mapping[create_field.column] = self.prepare_default(
self.effective_default(create_field),
)
# Add in any altered fields
if alter_field:
old_field, new_field = alter_field
body.pop(old_field.name, None)
mapping.pop(old_field.column, None)
body[new_field.name] = new_field
if old_field.null and not new_field.null:
case_sql = "coalesce(%(col)s, %(default)s)" % {
"col": self.quote_name(old_field.column),
"default": self.prepare_default(self.effective_default(new_field)),
}
mapping[new_field.column] = case_sql
else:
mapping[new_field.column] = self.quote_name(old_field.column)
rename_mapping[old_field.name] = new_field.name
# Remove any deleted fields
if delete_field:
del body[delete_field.name]
del mapping[delete_field.column]
# Remove any implicit M2M tables
if (
delete_field.many_to_many
and delete_field.remote_field.through._meta.auto_created
):
return self.delete_model(delete_field.remote_field.through)
# Work inside a new app registry
apps = Apps()
# Work out the new value of unique_together, taking renames into
# account
unique_together = [
[rename_mapping.get(n, n) for n in unique]
for unique in model._meta.unique_together
]
# Work out the new value for index_together, taking renames into
# account
index_together = [
[rename_mapping.get(n, n) for n in index]
for index in model._meta.index_together
]
indexes = model._meta.indexes
if delete_field:
indexes = [
index for index in indexes if delete_field.name not in index.fields
]
constraints = list(model._meta.constraints)
# Provide isolated instances of the fields to the new model body so
# that the existing model's internals aren't interfered with when
# the dummy model is constructed.
body_copy = copy.deepcopy(body)
# Construct a new model with the new fields to allow self referential
# primary key to resolve to. This model won't ever be materialized as a
# table and solely exists for foreign key reference resolution purposes.
# This wouldn't be required if the schema editor was operating on model
# states instead of rendered models.
meta_contents = {
"app_label": model._meta.app_label,
"db_table": model._meta.db_table,
"unique_together": unique_together,
"index_together": index_together,
"indexes": indexes,
"constraints": constraints,
"apps": apps,
}
meta = type("Meta", (), meta_contents)
body_copy["Meta"] = meta
body_copy["__module__"] = model.__module__
type(model._meta.object_name, model.__bases__, body_copy)
# Construct a model with a renamed table name.
body_copy = copy.deepcopy(body)
meta_contents = {
"app_label": model._meta.app_label,
"db_table": "new__%s" % strip_quotes(model._meta.db_table),
"unique_together": unique_together,
"index_together": index_together,
"indexes": indexes,
"constraints": constraints,
"apps": apps,
}
meta = type("Meta", (), meta_contents)
body_copy["Meta"] = meta
body_copy["__module__"] = model.__module__
new_model = type("New%s" % model._meta.object_name, model.__bases__, body_copy)
# Create a new table with the updated schema.
self.create_model(new_model)
# Copy data from the old table into the new table
self.execute(
"INSERT INTO %s (%s) SELECT %s FROM %s"
% (
self.quote_name(new_model._meta.db_table),
", ".join(self.quote_name(x) for x in mapping),
", ".join(mapping.values()),
self.quote_name(model._meta.db_table),
)
)
# Delete the old table to make way for the new
self.delete_model(model, handle_autom2m=False)
        # Rename the new table to take the place of the old one
self.alter_db_table(
new_model,
new_model._meta.db_table,
model._meta.db_table,
disable_constraints=False,
)
# Run deferred SQL on correct table
for sql in self.deferred_sql:
self.execute(sql)
self.deferred_sql = []
# Fix any PK-removed field
if restore_pk_field:
restore_pk_field.primary_key = True
def delete_model(self, model, handle_autom2m=True):
if handle_autom2m:
super().delete_model(model)
else:
# Delete the table (and only that)
self.execute(
self.sql_delete_table
% {
"table": self.quote_name(model._meta.db_table),
}
)
# Remove all deferred statements referencing the deleted table.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_table(
model._meta.db_table
):
self.deferred_sql.remove(sql)
def add_field(self, model, field):
"""Create a field on a model."""
if (
# Primary keys and unique fields are not supported in ALTER TABLE
# ADD COLUMN.
field.primary_key
or field.unique
or
            # Fields with default values cannot be handled by the ALTER TABLE
            # ADD COLUMN statement because DROP DEFAULT is not supported in
# ALTER TABLE.
not field.null
or self.effective_default(field) is not None
):
self._remake_table(model, create_field=field)
else:
super().add_field(model, field)
def remove_field(self, model, field):
"""
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# M2M fields are a special case
if field.many_to_many:
# For implicit M2M tables, delete the auto-created table
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# For explicit "through" M2M fields, do nothing
elif (
self.connection.features.can_alter_table_drop_column
# Primary keys, unique fields, and foreign keys are not
# supported in ALTER TABLE DROP COLUMN.
and not field.primary_key
and not field.unique
and not (field.remote_field and field.db_constraint)
):
super().remove_field(model, field)
# For everything else, remake.
else:
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)["type"] is None:
return
self._remake_table(model, delete_field=field)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
"""Perform a "physical" (non-ManyToMany) field update."""
# Use "ALTER TABLE ... RENAME COLUMN" if only the column name
# changed and there aren't any constraints.
if (
self.connection.features.can_alter_table_rename_column
and old_field.column != new_field.column
and self.column_sql(model, old_field) == self.column_sql(model, new_field)
and not (
old_field.remote_field
and old_field.db_constraint
or new_field.remote_field
and new_field.db_constraint
)
):
return self.execute(
self._rename_field_sql(
model._meta.db_table, old_field, new_field, new_type
)
)
# Alter by remaking table
self._remake_table(model, alter_field=(old_field, new_field))
# Rebuild tables with FKs pointing to this field.
if new_field.unique and old_type != new_type:
related_models = set()
opts = new_field.model._meta
for remote_field in opts.related_objects:
# Ignore self-relationship since the table was already rebuilt.
if remote_field.related_model == model:
continue
if not remote_field.many_to_many:
if remote_field.field_name == new_field.name:
related_models.add(remote_field.related_model)
elif new_field.primary_key and remote_field.through._meta.auto_created:
related_models.add(remote_field.through)
if new_field.primary_key:
for many_to_many in opts.many_to_many:
# Ignore self-relationship since the table was already rebuilt.
if many_to_many.related_model == model:
continue
if many_to_many.remote_field.through._meta.auto_created:
related_models.add(many_to_many.remote_field.through)
for related_model in related_models:
self._remake_table(related_model)
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""Alter M2Ms to repoint their to= endpoints."""
if (
old_field.remote_field.through._meta.db_table
== new_field.remote_field.through._meta.db_table
):
# The field name didn't change, but some options did, so we have to
# propagate this altering.
self._remake_table(
old_field.remote_field.through,
alter_field=(
# The field that points to the target model is needed, so
# we can tell alter_field to change it - this is
# m2m_reverse_field_name() (as opposed to m2m_field_name(),
# which points to our model).
old_field.remote_field.through._meta.get_field(
old_field.m2m_reverse_field_name()
),
new_field.remote_field.through._meta.get_field(
new_field.m2m_reverse_field_name()
),
),
)
return
# Make a new through table
self.create_model(new_field.remote_field.through)
# Copy the data across
self.execute(
"INSERT INTO %s (%s) SELECT %s FROM %s"
% (
self.quote_name(new_field.remote_field.through._meta.db_table),
", ".join(
[
"id",
new_field.m2m_column_name(),
new_field.m2m_reverse_name(),
]
),
", ".join(
[
"id",
old_field.m2m_column_name(),
old_field.m2m_reverse_name(),
]
),
self.quote_name(old_field.remote_field.through._meta.db_table),
)
)
# Delete the old through table
self.delete_model(old_field.remote_field.through)
def add_constraint(self, model, constraint):
if isinstance(constraint, UniqueConstraint) and (
constraint.condition
or constraint.contains_expressions
or constraint.include
or constraint.deferrable
):
super().add_constraint(model, constraint)
else:
self._remake_table(model)
def remove_constraint(self, model, constraint):
if isinstance(constraint, UniqueConstraint) and (
constraint.condition
or constraint.contains_expressions
or constraint.include
or constraint.deferrable
):
super().remove_constraint(model, constraint)
else:
self._remake_table(model)
def _collate_sql(self, collation):
return "COLLATE " + collation
|
861c12dadf0e8b349a3abe3f7c29ecf4d46d5f120889d565b7ce23899f9a719c | import keyword
import re
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
from django.db.models.constants import LOOKUP_SEP
class Command(BaseCommand):
help = (
"Introspects the database tables in the given database and outputs a Django "
"model module."
)
requires_system_checks = []
stealth_options = ("table_name_filter",)
db_module = "django.db"
def add_arguments(self, parser):
parser.add_argument(
"table",
nargs="*",
type=str,
help="Selects what tables or views should be introspected.",
)
parser.add_argument(
"--database",
default=DEFAULT_DB_ALIAS,
help=(
'Nominates a database to introspect. Defaults to using the "default" '
"database."
),
)
parser.add_argument(
"--include-partitions",
action="store_true",
help="Also output models for partition tables.",
)
parser.add_argument(
"--include-views",
action="store_true",
help="Also output models for database views.",
)
def handle(self, **options):
try:
for line in self.handle_inspection(options):
self.stdout.write(line)
except NotImplementedError:
raise CommandError(
"Database inspection isn't supported for the currently selected "
"database backend."
)
def handle_inspection(self, options):
connection = connections[options["database"]]
# 'table_name_filter' is a stealth option
table_name_filter = options.get("table_name_filter")
def table2model(table_name):
return re.sub(r"[^a-zA-Z0-9]", "", table_name.title())
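        # Editor's note: illustrative behavior of table2model(), with a
        # hypothetical table name:
        #
        # >>> table2model("blog_post_2024")
        # 'BlogPost2024'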
with connection.cursor() as cursor:
yield "# This is an auto-generated Django model module."
yield "# You'll have to do the following manually to clean this up:"
yield "# * Rearrange models' order"
yield "# * Make sure each model has one field with primary_key=True"
yield (
"# * Make sure each ForeignKey and OneToOneField has `on_delete` set "
"to the desired behavior"
)
yield (
"# * Remove `managed = False` lines if you wish to allow "
"Django to create, modify, and delete the table"
)
yield (
"# Feel free to rename the models, but don't rename db_table values or "
"field names."
)
yield "from %s import models" % self.db_module
known_models = []
table_info = connection.introspection.get_table_list(cursor)
# Determine types of tables and/or views to be introspected.
types = {"t"}
if options["include_partitions"]:
types.add("p")
if options["include_views"]:
types.add("v")
for table_name in options["table"] or sorted(
info.name for info in table_info if info.type in types
):
if table_name_filter is not None and callable(table_name_filter):
if not table_name_filter(table_name):
continue
try:
try:
relations = connection.introspection.get_relations(
cursor, table_name
)
except NotImplementedError:
relations = {}
try:
constraints = connection.introspection.get_constraints(
cursor, table_name
)
except NotImplementedError:
constraints = {}
primary_key_column = (
connection.introspection.get_primary_key_column(
cursor, table_name
)
)
unique_columns = [
c["columns"][0]
for c in constraints.values()
if c["unique"] and len(c["columns"]) == 1
]
table_description = connection.introspection.get_table_description(
cursor, table_name
)
except Exception as e:
yield "# Unable to inspect table '%s'" % table_name
yield "# The error was: %s" % e
continue
yield ""
yield ""
yield "class %s(models.Model):" % table2model(table_name)
known_models.append(table2model(table_name))
used_column_names = [] # Holds column names used in the table so far
column_to_field_name = {} # Maps column names to names of model fields
for row in table_description:
comment_notes = (
[]
) # Holds Field notes, to be displayed in a Python comment.
extra_params = {} # Holds Field parameters such as 'db_column'.
column_name = row.name
is_relation = column_name in relations
att_name, params, notes = self.normalize_col_name(
column_name, used_column_names, is_relation
)
extra_params.update(params)
comment_notes.extend(notes)
used_column_names.append(att_name)
column_to_field_name[column_name] = att_name
# Add primary_key and unique, if necessary.
if column_name == primary_key_column:
extra_params["primary_key"] = True
elif column_name in unique_columns:
extra_params["unique"] = True
if is_relation:
ref_db_column, ref_db_table = relations[column_name]
if extra_params.pop("unique", False) or extra_params.get(
"primary_key"
):
rel_type = "OneToOneField"
else:
rel_type = "ForeignKey"
ref_pk_column = (
connection.introspection.get_primary_key_column(
cursor, ref_db_table
)
)
if ref_pk_column and ref_pk_column != ref_db_column:
extra_params["to_field"] = ref_db_column
rel_to = (
"self"
if ref_db_table == table_name
else table2model(ref_db_table)
)
if rel_to in known_models:
field_type = "%s(%s" % (rel_type, rel_to)
else:
field_type = "%s('%s'" % (rel_type, rel_to)
else:
# Calling `get_field_type` to get the field type string and any
# additional parameters and notes.
field_type, field_params, field_notes = self.get_field_type(
connection, table_name, row
)
extra_params.update(field_params)
comment_notes.extend(field_notes)
field_type += "("
                    # Don't output 'id = models.AutoField(primary_key=True)',
                    # because that's assumed if it doesn't exist.
if att_name == "id" and extra_params == {"primary_key": True}:
if field_type == "AutoField(":
continue
elif (
field_type
== connection.features.introspected_field_types["AutoField"]
+ "("
):
comment_notes.append("AutoField?")
# Add 'null' and 'blank', if the 'null_ok' flag was present in the
# table description.
if row.null_ok: # If it's NULL...
extra_params["blank"] = True
extra_params["null"] = True
field_desc = "%s = %s%s" % (
att_name,
# Custom fields will have a dotted path
"" if "." in field_type else "models.",
field_type,
)
if field_type.startswith(("ForeignKey(", "OneToOneField(")):
field_desc += ", models.DO_NOTHING"
if extra_params:
if not field_desc.endswith("("):
field_desc += ", "
field_desc += ", ".join(
"%s=%r" % (k, v) for k, v in extra_params.items()
)
field_desc += ")"
if comment_notes:
field_desc += " # " + " ".join(comment_notes)
yield " %s" % field_desc
is_view = any(
info.name == table_name and info.type == "v" for info in table_info
)
is_partition = any(
info.name == table_name and info.type == "p" for info in table_info
)
yield from self.get_meta(
table_name, constraints, column_to_field_name, is_view, is_partition
)
def normalize_col_name(self, col_name, used_column_names, is_relation):
"""
        Modify the column name to make it Python-compatible as a field name.
"""
field_params = {}
field_notes = []
new_name = col_name.lower()
if new_name != col_name:
field_notes.append("Field name made lowercase.")
if is_relation:
if new_name.endswith("_id"):
new_name = new_name[:-3]
else:
field_params["db_column"] = col_name
new_name, num_repl = re.subn(r"\W", "_", new_name)
if num_repl > 0:
field_notes.append("Field renamed to remove unsuitable characters.")
if new_name.find(LOOKUP_SEP) >= 0:
while new_name.find(LOOKUP_SEP) >= 0:
new_name = new_name.replace(LOOKUP_SEP, "_")
if col_name.lower().find(LOOKUP_SEP) >= 0:
# Only add the comment if the double underscore was in the original name
field_notes.append(
"Field renamed because it contained more than one '_' in a row."
)
if new_name.startswith("_"):
new_name = "field%s" % new_name
field_notes.append("Field renamed because it started with '_'.")
if new_name.endswith("_"):
new_name = "%sfield" % new_name
field_notes.append("Field renamed because it ended with '_'.")
if keyword.iskeyword(new_name):
new_name += "_field"
field_notes.append("Field renamed because it was a Python reserved word.")
if new_name[0].isdigit():
new_name = "number_%s" % new_name
field_notes.append(
"Field renamed because it wasn't a valid Python identifier."
)
if new_name in used_column_names:
num = 0
while "%s_%d" % (new_name, num) in used_column_names:
num += 1
new_name = "%s_%d" % (new_name, num)
field_notes.append("Field renamed because of name conflict.")
if col_name != new_name and field_notes:
field_params["db_column"] = col_name
return new_name, field_params, field_notes
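    # A hedged sketch of what this returns (the sample column name below is
    # an assumption, not part of the command):
    #
    #     name, params, notes = self.normalize_col_name("User ID", [], False)
    #     # name == "user_id"
    #     # params == {"db_column": "User ID"}
    #     # notes == ["Field name made lowercase.",
    #     #           "Field renamed to remove unsuitable characters."]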
def get_field_type(self, connection, table_name, row):
"""
        Given the database connection, the table name, and the cursor row
        description, return the field type name, as well as any additional
        keyword parameters and notes for the field.
"""
field_params = {}
field_notes = []
try:
field_type = connection.introspection.get_field_type(row.type_code, row)
except KeyError:
field_type = "TextField"
field_notes.append("This field type is a guess.")
# Add max_length for all CharFields.
if field_type == "CharField" and row.internal_size:
field_params["max_length"] = int(row.internal_size)
if field_type in {"CharField", "TextField"} and row.collation:
field_params["db_collation"] = row.collation
if field_type == "DecimalField":
if row.precision is None or row.scale is None:
field_notes.append(
"max_digits and decimal_places have been guessed, as this "
"database handles decimal fields as float"
)
field_params["max_digits"] = (
row.precision if row.precision is not None else 10
)
field_params["decimal_places"] = (
row.scale if row.scale is not None else 5
)
else:
field_params["max_digits"] = row.precision
field_params["decimal_places"] = row.scale
return field_type, field_params, field_notes
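    # Hedged example of the DecimalField fallback above, for a backend that
    # stores decimals as floats (row.precision and row.scale are None; the
    # `row` object here is an assumption):
    #
    #     field_type, params, notes = self.get_field_type(connection, "t", row)
    #     # params == {"max_digits": 10, "decimal_places": 5}
    #     # notes explain that both values were guessed.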
def get_meta(
self, table_name, constraints, column_to_field_name, is_view, is_partition
):
"""
Return a sequence comprising the lines of code necessary
to construct the inner Meta class for the model corresponding
to the given database table name.
"""
unique_together = []
has_unsupported_constraint = False
for params in constraints.values():
if params["unique"]:
columns = params["columns"]
if None in columns:
has_unsupported_constraint = True
columns = [
x for x in columns if x is not None and x in column_to_field_name
]
if len(columns) > 1:
unique_together.append(
str(tuple(column_to_field_name[c] for c in columns))
)
if is_view:
managed_comment = " # Created from a view. Don't remove."
elif is_partition:
managed_comment = " # Created from a partition. Don't remove."
else:
managed_comment = ""
meta = [""]
if has_unsupported_constraint:
meta.append(" # A unique constraint could not be introspected.")
meta += [
" class Meta:",
" managed = False%s" % managed_comment,
" db_table = %r" % table_name,
]
if unique_together:
tup = "(" + ", ".join(unique_together) + ",)"
meta += [" unique_together = %s" % tup]
return meta
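    # Illustrative output of get_meta() for a hypothetical "legacy_orders"
    # table with one multi-column unique constraint (all values assumed):
    #
    #     class Meta:
    #         managed = False
    #         db_table = 'legacy_orders'
    #         unique_together = (('customer', 'number'),)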
|
4fdaac81220d82fde01732c4a9497914f73598f5f37a9813e18620ebe33ac996 | from django.db import NotSupportedError
from django.db.models import Func, Index
from django.utils.functional import cached_property
__all__ = [
"BloomIndex",
"BrinIndex",
"BTreeIndex",
"GinIndex",
"GistIndex",
"HashIndex",
"SpGistIndex",
]
class PostgresIndex(Index):
@cached_property
def max_name_length(self):
# Allow an index name longer than 30 characters when the suffix is
# longer than the usual 3 character limit. The 30 character limit for
# cross-database compatibility isn't applicable to PostgreSQL-specific
# indexes.
return Index.max_name_length - len(Index.suffix) + len(self.suffix)
def create_sql(self, model, schema_editor, using="", **kwargs):
self.check_supported(schema_editor)
statement = super().create_sql(
model, schema_editor, using=" USING %s" % (using or self.suffix), **kwargs
)
with_params = self.get_with_params()
if with_params:
statement.parts["extra"] = " WITH (%s)%s" % (
", ".join(with_params),
statement.parts["extra"],
)
return statement
def check_supported(self, schema_editor):
pass
def get_with_params(self):
return []
class BloomIndex(PostgresIndex):
suffix = "bloom"
def __init__(self, *expressions, length=None, columns=(), **kwargs):
super().__init__(*expressions, **kwargs)
if len(self.fields) > 32:
raise ValueError("Bloom indexes support a maximum of 32 fields.")
if not isinstance(columns, (list, tuple)):
raise ValueError("BloomIndex.columns must be a list or tuple.")
if len(columns) > len(self.fields):
raise ValueError("BloomIndex.columns cannot have more values than fields.")
if not all(0 < col <= 4095 for col in columns):
raise ValueError(
"BloomIndex.columns must contain integers from 1 to 4095.",
)
if length is not None and not 0 < length <= 4096:
raise ValueError(
"BloomIndex.length must be None or an integer from 1 to 4096.",
)
self.length = length
self.columns = columns
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.length is not None:
kwargs["length"] = self.length
if self.columns:
kwargs["columns"] = self.columns
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.length is not None:
with_params.append("length = %d" % self.length)
if self.columns:
with_params.extend(
"col%d = %d" % (i, v) for i, v in enumerate(self.columns, start=1)
)
return with_params
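# A hedged usage sketch; the model and field names below are assumptions:
#
#     class Book(models.Model):
#         title = models.TextField()
#         author = models.TextField()
#
#         class Meta:
#             indexes = [
#                 BloomIndex(
#                     fields=["title", "author"],
#                     name="book_title_author_bloom",
#                     length=64,
#                     columns=[4, 3],
#                 ),
#             ]
#
# get_with_params() then yields ["length = 64", "col1 = 4", "col2 = 3"], so
# the index is created WITH (length = 64, col1 = 4, col2 = 3).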
class BrinIndex(PostgresIndex):
suffix = "brin"
def __init__(
self, *expressions, autosummarize=None, pages_per_range=None, **kwargs
):
if pages_per_range is not None and pages_per_range <= 0:
raise ValueError("pages_per_range must be None or a positive integer")
self.autosummarize = autosummarize
self.pages_per_range = pages_per_range
super().__init__(*expressions, **kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.autosummarize is not None:
kwargs["autosummarize"] = self.autosummarize
if self.pages_per_range is not None:
kwargs["pages_per_range"] = self.pages_per_range
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.autosummarize is not None:
with_params.append(
"autosummarize = %s" % ("on" if self.autosummarize else "off")
)
if self.pages_per_range is not None:
with_params.append("pages_per_range = %d" % self.pages_per_range)
return with_params
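# Usage sketch (field and index names assumed): BrinIndex(fields=["created"],
# name="log_created_brin", autosummarize=True, pages_per_range=16) emits
# WITH (autosummarize = on, pages_per_range = 16) via get_with_params().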
class BTreeIndex(PostgresIndex):
suffix = "btree"
def __init__(self, *expressions, fillfactor=None, **kwargs):
self.fillfactor = fillfactor
super().__init__(*expressions, **kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.fillfactor is not None:
kwargs["fillfactor"] = self.fillfactor
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.fillfactor is not None:
with_params.append("fillfactor = %d" % self.fillfactor)
return with_params
class GinIndex(PostgresIndex):
suffix = "gin"
def __init__(
self, *expressions, fastupdate=None, gin_pending_list_limit=None, **kwargs
):
self.fastupdate = fastupdate
self.gin_pending_list_limit = gin_pending_list_limit
super().__init__(*expressions, **kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.fastupdate is not None:
kwargs["fastupdate"] = self.fastupdate
if self.gin_pending_list_limit is not None:
kwargs["gin_pending_list_limit"] = self.gin_pending_list_limit
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.gin_pending_list_limit is not None:
with_params.append(
"gin_pending_list_limit = %d" % self.gin_pending_list_limit
)
if self.fastupdate is not None:
with_params.append("fastupdate = %s" % ("on" if self.fastupdate else "off"))
return with_params
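# Usage sketch (field and index names assumed): GinIndex(fields=["tags"],
# name="entry_tags_gin", fastupdate=False, gin_pending_list_limit=64) emits
# WITH (gin_pending_list_limit = 64, fastupdate = off).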
class GistIndex(PostgresIndex):
suffix = "gist"
def __init__(self, *expressions, buffering=None, fillfactor=None, **kwargs):
self.buffering = buffering
self.fillfactor = fillfactor
super().__init__(*expressions, **kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.buffering is not None:
kwargs["buffering"] = self.buffering
if self.fillfactor is not None:
kwargs["fillfactor"] = self.fillfactor
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.buffering is not None:
with_params.append("buffering = %s" % ("on" if self.buffering else "off"))
if self.fillfactor is not None:
with_params.append("fillfactor = %d" % self.fillfactor)
return with_params
def check_supported(self, schema_editor):
if (
self.include
and not schema_editor.connection.features.supports_covering_gist_indexes
):
raise NotSupportedError("Covering GiST indexes require PostgreSQL 12+.")
class HashIndex(PostgresIndex):
suffix = "hash"
def __init__(self, *expressions, fillfactor=None, **kwargs):
self.fillfactor = fillfactor
super().__init__(*expressions, **kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.fillfactor is not None:
kwargs["fillfactor"] = self.fillfactor
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.fillfactor is not None:
with_params.append("fillfactor = %d" % self.fillfactor)
return with_params
class SpGistIndex(PostgresIndex):
suffix = "spgist"
def __init__(self, *expressions, fillfactor=None, **kwargs):
self.fillfactor = fillfactor
super().__init__(*expressions, **kwargs)
def deconstruct(self):
path, args, kwargs = super().deconstruct()
if self.fillfactor is not None:
kwargs["fillfactor"] = self.fillfactor
return path, args, kwargs
def get_with_params(self):
with_params = []
if self.fillfactor is not None:
with_params.append("fillfactor = %d" % self.fillfactor)
return with_params
def check_supported(self, schema_editor):
if (
self.include
and not schema_editor.connection.features.supports_covering_spgist_indexes
):
raise NotSupportedError("Covering SP-GiST indexes require PostgreSQL 14+.")
class OpClass(Func):
template = "%(expressions)s %(name)s"
def __init__(self, expression, name):
super().__init__(expression, name=name)
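# OpClass attaches a PostgreSQL operator class to an indexed expression. A
# hedged example (the index and column names are assumptions):
#
#     from django.db.models.functions import Lower
#
#     Index(
#         OpClass(Lower("username"), name="varchar_pattern_ops"),
#         name="lower_username_idx",
#     )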
|
f026eb74ba32e8d9c27719b39b80444d31c866f5734bae781676eede0eb524ac | import warnings
from urllib.parse import urlparse, urlunparse
from django.conf import settings
# Avoid shadowing the login() and logout() views below.
from django.contrib.auth import REDIRECT_FIELD_NAME, get_user_model
from django.contrib.auth import login as auth_login
from django.contrib.auth import logout as auth_logout
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import (
AuthenticationForm,
PasswordChangeForm,
PasswordResetForm,
SetPasswordForm,
)
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.http import HttpResponseRedirect, QueryDict
from django.shortcuts import resolve_url
from django.urls import reverse_lazy
from django.utils.decorators import method_decorator
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.http import url_has_allowed_host_and_scheme, urlsafe_base64_decode
from django.utils.translation import gettext_lazy as _
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.views.generic.base import TemplateView
from django.views.generic.edit import FormView
UserModel = get_user_model()
class RedirectURLMixin:
next_page = None
redirect_field_name = REDIRECT_FIELD_NAME
success_url_allowed_hosts = set()
def get_success_url(self):
return self.get_redirect_url() or self.get_default_redirect_url()
def get_redirect_url(self):
"""Return the user-originating redirect URL if it's safe."""
redirect_to = self.request.POST.get(
self.redirect_field_name, self.request.GET.get(self.redirect_field_name)
)
url_is_safe = url_has_allowed_host_and_scheme(
url=redirect_to,
allowed_hosts=self.get_success_url_allowed_hosts(),
require_https=self.request.is_secure(),
)
return redirect_to if url_is_safe else ""
def get_success_url_allowed_hosts(self):
return {self.request.get_host(), *self.success_url_allowed_hosts}
def get_default_redirect_url(self):
"""Return the default redirect URL."""
if self.next_page:
return resolve_url(self.next_page)
raise ImproperlyConfigured("No URL to redirect to. Provide a next_page.")
class LoginView(RedirectURLMixin, FormView):
"""
Display the login form and handle the login action.
"""
form_class = AuthenticationForm
authentication_form = None
template_name = "registration/login.html"
redirect_authenticated_user = False
extra_context = None
@method_decorator(sensitive_post_parameters())
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
if self.redirect_authenticated_user and self.request.user.is_authenticated:
redirect_to = self.get_success_url()
if redirect_to == self.request.path:
raise ValueError(
"Redirection loop for authenticated user detected. Check that "
"your LOGIN_REDIRECT_URL doesn't point to a login page."
)
return HttpResponseRedirect(redirect_to)
return super().dispatch(request, *args, **kwargs)
def get_default_redirect_url(self):
"""Return the default redirect URL."""
if self.next_page:
return resolve_url(self.next_page)
else:
return resolve_url(settings.LOGIN_REDIRECT_URL)
def get_form_class(self):
return self.authentication_form or self.form_class
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["request"] = self.request
return kwargs
def form_valid(self, form):
"""Security check complete. Log the user in."""
auth_login(self.request, form.get_user())
return HttpResponseRedirect(self.get_success_url())
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
current_site = get_current_site(self.request)
context.update(
{
self.redirect_field_name: self.get_redirect_url(),
"site": current_site,
"site_name": current_site.name,
**(self.extra_context or {}),
}
)
return context
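# URLconf sketch for wiring this view up (the route path is an assumption):
#
#     from django.urls import path
#
#     path("accounts/login/",
#          LoginView.as_view(redirect_authenticated_user=True),
#          name="login"),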
class LogoutView(RedirectURLMixin, TemplateView):
"""
Log out the user and display the 'You are logged out' message.
"""
# RemovedInDjango50Warning: when the deprecation ends, remove "get" and
# "head" from http_method_names.
http_method_names = ["get", "head", "post", "options"]
template_name = "registration/logged_out.html"
extra_context = None
# RemovedInDjango50Warning: when the deprecation ends, move
# @method_decorator(csrf_protect) from post() to dispatch().
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
if request.method.lower() == "get":
warnings.warn(
"Log out via GET requests is deprecated and will be removed in Django "
"5.0. Use POST requests for logging out.",
RemovedInDjango50Warning,
)
return super().dispatch(request, *args, **kwargs)
@method_decorator(csrf_protect)
def post(self, request, *args, **kwargs):
"""Logout may be done via POST."""
auth_logout(request)
redirect_to = self.get_success_url()
if redirect_to != request.get_full_path():
# Redirect to target page once the session has been cleared.
return HttpResponseRedirect(redirect_to)
return super().get(request, *args, **kwargs)
# RemovedInDjango50Warning.
get = post
def get_default_redirect_url(self):
"""Return the default redirect URL."""
if self.next_page:
return resolve_url(self.next_page)
elif settings.LOGOUT_REDIRECT_URL:
return resolve_url(settings.LOGOUT_REDIRECT_URL)
else:
return self.request.path
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
current_site = get_current_site(self.request)
context.update(
{
"site": current_site,
"site_name": current_site.name,
"title": _("Logged out"),
"subtitle": None,
**(self.extra_context or {}),
}
)
return context
def logout_then_login(request, login_url=None):
"""
Log out the user if they are logged in. Then redirect to the login page.
"""
login_url = resolve_url(login_url or settings.LOGIN_URL)
return LogoutView.as_view(next_page=login_url)(request)
def redirect_to_login(next, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
"""
Redirect the user to the login page, passing the given 'next' page.
"""
resolved_url = resolve_url(login_url or settings.LOGIN_URL)
login_url_parts = list(urlparse(resolved_url))
if redirect_field_name:
querystring = QueryDict(login_url_parts[4], mutable=True)
querystring[redirect_field_name] = next
login_url_parts[4] = querystring.urlencode(safe="/")
return HttpResponseRedirect(urlunparse(login_url_parts))
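# Illustrative behavior (paths assumed): redirect_to_login("/orders/",
# login_url="/accounts/login/") responds with a redirect to
# "/accounts/login/?next=/orders/".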
# Class-based password reset views
# - PasswordResetView sends the mail
# - PasswordResetDoneView shows a success message for the above
# - PasswordResetConfirmView checks the link the user clicked and
# prompts for a new password
# - PasswordResetCompleteView shows a success message for the above
class PasswordContextMixin:
extra_context = None
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update(
{"title": self.title, "subtitle": None, **(self.extra_context or {})}
)
return context
class PasswordResetView(PasswordContextMixin, FormView):
email_template_name = "registration/password_reset_email.html"
extra_email_context = None
form_class = PasswordResetForm
from_email = None
html_email_template_name = None
subject_template_name = "registration/password_reset_subject.txt"
success_url = reverse_lazy("password_reset_done")
template_name = "registration/password_reset_form.html"
title = _("Password reset")
token_generator = default_token_generator
@method_decorator(csrf_protect)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
def form_valid(self, form):
opts = {
"use_https": self.request.is_secure(),
"token_generator": self.token_generator,
"from_email": self.from_email,
"email_template_name": self.email_template_name,
"subject_template_name": self.subject_template_name,
"request": self.request,
"html_email_template_name": self.html_email_template_name,
"extra_email_context": self.extra_email_context,
}
form.save(**opts)
return super().form_valid(form)
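# URLconf sketch covering the four reset views listed above; the route paths
# and the "password_reset_confirm" name are assumptions, while the other URL
# names match the reverse_lazy() calls in these views:
#
#     path("password_reset/", PasswordResetView.as_view(),
#          name="password_reset"),
#     path("password_reset/done/", PasswordResetDoneView.as_view(),
#          name="password_reset_done"),
#     path("reset/<uidb64>/<token>/", PasswordResetConfirmView.as_view(),
#          name="password_reset_confirm"),
#     path("reset/done/", PasswordResetCompleteView.as_view(),
#          name="password_reset_complete"),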
INTERNAL_RESET_SESSION_TOKEN = "_password_reset_token"
class PasswordResetDoneView(PasswordContextMixin, TemplateView):
template_name = "registration/password_reset_done.html"
title = _("Password reset sent")
class PasswordResetConfirmView(PasswordContextMixin, FormView):
form_class = SetPasswordForm
post_reset_login = False
post_reset_login_backend = None
reset_url_token = "set-password"
success_url = reverse_lazy("password_reset_complete")
template_name = "registration/password_reset_confirm.html"
title = _("Enter new password")
token_generator = default_token_generator
@method_decorator(sensitive_post_parameters())
@method_decorator(never_cache)
def dispatch(self, *args, **kwargs):
if "uidb64" not in kwargs or "token" not in kwargs:
raise ImproperlyConfigured(
"The URL path must contain 'uidb64' and 'token' parameters."
)
self.validlink = False
self.user = self.get_user(kwargs["uidb64"])
if self.user is not None:
token = kwargs["token"]
if token == self.reset_url_token:
session_token = self.request.session.get(INTERNAL_RESET_SESSION_TOKEN)
if self.token_generator.check_token(self.user, session_token):
# If the token is valid, display the password reset form.
self.validlink = True
return super().dispatch(*args, **kwargs)
else:
if self.token_generator.check_token(self.user, token):
# Store the token in the session and redirect to the
# password reset form at a URL without the token. That
# avoids the possibility of leaking the token in the
# HTTP Referer header.
self.request.session[INTERNAL_RESET_SESSION_TOKEN] = token
redirect_url = self.request.path.replace(
token, self.reset_url_token
)
return HttpResponseRedirect(redirect_url)
# Display the "Password reset unsuccessful" page.
return self.render_to_response(self.get_context_data())
def get_user(self, uidb64):
try:
            # urlsafe_base64_decode() decodes to a bytestring.
uid = urlsafe_base64_decode(uidb64).decode()
user = UserModel._default_manager.get(pk=uid)
except (
TypeError,
ValueError,
OverflowError,
UserModel.DoesNotExist,
ValidationError,
):
user = None
return user
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["user"] = self.user
return kwargs
def form_valid(self, form):
user = form.save()
del self.request.session[INTERNAL_RESET_SESSION_TOKEN]
if self.post_reset_login:
auth_login(self.request, user, self.post_reset_login_backend)
return super().form_valid(form)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
if self.validlink:
context["validlink"] = True
else:
context.update(
{
"form": None,
"title": _("Password reset unsuccessful"),
"validlink": False,
}
)
return context
class PasswordResetCompleteView(PasswordContextMixin, TemplateView):
template_name = "registration/password_reset_complete.html"
title = _("Password reset complete")
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["login_url"] = resolve_url(settings.LOGIN_URL)
return context
class PasswordChangeView(PasswordContextMixin, FormView):
form_class = PasswordChangeForm
success_url = reverse_lazy("password_change_done")
template_name = "registration/password_change_form.html"
title = _("Password change")
@method_decorator(sensitive_post_parameters())
@method_decorator(csrf_protect)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["user"] = self.request.user
return kwargs
def form_valid(self, form):
form.save()
# Updating the password logs out all other sessions for the user
# except the current one.
update_session_auth_hash(self.request, form.user)
return super().form_valid(form)
class PasswordChangeDoneView(PasswordContextMixin, TemplateView):
template_name = "registration/password_change_done.html"
title = _("Password change successful")
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
|
6dff8364153b858ffeef3764a40517e84e42e682b8513648ef5ffd4a9f2ebbb6 | import collections
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.contrib.admin.utils import NotRelationField, flatten, get_fields_from_path
from django.core import checks
from django.core.exceptions import FieldDoesNotExist
from django.db import models
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import Combinable
from django.forms.models import BaseModelForm, BaseModelFormSet, _get_foreign_key
from django.template import engines
from django.template.backends.django import DjangoTemplates
from django.utils.module_loading import import_string
def _issubclass(cls, classinfo):
"""
issubclass() variant that doesn't raise an exception if cls isn't a
class.
"""
try:
return issubclass(cls, classinfo)
except TypeError:
return False
def _contains_subclass(class_path, candidate_paths):
"""
Return whether or not a dotted class path (or a subclass of that class) is
found in a list of candidate paths.
"""
cls = import_string(class_path)
for path in candidate_paths:
try:
candidate_cls = import_string(path)
except ImportError:
# ImportErrors are raised elsewhere.
continue
if _issubclass(candidate_cls, cls):
return True
return False
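# Hedged example: assuming AuthenticationMiddleware, or a subclass of it, is
# listed in MIDDLEWARE, this returns True.
#
#     _contains_subclass(
#         "django.contrib.auth.middleware.AuthenticationMiddleware",
#         settings.MIDDLEWARE,
#     )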
def check_admin_app(app_configs, **kwargs):
from django.contrib.admin.sites import all_sites
errors = []
for site in all_sites:
errors.extend(site.check(app_configs))
return errors
def check_dependencies(**kwargs):
"""
Check that the admin's dependencies are correctly installed.
"""
from django.contrib.admin.sites import all_sites
if not apps.is_installed("django.contrib.admin"):
return []
errors = []
app_dependencies = (
("django.contrib.contenttypes", 401),
("django.contrib.auth", 405),
("django.contrib.messages", 406),
)
for app_name, error_code in app_dependencies:
if not apps.is_installed(app_name):
errors.append(
checks.Error(
"'%s' must be in INSTALLED_APPS in order to use the admin "
"application." % app_name,
id="admin.E%d" % error_code,
)
)
for engine in engines.all():
if isinstance(engine, DjangoTemplates):
django_templates_instance = engine.engine
break
else:
django_templates_instance = None
if not django_templates_instance:
errors.append(
checks.Error(
"A 'django.template.backends.django.DjangoTemplates' instance "
"must be configured in TEMPLATES in order to use the admin "
"application.",
id="admin.E403",
)
)
else:
if (
"django.contrib.auth.context_processors.auth"
not in django_templates_instance.context_processors
and _contains_subclass(
"django.contrib.auth.backends.ModelBackend",
settings.AUTHENTICATION_BACKENDS,
)
):
errors.append(
checks.Error(
"'django.contrib.auth.context_processors.auth' must be "
"enabled in DjangoTemplates (TEMPLATES) if using the default "
"auth backend in order to use the admin application.",
id="admin.E402",
)
)
if (
"django.contrib.messages.context_processors.messages"
not in django_templates_instance.context_processors
):
errors.append(
checks.Error(
"'django.contrib.messages.context_processors.messages' must "
"be enabled in DjangoTemplates (TEMPLATES) in order to use "
"the admin application.",
id="admin.E404",
)
)
sidebar_enabled = any(site.enable_nav_sidebar for site in all_sites)
if (
sidebar_enabled
and "django.template.context_processors.request"
not in django_templates_instance.context_processors
):
errors.append(
checks.Warning(
"'django.template.context_processors.request' must be enabled "
"in DjangoTemplates (TEMPLATES) in order to use the admin "
"navigation sidebar.",
id="admin.W411",
)
)
if not _contains_subclass(
"django.contrib.auth.middleware.AuthenticationMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.auth.middleware.AuthenticationMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
id="admin.E408",
)
)
if not _contains_subclass(
"django.contrib.messages.middleware.MessageMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.messages.middleware.MessageMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
id="admin.E409",
)
)
if not _contains_subclass(
"django.contrib.sessions.middleware.SessionMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.sessions.middleware.SessionMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
hint=(
"Insert "
"'django.contrib.sessions.middleware.SessionMiddleware' "
"before "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
),
id="admin.E410",
)
)
return errors
class BaseModelAdminChecks:
def check(self, admin_obj, **kwargs):
return [
*self._check_autocomplete_fields(admin_obj),
*self._check_raw_id_fields(admin_obj),
*self._check_fields(admin_obj),
*self._check_fieldsets(admin_obj),
*self._check_exclude(admin_obj),
*self._check_form(admin_obj),
*self._check_filter_vertical(admin_obj),
*self._check_filter_horizontal(admin_obj),
*self._check_radio_fields(admin_obj),
*self._check_prepopulated_fields(admin_obj),
*self._check_view_on_site_url(admin_obj),
*self._check_ordering(admin_obj),
*self._check_readonly_fields(admin_obj),
]
def _check_autocomplete_fields(self, obj):
"""
Check that `autocomplete_fields` is a list or tuple of model fields.
"""
if not isinstance(obj.autocomplete_fields, (list, tuple)):
return must_be(
"a list or tuple",
option="autocomplete_fields",
obj=obj,
id="admin.E036",
)
else:
return list(
chain.from_iterable(
[
self._check_autocomplete_fields_item(
obj, field_name, "autocomplete_fields[%d]" % index
)
for index, field_name in enumerate(obj.autocomplete_fields)
]
)
)
def _check_autocomplete_fields_item(self, obj, field_name, label):
"""
Check that an item in `autocomplete_fields` is a ForeignKey or a
ManyToManyField and that the item has a related ModelAdmin with
search_fields defined.
"""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E037"
)
else:
if not field.many_to_many and not isinstance(field, models.ForeignKey):
return must_be(
"a foreign key or a many-to-many field",
option=label,
obj=obj,
id="admin.E038",
)
related_admin = obj.admin_site._registry.get(field.remote_field.model)
if related_admin is None:
return [
checks.Error(
'An admin for model "%s" has to be registered '
"to be referenced by %s.autocomplete_fields."
% (
field.remote_field.model.__name__,
type(obj).__name__,
),
obj=obj.__class__,
id="admin.E039",
)
]
elif not related_admin.search_fields:
return [
checks.Error(
'%s must define "search_fields", because it\'s '
"referenced by %s.autocomplete_fields."
% (
related_admin.__class__.__name__,
type(obj).__name__,
),
obj=obj.__class__,
id="admin.E040",
)
]
return []
def _check_raw_id_fields(self, obj):
"""Check that `raw_id_fields` only contains field names that are listed
on the model."""
if not isinstance(obj.raw_id_fields, (list, tuple)):
return must_be(
"a list or tuple", option="raw_id_fields", obj=obj, id="admin.E001"
)
else:
return list(
chain.from_iterable(
self._check_raw_id_fields_item(
obj, field_name, "raw_id_fields[%d]" % index
)
for index, field_name in enumerate(obj.raw_id_fields)
)
)
def _check_raw_id_fields_item(self, obj, field_name, label):
"""Check an item of `raw_id_fields`, i.e. check that field named
`field_name` exists in model `model` and is a ForeignKey or a
ManyToManyField."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E002"
)
else:
# Using attname is not supported.
if field.name != field_name:
return refer_to_missing_field(
field=field_name,
option=label,
obj=obj,
id="admin.E002",
)
if not field.many_to_many and not isinstance(field, models.ForeignKey):
return must_be(
"a foreign key or a many-to-many field",
option=label,
obj=obj,
id="admin.E003",
)
else:
return []
def _check_fields(self, obj):
"""Check that `fields` only refer to existing fields, doesn't contain
duplicates. Check if at most one of `fields` and `fieldsets` is defined.
"""
if obj.fields is None:
return []
elif not isinstance(obj.fields, (list, tuple)):
return must_be("a list or tuple", option="fields", obj=obj, id="admin.E004")
elif obj.fieldsets:
return [
checks.Error(
"Both 'fieldsets' and 'fields' are specified.",
obj=obj.__class__,
id="admin.E005",
)
]
fields = flatten(obj.fields)
if len(fields) != len(set(fields)):
return [
checks.Error(
"The value of 'fields' contains duplicate field(s).",
obj=obj.__class__,
id="admin.E006",
)
]
return list(
chain.from_iterable(
self._check_field_spec(obj, field_name, "fields")
for field_name in obj.fields
)
)
def _check_fieldsets(self, obj):
"""Check that fieldsets is properly formatted and doesn't contain
duplicates."""
if obj.fieldsets is None:
return []
elif not isinstance(obj.fieldsets, (list, tuple)):
return must_be(
"a list or tuple", option="fieldsets", obj=obj, id="admin.E007"
)
else:
seen_fields = []
return list(
chain.from_iterable(
self._check_fieldsets_item(
obj, fieldset, "fieldsets[%d]" % index, seen_fields
)
for index, fieldset in enumerate(obj.fieldsets)
)
)
def _check_fieldsets_item(self, obj, fieldset, label, seen_fields):
"""Check an item of `fieldsets`, i.e. check that this is a pair of a
set name and a dictionary containing "fields" key."""
if not isinstance(fieldset, (list, tuple)):
return must_be("a list or tuple", option=label, obj=obj, id="admin.E008")
elif len(fieldset) != 2:
return must_be("of length 2", option=label, obj=obj, id="admin.E009")
elif not isinstance(fieldset[1], dict):
return must_be(
"a dictionary", option="%s[1]" % label, obj=obj, id="admin.E010"
)
elif "fields" not in fieldset[1]:
return [
checks.Error(
"The value of '%s[1]' must contain the key 'fields'." % label,
obj=obj.__class__,
id="admin.E011",
)
]
elif not isinstance(fieldset[1]["fields"], (list, tuple)):
return must_be(
"a list or tuple",
option="%s[1]['fields']" % label,
obj=obj,
id="admin.E008",
)
seen_fields.extend(flatten(fieldset[1]["fields"]))
if len(seen_fields) != len(set(seen_fields)):
return [
checks.Error(
"There are duplicate field(s) in '%s[1]'." % label,
obj=obj.__class__,
id="admin.E012",
)
]
return list(
chain.from_iterable(
self._check_field_spec(obj, fieldset_fields, '%s[1]["fields"]' % label)
for fieldset_fields in fieldset[1]["fields"]
)
)
def _check_field_spec(self, obj, fields, label):
"""`fields` should be an item of `fields` or an item of
fieldset[1]['fields'] for any `fieldset` in `fieldsets`. It should be a
field name or a tuple of field names."""
if isinstance(fields, tuple):
return list(
chain.from_iterable(
self._check_field_spec_item(
obj, field_name, "%s[%d]" % (label, index)
)
for index, field_name in enumerate(fields)
)
)
else:
return self._check_field_spec_item(obj, fields, label)
def _check_field_spec_item(self, obj, field_name, label):
if field_name in obj.readonly_fields:
            # Items in `fields` don't have to be actual model fields as long
            # as they're listed in `readonly_fields`; the `readonly_fields`
            # checks handle the validation of such items.
return []
else:
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
# If we can't find a field on the model that matches, it could
# be an extra field on the form.
return []
else:
if (
isinstance(field, models.ManyToManyField)
and not field.remote_field.through._meta.auto_created
):
return [
checks.Error(
"The value of '%s' cannot include the ManyToManyField "
"'%s', because that field manually specifies a "
"relationship model." % (label, field_name),
obj=obj.__class__,
id="admin.E013",
)
]
else:
return []
def _check_exclude(self, obj):
"""Check that exclude is a sequence without duplicates."""
if obj.exclude is None: # default value is None
return []
elif not isinstance(obj.exclude, (list, tuple)):
return must_be(
"a list or tuple", option="exclude", obj=obj, id="admin.E014"
)
elif len(obj.exclude) > len(set(obj.exclude)):
return [
checks.Error(
"The value of 'exclude' contains duplicate field(s).",
obj=obj.__class__,
id="admin.E015",
)
]
else:
return []
def _check_form(self, obj):
"""Check that form subclasses BaseModelForm."""
if not _issubclass(obj.form, BaseModelForm):
return must_inherit_from(
parent="BaseModelForm", option="form", obj=obj, id="admin.E016"
)
else:
return []
def _check_filter_vertical(self, obj):
"""Check that filter_vertical is a sequence of field names."""
if not isinstance(obj.filter_vertical, (list, tuple)):
return must_be(
"a list or tuple", option="filter_vertical", obj=obj, id="admin.E017"
)
else:
return list(
chain.from_iterable(
self._check_filter_item(
obj, field_name, "filter_vertical[%d]" % index
)
for index, field_name in enumerate(obj.filter_vertical)
)
)
def _check_filter_horizontal(self, obj):
"""Check that filter_horizontal is a sequence of field names."""
if not isinstance(obj.filter_horizontal, (list, tuple)):
return must_be(
"a list or tuple", option="filter_horizontal", obj=obj, id="admin.E018"
)
else:
return list(
chain.from_iterable(
self._check_filter_item(
obj, field_name, "filter_horizontal[%d]" % index
)
for index, field_name in enumerate(obj.filter_horizontal)
)
)
def _check_filter_item(self, obj, field_name, label):
"""Check one item of `filter_vertical` or `filter_horizontal`, i.e.
check that given field exists and is a ManyToManyField."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E019"
)
else:
if not field.many_to_many:
return must_be(
"a many-to-many field", option=label, obj=obj, id="admin.E020"
)
else:
return []
def _check_radio_fields(self, obj):
"""Check that `radio_fields` is a dictionary."""
if not isinstance(obj.radio_fields, dict):
return must_be(
"a dictionary", option="radio_fields", obj=obj, id="admin.E021"
)
else:
return list(
chain.from_iterable(
self._check_radio_fields_key(obj, field_name, "radio_fields")
+ self._check_radio_fields_value(
obj, val, 'radio_fields["%s"]' % field_name
)
for field_name, val in obj.radio_fields.items()
)
)
def _check_radio_fields_key(self, obj, field_name, label):
"""Check that a key of `radio_fields` dictionary is name of existing
field and that the field is a ForeignKey or has `choices` defined."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E022"
)
else:
if not (isinstance(field, models.ForeignKey) or field.choices):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not an "
"instance of ForeignKey, and does not have a 'choices' "
"definition." % (label, field_name),
obj=obj.__class__,
id="admin.E023",
)
]
else:
return []
def _check_radio_fields_value(self, obj, val, label):
"""Check type of a value of `radio_fields` dictionary."""
from django.contrib.admin.options import HORIZONTAL, VERTICAL
if val not in (HORIZONTAL, VERTICAL):
return [
checks.Error(
"The value of '%s' must be either admin.HORIZONTAL or "
"admin.VERTICAL." % label,
obj=obj.__class__,
id="admin.E024",
)
]
else:
return []
def _check_view_on_site_url(self, obj):
if not callable(obj.view_on_site) and not isinstance(obj.view_on_site, bool):
return [
checks.Error(
"The value of 'view_on_site' must be a callable or a boolean "
"value.",
obj=obj.__class__,
id="admin.E025",
)
]
else:
return []
def _check_prepopulated_fields(self, obj):
"""Check that `prepopulated_fields` is a dictionary containing allowed
field types."""
if not isinstance(obj.prepopulated_fields, dict):
return must_be(
"a dictionary", option="prepopulated_fields", obj=obj, id="admin.E026"
)
else:
return list(
chain.from_iterable(
self._check_prepopulated_fields_key(
obj, field_name, "prepopulated_fields"
)
+ self._check_prepopulated_fields_value(
obj, val, 'prepopulated_fields["%s"]' % field_name
)
for field_name, val in obj.prepopulated_fields.items()
)
)
def _check_prepopulated_fields_key(self, obj, field_name, label):
"""Check a key of `prepopulated_fields` dictionary, i.e. check that it
is a name of existing field and the field is one of the allowed types.
"""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E027"
)
else:
if isinstance(
field, (models.DateTimeField, models.ForeignKey, models.ManyToManyField)
):
return [
checks.Error(
"The value of '%s' refers to '%s', which must not be a "
"DateTimeField, a ForeignKey, a OneToOneField, or a "
"ManyToManyField." % (label, field_name),
obj=obj.__class__,
id="admin.E028",
)
]
else:
return []
def _check_prepopulated_fields_value(self, obj, val, label):
"""Check a value of `prepopulated_fields` dictionary, i.e. it's an
iterable of existing fields."""
if not isinstance(val, (list, tuple)):
return must_be("a list or tuple", option=label, obj=obj, id="admin.E029")
else:
return list(
chain.from_iterable(
self._check_prepopulated_fields_value_item(
obj, subfield_name, "%s[%r]" % (label, index)
)
for index, subfield_name in enumerate(val)
)
)
def _check_prepopulated_fields_value_item(self, obj, field_name, label):
"""For `prepopulated_fields` equal to {"slug": ("title",)},
`field_name` is "title"."""
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E030"
)
else:
return []
def _check_ordering(self, obj):
"""Check that ordering refers to existing fields or is random."""
# ordering = None
if obj.ordering is None: # The default value is None
return []
elif not isinstance(obj.ordering, (list, tuple)):
return must_be(
"a list or tuple", option="ordering", obj=obj, id="admin.E031"
)
else:
return list(
chain.from_iterable(
self._check_ordering_item(obj, field_name, "ordering[%d]" % index)
for index, field_name in enumerate(obj.ordering)
)
)
def _check_ordering_item(self, obj, field_name, label):
"""Check that `ordering` refers to existing fields."""
if isinstance(field_name, (Combinable, models.OrderBy)):
if not isinstance(field_name, models.OrderBy):
field_name = field_name.asc()
if isinstance(field_name.expression, models.F):
field_name = field_name.expression.name
else:
return []
if field_name == "?" and len(obj.ordering) != 1:
return [
checks.Error(
"The value of 'ordering' has the random ordering marker '?', "
"but contains other fields as well.",
hint='Either remove the "?", or remove the other fields.',
obj=obj.__class__,
id="admin.E032",
)
]
elif field_name == "?":
return []
elif LOOKUP_SEP in field_name:
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
return []
else:
if field_name.startswith("-"):
field_name = field_name[1:]
if field_name == "pk":
return []
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E033"
)
else:
return []
def _check_readonly_fields(self, obj):
"""Check that readonly_fields refers to proper attribute or field."""
if obj.readonly_fields == ():
return []
elif not isinstance(obj.readonly_fields, (list, tuple)):
return must_be(
"a list or tuple", option="readonly_fields", obj=obj, id="admin.E034"
)
else:
return list(
chain.from_iterable(
self._check_readonly_fields_item(
obj, field_name, "readonly_fields[%d]" % index
)
for index, field_name in enumerate(obj.readonly_fields)
)
)
def _check_readonly_fields_item(self, obj, field_name, label):
if callable(field_name):
return []
elif hasattr(obj, field_name):
return []
elif hasattr(obj.model, field_name):
return []
else:
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return [
checks.Error(
"The value of '%s' is not a callable, an attribute of "
"'%s', or an attribute of '%s'."
% (
label,
obj.__class__.__name__,
obj.model._meta.label,
),
obj=obj.__class__,
id="admin.E035",
)
]
else:
return []
class ModelAdminChecks(BaseModelAdminChecks):
def check(self, admin_obj, **kwargs):
return [
*super().check(admin_obj),
*self._check_save_as(admin_obj),
*self._check_save_on_top(admin_obj),
*self._check_inlines(admin_obj),
*self._check_list_display(admin_obj),
*self._check_list_display_links(admin_obj),
*self._check_list_filter(admin_obj),
*self._check_list_select_related(admin_obj),
*self._check_list_per_page(admin_obj),
*self._check_list_max_show_all(admin_obj),
*self._check_list_editable(admin_obj),
*self._check_search_fields(admin_obj),
*self._check_date_hierarchy(admin_obj),
*self._check_action_permission_methods(admin_obj),
*self._check_actions_uniqueness(admin_obj),
]
def _check_save_as(self, obj):
"""Check save_as is a boolean."""
if not isinstance(obj.save_as, bool):
return must_be("a boolean", option="save_as", obj=obj, id="admin.E101")
else:
return []
def _check_save_on_top(self, obj):
"""Check save_on_top is a boolean."""
if not isinstance(obj.save_on_top, bool):
return must_be("a boolean", option="save_on_top", obj=obj, id="admin.E102")
else:
return []
def _check_inlines(self, obj):
"""Check all inline model admin classes."""
if not isinstance(obj.inlines, (list, tuple)):
return must_be(
"a list or tuple", option="inlines", obj=obj, id="admin.E103"
)
else:
return list(
chain.from_iterable(
self._check_inlines_item(obj, item, "inlines[%d]" % index)
for index, item in enumerate(obj.inlines)
)
)
def _check_inlines_item(self, obj, inline, label):
"""Check one inline model admin."""
try:
inline_label = inline.__module__ + "." + inline.__name__
except AttributeError:
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % obj,
obj=obj.__class__,
id="admin.E104",
)
]
from django.contrib.admin.options import InlineModelAdmin
if not _issubclass(inline, InlineModelAdmin):
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % inline_label,
obj=obj.__class__,
id="admin.E104",
)
]
elif not inline.model:
return [
checks.Error(
"'%s' must have a 'model' attribute." % inline_label,
obj=obj.__class__,
id="admin.E105",
)
]
elif not _issubclass(inline.model, models.Model):
return must_be(
"a Model", option="%s.model" % inline_label, obj=obj, id="admin.E106"
)
else:
return inline(obj.model, obj.admin_site).check()
def _check_list_display(self, obj):
"""Check that list_display only contains fields or usable attributes."""
if not isinstance(obj.list_display, (list, tuple)):
return must_be(
"a list or tuple", option="list_display", obj=obj, id="admin.E107"
)
else:
return list(
chain.from_iterable(
self._check_list_display_item(obj, item, "list_display[%d]" % index)
for index, item in enumerate(obj.list_display)
)
)
def _check_list_display_item(self, obj, item, label):
if callable(item):
return []
elif hasattr(obj, item):
return []
try:
field = obj.model._meta.get_field(item)
except FieldDoesNotExist:
try:
field = getattr(obj.model, item)
except AttributeError:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a "
"callable, an attribute of '%s', or an attribute or "
"method on '%s'."
% (
label,
item,
obj.__class__.__name__,
obj.model._meta.label,
),
obj=obj.__class__,
id="admin.E108",
)
]
if isinstance(field, models.ManyToManyField):
return [
checks.Error(
"The value of '%s' must not be a ManyToManyField." % label,
obj=obj.__class__,
id="admin.E109",
)
]
return []
def _check_list_display_links(self, obj):
"""Check that list_display_links is a unique subset of list_display."""
from django.contrib.admin.options import ModelAdmin
if obj.list_display_links is None:
return []
elif not isinstance(obj.list_display_links, (list, tuple)):
return must_be(
"a list, a tuple, or None",
option="list_display_links",
obj=obj,
id="admin.E110",
)
# Check only if ModelAdmin.get_list_display() isn't overridden.
elif obj.get_list_display.__func__ is ModelAdmin.get_list_display:
return list(
chain.from_iterable(
self._check_list_display_links_item(
obj, field_name, "list_display_links[%d]" % index
)
for index, field_name in enumerate(obj.list_display_links)
)
)
return []
def _check_list_display_links_item(self, obj, field_name, label):
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not defined in "
"'list_display'." % (label, field_name),
obj=obj.__class__,
id="admin.E111",
)
]
else:
return []
def _check_list_filter(self, obj):
if not isinstance(obj.list_filter, (list, tuple)):
return must_be(
"a list or tuple", option="list_filter", obj=obj, id="admin.E112"
)
else:
return list(
chain.from_iterable(
self._check_list_filter_item(obj, item, "list_filter[%d]" % index)
for index, item in enumerate(obj.list_filter)
)
)
def _check_list_filter_item(self, obj, item, label):
"""
Check one item of `list_filter`, i.e. check if it is one of three options:
1. 'field' -- a basic field filter, possibly w/ relationships (e.g.
'field__rel')
2. ('field', SomeFieldListFilter) - a field-based list filter class
3. SomeListFilter - a non-field list filter class
"""
from django.contrib.admin import FieldListFilter, ListFilter
if callable(item) and not isinstance(item, models.Field):
# If item is option 3, it should be a ListFilter...
if not _issubclass(item, ListFilter):
return must_inherit_from(
parent="ListFilter", option=label, obj=obj, id="admin.E113"
)
# ... but not a FieldListFilter.
elif issubclass(item, FieldListFilter):
return [
checks.Error(
"The value of '%s' must not inherit from 'FieldListFilter'."
% label,
obj=obj.__class__,
id="admin.E114",
)
]
else:
return []
elif isinstance(item, (tuple, list)):
# item is option #2
field, list_filter_class = item
if not _issubclass(list_filter_class, FieldListFilter):
return must_inherit_from(
parent="FieldListFilter",
option="%s[1]" % label,
obj=obj,
id="admin.E115",
)
else:
return []
else:
# item is option #1
field = item
# Validate the field string
try:
get_fields_from_path(obj.model, field)
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of '%s' refers to '%s', which does not refer to a "
"Field." % (label, field),
obj=obj.__class__,
id="admin.E116",
)
]
else:
return []
def _check_list_select_related(self, obj):
"""Check that list_select_related is a boolean, a list or a tuple."""
if not isinstance(obj.list_select_related, (bool, list, tuple)):
return must_be(
"a boolean, tuple or list",
option="list_select_related",
obj=obj,
id="admin.E117",
)
else:
return []
def _check_list_per_page(self, obj):
"""Check that list_per_page is an integer."""
if not isinstance(obj.list_per_page, int):
return must_be(
"an integer", option="list_per_page", obj=obj, id="admin.E118"
)
else:
return []
def _check_list_max_show_all(self, obj):
"""Check that list_max_show_all is an integer."""
if not isinstance(obj.list_max_show_all, int):
return must_be(
"an integer", option="list_max_show_all", obj=obj, id="admin.E119"
)
else:
return []
def _check_list_editable(self, obj):
"""Check that list_editable is a sequence of editable fields from
list_display without first element."""
if not isinstance(obj.list_editable, (list, tuple)):
return must_be(
"a list or tuple", option="list_editable", obj=obj, id="admin.E120"
)
else:
return list(
chain.from_iterable(
self._check_list_editable_item(
obj, item, "list_editable[%d]" % index
)
for index, item in enumerate(obj.list_editable)
)
)
def _check_list_editable_item(self, obj, field_name, label):
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E121"
)
else:
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not "
"contained in 'list_display'." % (label, field_name),
obj=obj.__class__,
id="admin.E122",
)
]
elif obj.list_display_links and field_name in obj.list_display_links:
return [
checks.Error(
"The value of '%s' cannot be in both 'list_editable' and "
"'list_display_links'." % field_name,
obj=obj.__class__,
id="admin.E123",
)
]
# If list_display[0] is in list_editable, check that
# list_display_links is set. See #22792 and #26229 for use cases.
elif (
obj.list_display[0] == field_name
and not obj.list_display_links
and obj.list_display_links is not None
):
return [
checks.Error(
"The value of '%s' refers to the first field in 'list_display' "
"('%s'), which cannot be used unless 'list_display_links' is "
"set." % (label, obj.list_display[0]),
obj=obj.__class__,
id="admin.E124",
)
]
elif not field.editable or field.primary_key:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not editable "
"through the admin." % (label, field_name),
obj=obj.__class__,
id="admin.E125",
)
]
else:
return []
def _check_search_fields(self, obj):
"""Check search_fields is a sequence."""
if not isinstance(obj.search_fields, (list, tuple)):
return must_be(
"a list or tuple", option="search_fields", obj=obj, id="admin.E126"
)
else:
return []
def _check_date_hierarchy(self, obj):
"""Check that date_hierarchy refers to DateField or DateTimeField."""
if obj.date_hierarchy is None:
return []
else:
try:
field = get_fields_from_path(obj.model, obj.date_hierarchy)[-1]
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of 'date_hierarchy' refers to '%s', which "
"does not refer to a Field." % obj.date_hierarchy,
obj=obj.__class__,
id="admin.E127",
)
]
else:
if not isinstance(field, (models.DateField, models.DateTimeField)):
return must_be(
"a DateField or DateTimeField",
option="date_hierarchy",
obj=obj,
id="admin.E128",
)
else:
return []
def _check_action_permission_methods(self, obj):
"""
        Actions with an allowed_permissions attribute require the ModelAdmin
        to implement a has_<perm>_permission() method for each permission.
implement a has_<perm>_permission() method for each permission.
"""
actions = obj._get_base_actions()
errors = []
for func, name, _ in actions:
if not hasattr(func, "allowed_permissions"):
continue
for permission in func.allowed_permissions:
method_name = "has_%s_permission" % permission
if not hasattr(obj, method_name):
errors.append(
checks.Error(
"%s must define a %s() method for the %s action."
% (
obj.__class__.__name__,
method_name,
func.__name__,
),
obj=obj.__class__,
id="admin.E129",
)
)
return errors
def _check_actions_uniqueness(self, obj):
"""Check that every action has a unique __name__."""
errors = []
names = collections.Counter(name for _, name, _ in obj._get_base_actions())
for name, count in names.items():
if count > 1:
errors.append(
checks.Error(
"__name__ attributes of actions defined in %s must be "
"unique. Name %r is not unique."
% (
obj.__class__.__name__,
name,
),
obj=obj.__class__,
id="admin.E130",
)
)
return errors
class InlineModelAdminChecks(BaseModelAdminChecks):
def check(self, inline_obj, **kwargs):
parent_model = inline_obj.parent_model
return [
*super().check(inline_obj),
*self._check_relation(inline_obj, parent_model),
*self._check_exclude_of_parent_model(inline_obj, parent_model),
*self._check_extra(inline_obj),
*self._check_max_num(inline_obj),
*self._check_min_num(inline_obj),
*self._check_formset(inline_obj),
]
def _check_exclude_of_parent_model(self, obj, parent_model):
# Do not perform more specific checks if the base checks result in an
# error.
errors = super()._check_exclude(obj)
if errors:
return []
# Skip if `fk_name` is invalid.
if self._check_relation(obj, parent_model):
return []
if obj.exclude is None:
return []
fk = _get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
if fk.name in obj.exclude:
return [
checks.Error(
"Cannot exclude the field '%s', because it is the foreign key "
"to the parent model '%s'."
% (
fk.name,
parent_model._meta.label,
),
obj=obj.__class__,
id="admin.E201",
)
]
else:
return []
def _check_relation(self, obj, parent_model):
try:
_get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
except ValueError as e:
return [checks.Error(e.args[0], obj=obj.__class__, id="admin.E202")]
else:
return []
def _check_extra(self, obj):
"""Check that extra is an integer."""
if not isinstance(obj.extra, int):
return must_be("an integer", option="extra", obj=obj, id="admin.E203")
else:
return []
def _check_max_num(self, obj):
"""Check that max_num is an integer."""
if obj.max_num is None:
return []
elif not isinstance(obj.max_num, int):
return must_be("an integer", option="max_num", obj=obj, id="admin.E204")
else:
return []
def _check_min_num(self, obj):
"""Check that min_num is an integer."""
if obj.min_num is None:
return []
elif not isinstance(obj.min_num, int):
return must_be("an integer", option="min_num", obj=obj, id="admin.E205")
else:
return []
def _check_formset(self, obj):
"""Check formset is a subclass of BaseModelFormSet."""
if not _issubclass(obj.formset, BaseModelFormSet):
return must_inherit_from(
parent="BaseModelFormSet", option="formset", obj=obj, id="admin.E206"
)
else:
return []
def must_be(type, option, obj, id):
return [
checks.Error(
"The value of '%s' must be %s." % (option, type),
obj=obj.__class__,
id=id,
),
]
def must_inherit_from(parent, option, obj, id):
return [
checks.Error(
"The value of '%s' must inherit from '%s'." % (option, parent),
obj=obj.__class__,
id=id,
),
]
def refer_to_missing_field(field, option, obj, id):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a field of '%s'."
% (option, field, obj.model._meta.label),
obj=obj.__class__,
id=id,
),
]
|
6223a097922718e25312ca69ad9092be52d63ec7609f69fc62d0d933361a2634 | """
Form Widget classes specific to the Django admin site.
"""
import copy
import json
from django import forms
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from django.db.models import CASCADE, UUIDField
from django.urls import reverse
from django.urls.exceptions import NoReverseMatch
from django.utils.html import smart_urlquote
from django.utils.http import urlencode
from django.utils.text import Truncator
from django.utils.translation import get_language
from django.utils.translation import gettext as _
class FilteredSelectMultiple(forms.SelectMultiple):
"""
A SelectMultiple with a JavaScript filter interface.
Note that the resulting JavaScript assumes that the jsi18n
    catalog has been loaded in the page.
"""
class Media:
js = [
"admin/js/core.js",
"admin/js/SelectBox.js",
"admin/js/SelectFilter2.js",
]
def __init__(self, verbose_name, is_stacked, attrs=None, choices=()):
self.verbose_name = verbose_name
self.is_stacked = is_stacked
super().__init__(attrs, choices)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context["widget"]["attrs"]["class"] = "selectfilter"
if self.is_stacked:
context["widget"]["attrs"]["class"] += "stacked"
context["widget"]["attrs"]["data-field-name"] = self.verbose_name
context["widget"]["attrs"]["data-is-stacked"] = int(self.is_stacked)
return context
class AdminDateWidget(forms.DateInput):
class Media:
js = [
"admin/js/calendar.js",
"admin/js/admin/DateTimeShortcuts.js",
]
def __init__(self, attrs=None, format=None):
attrs = {"class": "vDateField", "size": "10", **(attrs or {})}
super().__init__(attrs=attrs, format=format)
class AdminTimeWidget(forms.TimeInput):
class Media:
js = [
"admin/js/calendar.js",
"admin/js/admin/DateTimeShortcuts.js",
]
def __init__(self, attrs=None, format=None):
attrs = {"class": "vTimeField", "size": "8", **(attrs or {})}
super().__init__(attrs=attrs, format=format)
class AdminSplitDateTime(forms.SplitDateTimeWidget):
"""
A SplitDateTime Widget that has some admin-specific styling.
"""
template_name = "admin/widgets/split_datetime.html"
def __init__(self, attrs=None):
widgets = [AdminDateWidget, AdminTimeWidget]
# Note that we're calling MultiWidget, not SplitDateTimeWidget, because
# we want to define widgets.
forms.MultiWidget.__init__(self, widgets, attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context["date_label"] = _("Date:")
context["time_label"] = _("Time:")
return context
class AdminRadioSelect(forms.RadioSelect):
template_name = "admin/widgets/radio.html"
class AdminFileWidget(forms.ClearableFileInput):
template_name = "admin/widgets/clearable_file_input.html"
def url_params_from_lookup_dict(lookups):
"""
Convert the type of lookups specified in a ForeignKey limit_choices_to
    attribute to a dictionary of query parameters.
"""
params = {}
if lookups and hasattr(lookups, "items"):
for k, v in lookups.items():
if callable(v):
v = v()
if isinstance(v, (tuple, list)):
v = ",".join(str(x) for x in v)
elif isinstance(v, bool):
v = ("0", "1")[v]
else:
v = str(v)
params[k] = v
return params
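# A minimal sketch of url_params_from_lookup_dict() (lookup names are
# hypothetical): callables are evaluated, sequences are comma-joined, and
# booleans become "0"/"1".
#
#   >>> url_params_from_lookup_dict(
#   ...     {"is_staff": True, "groups__in": [1, 2], "name": lambda: "jo"}
#   ... )
#   {'is_staff': '1', 'groups__in': '1,2', 'name': 'jo'}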
class ForeignKeyRawIdWidget(forms.TextInput):
"""
A Widget for displaying ForeignKeys in the "raw_id" interface rather than
in a <select> box.
"""
template_name = "admin/widgets/foreign_key_raw_id.html"
def __init__(self, rel, admin_site, attrs=None, using=None):
self.rel = rel
self.admin_site = admin_site
self.db = using
super().__init__(attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
rel_to = self.rel.model
if rel_to in self.admin_site._registry:
# The related object is registered with the same AdminSite
related_url = reverse(
"admin:%s_%s_changelist"
% (
rel_to._meta.app_label,
rel_to._meta.model_name,
),
current_app=self.admin_site.name,
)
params = self.url_parameters()
if params:
related_url += "?" + urlencode(params)
context["related_url"] = related_url
context["link_title"] = _("Lookup")
# The JavaScript code looks for this class.
css_class = "vForeignKeyRawIdAdminField"
if isinstance(self.rel.get_related_field(), UUIDField):
css_class += " vUUIDField"
context["widget"]["attrs"].setdefault("class", css_class)
else:
context["related_url"] = None
if context["widget"]["value"]:
context["link_label"], context["link_url"] = self.label_and_url_for_value(
value
)
else:
context["link_label"] = None
return context
def base_url_parameters(self):
limit_choices_to = self.rel.limit_choices_to
if callable(limit_choices_to):
limit_choices_to = limit_choices_to()
return url_params_from_lookup_dict(limit_choices_to)
def url_parameters(self):
from django.contrib.admin.views.main import TO_FIELD_VAR
params = self.base_url_parameters()
params.update({TO_FIELD_VAR: self.rel.get_related_field().name})
return params
def label_and_url_for_value(self, value):
key = self.rel.get_related_field().name
try:
obj = self.rel.model._default_manager.using(self.db).get(**{key: value})
except (ValueError, self.rel.model.DoesNotExist, ValidationError):
return "", ""
try:
url = reverse(
"%s:%s_%s_change"
% (
self.admin_site.name,
obj._meta.app_label,
obj._meta.object_name.lower(),
),
args=(obj.pk,),
)
except NoReverseMatch:
url = "" # Admin not registered for target model.
return Truncator(obj).words(14), url
class ManyToManyRawIdWidget(ForeignKeyRawIdWidget):
"""
A Widget for displaying ManyToMany ids in the "raw_id" interface rather than
in a <select multiple> box.
"""
template_name = "admin/widgets/many_to_many_raw_id.html"
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
if self.rel.model in self.admin_site._registry:
# The related object is registered with the same AdminSite
context["widget"]["attrs"]["class"] = "vManyToManyRawIdAdminField"
return context
def url_parameters(self):
return self.base_url_parameters()
def label_and_url_for_value(self, value):
return "", ""
def value_from_datadict(self, data, files, name):
value = data.get(name)
if value:
return value.split(",")
def format_value(self, value):
return ",".join(str(v) for v in value) if value else ""
class RelatedFieldWidgetWrapper(forms.Widget):
"""
    This class is a wrapper around a given widget that adds the add icon
    for the admin interface.
"""
template_name = "admin/widgets/related_widget_wrapper.html"
def __init__(
self,
widget,
rel,
admin_site,
can_add_related=None,
can_change_related=False,
can_delete_related=False,
can_view_related=False,
):
self.needs_multipart_form = widget.needs_multipart_form
self.attrs = widget.attrs
self.choices = widget.choices
self.widget = widget
self.rel = rel
# Backwards compatible check for whether a user can add related
# objects.
if can_add_related is None:
can_add_related = rel.model in admin_site._registry
self.can_add_related = can_add_related
# XXX: The UX does not support multiple selected values.
multiple = getattr(widget, "allow_multiple_selected", False)
self.can_change_related = not multiple and can_change_related
# XXX: The deletion UX can be confusing when dealing with cascading deletion.
cascade = getattr(rel, "on_delete", None) is CASCADE
self.can_delete_related = not multiple and not cascade and can_delete_related
self.can_view_related = not multiple and can_view_related
# so we can check if the related object is registered with this AdminSite
self.admin_site = admin_site
def __deepcopy__(self, memo):
obj = copy.copy(self)
obj.widget = copy.deepcopy(self.widget, memo)
obj.attrs = self.widget.attrs
memo[id(self)] = obj
return obj
@property
def is_hidden(self):
return self.widget.is_hidden
@property
def media(self):
return self.widget.media
def get_related_url(self, info, action, *args):
return reverse(
"admin:%s_%s_%s" % (info + (action,)),
current_app=self.admin_site.name,
args=args,
)
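    # Illustrative sketch (app and model names hypothetical): with
    # info == ("library", "book"), get_related_url(info, "change", "__fk__")
    # reverses "admin:library_book_change" with args=("__fk__",), producing
    # a template URL in which the admin JavaScript substitutes the real pk.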
def get_context(self, name, value, attrs):
from django.contrib.admin.views.main import IS_POPUP_VAR, TO_FIELD_VAR
rel_opts = self.rel.model._meta
info = (rel_opts.app_label, rel_opts.model_name)
self.widget.choices = self.choices
related_field_name = self.rel.get_related_field().name
url_params = "&".join(
"%s=%s" % param
for param in [
(TO_FIELD_VAR, related_field_name),
(IS_POPUP_VAR, 1),
]
)
context = {
"rendered_widget": self.widget.render(name, value, attrs),
"is_hidden": self.is_hidden,
"name": name,
"url_params": url_params,
"model": rel_opts.verbose_name,
"can_add_related": self.can_add_related,
"can_change_related": self.can_change_related,
"can_delete_related": self.can_delete_related,
"can_view_related": self.can_view_related,
"model_has_limit_choices_to": self.rel.limit_choices_to,
}
if self.can_add_related:
context["add_related_url"] = self.get_related_url(info, "add")
if self.can_delete_related:
context["delete_related_template_url"] = self.get_related_url(
info, "delete", "__fk__"
)
if self.can_view_related or self.can_change_related:
context["view_related_url_params"] = f"{TO_FIELD_VAR}={related_field_name}"
context["change_related_template_url"] = self.get_related_url(
info, "change", "__fk__"
)
return context
def value_from_datadict(self, data, files, name):
return self.widget.value_from_datadict(data, files, name)
def value_omitted_from_data(self, data, files, name):
return self.widget.value_omitted_from_data(data, files, name)
def id_for_label(self, id_):
return self.widget.id_for_label(id_)
class AdminTextareaWidget(forms.Textarea):
def __init__(self, attrs=None):
super().__init__(attrs={"class": "vLargeTextField", **(attrs or {})})
class AdminTextInputWidget(forms.TextInput):
def __init__(self, attrs=None):
super().__init__(attrs={"class": "vTextField", **(attrs or {})})
class AdminEmailInputWidget(forms.EmailInput):
def __init__(self, attrs=None):
super().__init__(attrs={"class": "vTextField", **(attrs or {})})
class AdminURLFieldWidget(forms.URLInput):
template_name = "admin/widgets/url.html"
def __init__(self, attrs=None, validator_class=URLValidator):
super().__init__(attrs={"class": "vURLField", **(attrs or {})})
self.validator = validator_class()
def get_context(self, name, value, attrs):
try:
self.validator(value if value else "")
url_valid = True
except ValidationError:
url_valid = False
context = super().get_context(name, value, attrs)
context["current_label"] = _("Currently:")
context["change_label"] = _("Change:")
context["widget"]["href"] = (
smart_urlquote(context["widget"]["value"]) if value else ""
)
context["url_valid"] = url_valid
return context
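# Illustrative behavior of AdminURLFieldWidget.get_context() above (the
# value is hypothetical): validity and the href are computed independently,
# so an invalid value still gets a quoted href.
#
#   value = "https://example.com/a b"   -> url_valid = False (URLValidator)
#   href  = "https://example.com/a%20b" (via smart_urlquote)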
class AdminIntegerFieldWidget(forms.NumberInput):
class_name = "vIntegerField"
def __init__(self, attrs=None):
super().__init__(attrs={"class": self.class_name, **(attrs or {})})
class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget):
class_name = "vBigIntegerField"
class AdminUUIDInputWidget(forms.TextInput):
def __init__(self, attrs=None):
super().__init__(attrs={"class": "vUUIDField", **(attrs or {})})
# Mapping of lowercase language codes [returned by Django's get_language()] to
# language codes supported by select2.
# See django/contrib/admin/static/admin/js/vendor/select2/i18n/*
SELECT2_TRANSLATIONS = {
x.lower(): x
for x in [
"ar",
"az",
"bg",
"ca",
"cs",
"da",
"de",
"el",
"en",
"es",
"et",
"eu",
"fa",
"fi",
"fr",
"gl",
"he",
"hi",
"hr",
"hu",
"id",
"is",
"it",
"ja",
"km",
"ko",
"lt",
"lv",
"mk",
"ms",
"nb",
"nl",
"pl",
"pt-BR",
"pt",
"ro",
"ru",
"sk",
"sr-Cyrl",
"sr",
"sv",
"th",
"tr",
"uk",
"vi",
]
}
SELECT2_TRANSLATIONS.update({"zh-hans": "zh-CN", "zh-hant": "zh-TW"})
def get_select2_language():
lang_code = get_language()
supported_code = SELECT2_TRANSLATIONS.get(lang_code)
if supported_code is None:
        # If, e.g., 'zh-hant-tw' is not supported, fall back to successively
        # shorter codes, i.e. 'zh-hant' and then 'zh'.
i = None
while (i := lang_code.rfind("-", 0, i)) > -1:
if supported_code := SELECT2_TRANSLATIONS.get(lang_code[:i]):
return supported_code
return supported_code
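# Fallback sketch for get_select2_language() (the active language code is
# hypothetical): with get_language() == "zh-hant-tw" there is no exact
# match in SELECT2_TRANSLATIONS, so the loop strips trailing "-" segments,
# finds "zh-hant", and returns "zh-TW". A code with no match at any level
# (e.g. "eo") returns None.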
class AutocompleteMixin:
"""
Select widget mixin that loads options from AutocompleteJsonView via AJAX.
Renders the necessary data attributes for select2 and adds the static form
media.
"""
url_name = "%s:autocomplete"
def __init__(self, field, admin_site, attrs=None, choices=(), using=None):
self.field = field
self.admin_site = admin_site
self.db = using
self.choices = choices
self.attrs = {} if attrs is None else attrs.copy()
self.i18n_name = get_select2_language()
def get_url(self):
return reverse(self.url_name % self.admin_site.name)
def build_attrs(self, base_attrs, extra_attrs=None):
"""
Set select2's AJAX attributes.
        Attributes can be set using HTML5 data attributes.
Nested attributes require a double dash as per
https://select2.org/configuration/data-attributes#nested-subkey-options
"""
attrs = super().build_attrs(base_attrs, extra_attrs=extra_attrs)
attrs.setdefault("class", "")
attrs.update(
{
"data-ajax--cache": "true",
"data-ajax--delay": 250,
"data-ajax--type": "GET",
"data-ajax--url": self.get_url(),
"data-app-label": self.field.model._meta.app_label,
"data-model-name": self.field.model._meta.model_name,
"data-field-name": self.field.name,
"data-theme": "admin-autocomplete",
"data-allow-clear": json.dumps(not self.is_required),
"data-placeholder": "", # Allows clearing of the input.
"lang": self.i18n_name,
"class": attrs["class"]
+ (" " if attrs["class"] else "")
+ "admin-autocomplete",
}
)
return attrs
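    # Sketch of the merged attrs for a hypothetical required field "author"
    # on model "book" in app "library", served by the default "admin" site
    # (the "lang" value assumes English is active):
    #
    #   {"data-ajax--cache": "true", "data-ajax--delay": 250,
    #    "data-ajax--type": "GET", "data-ajax--url": "/admin/autocomplete/",
    #    "data-app-label": "library", "data-model-name": "book",
    #    "data-field-name": "author", "data-theme": "admin-autocomplete",
    #    "data-allow-clear": "false", "data-placeholder": "", "lang": "en",
    #    "class": "admin-autocomplete"}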
def optgroups(self, name, value, attr=None):
"""Return selected options based on the ModelChoiceIterator."""
default = (None, [], 0)
groups = [default]
has_selected = False
selected_choices = {
str(v) for v in value if str(v) not in self.choices.field.empty_values
}
if not self.is_required and not self.allow_multiple_selected:
default[1].append(self.create_option(name, "", "", False, 0))
remote_model_opts = self.field.remote_field.model._meta
to_field_name = getattr(
self.field.remote_field, "field_name", remote_model_opts.pk.attname
)
to_field_name = remote_model_opts.get_field(to_field_name).attname
choices = (
(getattr(obj, to_field_name), self.choices.field.label_from_instance(obj))
for obj in self.choices.queryset.using(self.db).filter(
**{"%s__in" % to_field_name: selected_choices}
)
)
for option_value, option_label in choices:
selected = str(option_value) in value and (
has_selected is False or self.allow_multiple_selected
)
has_selected |= selected
index = len(default[1])
subgroup = default[1]
subgroup.append(
self.create_option(
name, option_value, option_label, selected_choices, index
)
)
return groups
@property
def media(self):
extra = "" if settings.DEBUG else ".min"
i18n_file = (
("admin/js/vendor/select2/i18n/%s.js" % self.i18n_name,)
if self.i18n_name
else ()
)
return forms.Media(
js=(
"admin/js/vendor/jquery/jquery%s.js" % extra,
"admin/js/vendor/select2/select2.full%s.js" % extra,
)
+ i18n_file
+ (
"admin/js/jquery.init.js",
"admin/js/autocomplete.js",
),
css={
"screen": (
"admin/css/vendor/select2/select2%s.css" % extra,
"admin/css/autocomplete.css",
),
},
)
class AutocompleteSelect(AutocompleteMixin, forms.Select):
pass
class AutocompleteSelectMultiple(AutocompleteMixin, forms.SelectMultiple):
pass
|
7cfc8293da412d6a23bb3ae167af9d59022f0f7929620fc7c2c1734b185ac20f | from django.contrib.admin import (
HORIZONTAL,
VERTICAL,
AdminSite,
ModelAdmin,
StackedInline,
TabularInline,
action,
autodiscover,
display,
register,
site,
)
from django.contrib.gis.admin.options import GeoModelAdmin, GISModelAdmin, OSMGeoAdmin
from django.contrib.gis.admin.widgets import OpenLayersWidget
__all__ = [
"HORIZONTAL",
"VERTICAL",
"AdminSite",
"ModelAdmin",
"StackedInline",
"TabularInline",
"action",
"autodiscover",
"display",
"register",
"site",
"GISModelAdmin",
# RemovedInDjango50Warning.
"GeoModelAdmin",
"OpenLayersWidget",
"OSMGeoAdmin",
]
|
01240e0759f52efda8be2a32370162ea2917621dca7fbfb58ed5888efc43d283 | # RemovedInDjango50Warning.
import logging
import warnings
from django.contrib.gis.gdal import GDALException
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.forms.widgets import Textarea
from django.utils import translation
from django.utils.deprecation import RemovedInDjango50Warning
# Creating a template context that contains Django settings
# values needed by admin map templates.
geo_context = {"LANGUAGE_BIDI": translation.get_language_bidi()}
logger = logging.getLogger("django.contrib.gis")
class OpenLayersWidget(Textarea):
"""
Render an OpenLayers map using the WKT of the geometry.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"django.contrib.gis.admin.OpenLayersWidget is deprecated.",
RemovedInDjango50Warning,
stacklevel=2,
)
super().__init__(*args, **kwargs)
def get_context(self, name, value, attrs):
# Update the template parameters with any attributes passed in.
if attrs:
self.params.update(attrs)
self.params["editable"] = self.params["modifiable"]
else:
self.params["editable"] = True
# Defaulting the WKT value to a blank string -- this
# will be tested in the JavaScript and the appropriate
# interface will be constructed.
self.params["wkt"] = ""
# If a string reaches here (via a validation error on another
# field) then just reconstruct the Geometry.
if value and isinstance(value, str):
try:
value = GEOSGeometry(value)
except (GEOSException, ValueError) as err:
logger.error("Error creating geometry from value '%s' (%s)", value, err)
value = None
if (
value
and value.geom_type.upper() != self.geom_type
and self.geom_type != "GEOMETRY"
):
value = None
# Constructing the dictionary of the map options.
self.params["map_options"] = self.map_options()
# Constructing the JavaScript module name using the name of
# the GeometryField (passed in via the `attrs` keyword).
# Use the 'name' attr for the field name (rather than 'field')
self.params["name"] = name
        # Note: dashes must be switched out for underscores, since JavaScript
        # functions are created using the module variable.
js_safe_name = self.params["name"].replace("-", "_")
self.params["module"] = "geodjango_%s" % js_safe_name
if value:
# Transforming the geometry to the projection used on the
# OpenLayers map.
srid = self.params["srid"]
if value.srid != srid:
try:
ogr = value.ogr
ogr.transform(srid)
wkt = ogr.wkt
except GDALException as err:
logger.error(
"Error transforming geometry from srid '%s' to srid '%s' (%s)",
value.srid,
srid,
err,
)
wkt = ""
else:
wkt = value.wkt
# Setting the parameter WKT with that of the transformed
# geometry.
self.params["wkt"] = wkt
self.params.update(geo_context)
return self.params
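    # Illustrative note (the field name is hypothetical): a field named
    # "point-field" yields self.params["module"] == "geodjango_point_field",
    # since dashes would be invalid in the generated JavaScript identifiers.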
def map_options(self):
"""Build the map options hash for the OpenLayers template."""
# JavaScript construction utilities for the Bounds and Projection.
def ol_bounds(extent):
return "new OpenLayers.Bounds(%s)" % extent
def ol_projection(srid):
return 'new OpenLayers.Projection("EPSG:%s")' % srid
        # A list of triples: the parameter name, the name of its OpenLayers
        # counterpart, and the type of variable it is.
map_types = [
("srid", "projection", "srid"),
("display_srid", "displayProjection", "srid"),
("units", "units", str),
("max_resolution", "maxResolution", float),
("max_extent", "maxExtent", "bounds"),
("num_zoom", "numZoomLevels", int),
("max_zoom", "maxZoomLevels", int),
("min_zoom", "minZoomLevel", int),
]
# Building the map options hash.
map_options = {}
for param_name, js_name, option_type in map_types:
if self.params.get(param_name, False):
if option_type == "srid":
value = ol_projection(self.params[param_name])
elif option_type == "bounds":
value = ol_bounds(self.params[param_name])
elif option_type in (float, int):
value = self.params[param_name]
elif option_type in (str,):
value = '"%s"' % self.params[param_name]
else:
raise TypeError
map_options[js_name] = value
return map_options
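# A minimal sketch of map_options() (parameter values hypothetical):
#
#   self.params = {"srid": 4326, "num_zoom": 18, "units": "m", ...}
#   -> {"projection": 'new OpenLayers.Projection("EPSG:4326")',
#       "numZoomLevels": 18,
#       "units": '"m"'}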
|
d04b799e8adc709ab0f7d6828fecc07356fe0a4976f908ced5b343431ce25936 | """
Tests for django.core.servers.
"""
import errno
import os
import socket
import threading
from http.client import HTTPConnection
from urllib.error import HTTPError
from urllib.parse import urlencode
from urllib.request import urlopen
from django.conf import settings
from django.core.servers.basehttp import ThreadedWSGIServer, WSGIServer
from django.db import DEFAULT_DB_ALIAS, connections
from django.test import LiveServerTestCase, override_settings
from django.test.testcases import LiveServerThread, QuietWSGIRequestHandler
from .models import Person
TEST_ROOT = os.path.dirname(__file__)
TEST_SETTINGS = {
"MEDIA_URL": "media/",
"MEDIA_ROOT": os.path.join(TEST_ROOT, "media"),
"STATIC_URL": "static/",
"STATIC_ROOT": os.path.join(TEST_ROOT, "static"),
}
@override_settings(ROOT_URLCONF="servers.urls", **TEST_SETTINGS)
class LiveServerBase(LiveServerTestCase):
available_apps = [
"servers",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
]
fixtures = ["testdata.json"]
def urlopen(self, url):
return urlopen(self.live_server_url + url)
class CloseConnectionTestServer(ThreadedWSGIServer):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# This event is set right after the first time a request closes its
# database connections.
self._connections_closed = threading.Event()
def _close_connections(self):
super()._close_connections()
self._connections_closed.set()
class CloseConnectionTestLiveServerThread(LiveServerThread):
server_class = CloseConnectionTestServer
def _create_server(self, connections_override=None):
return super()._create_server(connections_override=self.connections_override)
class LiveServerTestCloseConnectionTest(LiveServerBase):
server_thread_class = CloseConnectionTestLiveServerThread
@classmethod
def _make_connections_override(cls):
conn = connections[DEFAULT_DB_ALIAS]
cls.conn = conn
cls.old_conn_max_age = conn.settings_dict["CONN_MAX_AGE"]
# Set the connection's CONN_MAX_AGE to None to simulate the
# CONN_MAX_AGE setting being set to None on the server. This prevents
# Django from closing the connection and allows testing that
# ThreadedWSGIServer closes connections.
conn.settings_dict["CONN_MAX_AGE"] = None
# Pass a database connection through to the server to check it is being
# closed by ThreadedWSGIServer.
return {DEFAULT_DB_ALIAS: conn}
@classmethod
def tearDownConnectionTest(cls):
cls.conn.settings_dict["CONN_MAX_AGE"] = cls.old_conn_max_age
@classmethod
def tearDownClass(cls):
cls.tearDownConnectionTest()
super().tearDownClass()
def test_closes_connections(self):
# The server's request thread sets this event after closing
# its database connections.
closed_event = self.server_thread.httpd._connections_closed
conn = self.conn
# Open a connection to the database.
conn.connect()
self.assertIsNotNone(conn.connection)
with self.urlopen("/model_view/") as f:
# The server can access the database.
self.assertCountEqual(f.read().splitlines(), [b"jane", b"robert"])
# Wait for the server's request thread to close the connection.
        # A timeout of 0.1 seconds should be more than enough. If the wait
        # times out, the assertion that follows will fail.
closed_event.wait(timeout=0.1)
self.assertIsNone(conn.connection)
class FailingLiveServerThread(LiveServerThread):
def _create_server(self):
raise RuntimeError("Error creating server.")
class LiveServerTestCaseSetupTest(LiveServerBase):
server_thread_class = FailingLiveServerThread
@classmethod
def check_allowed_hosts(cls, expected):
if settings.ALLOWED_HOSTS != expected:
raise RuntimeError(f"{settings.ALLOWED_HOSTS} != {expected}")
@classmethod
def setUpClass(cls):
cls.check_allowed_hosts(["testserver"])
try:
super().setUpClass()
except RuntimeError:
# LiveServerTestCase's change to ALLOWED_HOSTS should be reverted.
cls.doClassCleanups()
cls.check_allowed_hosts(["testserver"])
else:
raise RuntimeError("Server did not fail.")
cls.set_up_called = True
def test_set_up_class(self):
self.assertIs(self.set_up_called, True)
class LiveServerAddress(LiveServerBase):
@classmethod
def setUpClass(cls):
super().setUpClass()
        # Put it in a list to prevent descriptor lookups in the test.
cls.live_server_url_test = [cls.live_server_url]
def test_live_server_url_is_class_property(self):
self.assertIsInstance(self.live_server_url_test[0], str)
self.assertEqual(self.live_server_url_test[0], self.live_server_url)
class LiveServerSingleThread(LiveServerThread):
def _create_server(self):
return WSGIServer(
(self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False
)
class SingleThreadLiveServerTestCase(LiveServerTestCase):
server_thread_class = LiveServerSingleThread
class LiveServerViews(LiveServerBase):
def test_protocol(self):
"""Launched server serves with HTTP 1.1."""
with self.urlopen("/example_view/") as f:
self.assertEqual(f.version, 11)
def test_closes_connection_without_content_length(self):
"""
        An HTTP 1.1 server is supposed to support keep-alive. Since our
        development server is rather simple, we support it only in cases
        where we can detect a content length from the response. This should
        be doable for all simple views and streaming responses where an
        iterable with a length of one is passed. The latter follows as a
        result of `set_content_length` from
        https://github.com/python/cpython/blob/main/Lib/wsgiref/handlers.py.
        If we cannot detect a content length, we explicitly set the
        `Connection` header to `close` to notify the client that we do not
        actually support it.
"""
conn = HTTPConnection(
LiveServerViews.server_thread.host,
LiveServerViews.server_thread.port,
timeout=1,
)
try:
conn.request(
"GET", "/streaming_example_view/", headers={"Connection": "keep-alive"}
)
response = conn.getresponse()
self.assertTrue(response.will_close)
self.assertEqual(response.read(), b"Iamastream")
self.assertEqual(response.status, 200)
self.assertEqual(response.getheader("Connection"), "close")
conn.request(
"GET", "/streaming_example_view/", headers={"Connection": "close"}
)
response = conn.getresponse()
self.assertTrue(response.will_close)
self.assertEqual(response.read(), b"Iamastream")
self.assertEqual(response.status, 200)
self.assertEqual(response.getheader("Connection"), "close")
finally:
conn.close()
def test_keep_alive_on_connection_with_content_length(self):
"""
See `test_closes_connection_without_content_length` for details. This
        is a follow-up test, which ensures that we do not close the
        connection when it is not needed, hence allowing us to take
        advantage of keep-alive.
"""
conn = HTTPConnection(
LiveServerViews.server_thread.host, LiveServerViews.server_thread.port
)
try:
conn.request("GET", "/example_view/", headers={"Connection": "keep-alive"})
response = conn.getresponse()
self.assertFalse(response.will_close)
self.assertEqual(response.read(), b"example view")
self.assertEqual(response.status, 200)
self.assertIsNone(response.getheader("Connection"))
conn.request("GET", "/example_view/", headers={"Connection": "close"})
response = conn.getresponse()
self.assertFalse(response.will_close)
self.assertEqual(response.read(), b"example view")
self.assertEqual(response.status, 200)
self.assertIsNone(response.getheader("Connection"))
finally:
conn.close()
def test_keep_alive_connection_clears_previous_request_data(self):
conn = HTTPConnection(
LiveServerViews.server_thread.host, LiveServerViews.server_thread.port
)
try:
conn.request(
"POST", "/method_view/", b"{}", headers={"Connection": "keep-alive"}
)
response = conn.getresponse()
self.assertFalse(response.will_close)
self.assertEqual(response.status, 200)
self.assertEqual(response.read(), b"POST")
conn.request(
"POST", "/method_view/", b"{}", headers={"Connection": "close"}
)
response = conn.getresponse()
self.assertFalse(response.will_close)
self.assertEqual(response.status, 200)
self.assertEqual(response.read(), b"POST")
finally:
conn.close()
def test_404(self):
with self.assertRaises(HTTPError) as err:
self.urlopen("/")
err.exception.close()
self.assertEqual(err.exception.code, 404, "Expected 404 response")
def test_view(self):
with self.urlopen("/example_view/") as f:
self.assertEqual(f.read(), b"example view")
def test_static_files(self):
with self.urlopen("/static/example_static_file.txt") as f:
self.assertEqual(f.read().rstrip(b"\r\n"), b"example static file")
def test_no_collectstatic_emulation(self):
"""
LiveServerTestCase reports a 404 status code when HTTP client
tries to access a static file that isn't explicitly put under
STATIC_ROOT.
"""
with self.assertRaises(HTTPError) as err:
self.urlopen("/static/another_app/another_app_static_file.txt")
err.exception.close()
self.assertEqual(err.exception.code, 404, "Expected 404 response")
def test_media_files(self):
with self.urlopen("/media/example_media_file.txt") as f:
self.assertEqual(f.read().rstrip(b"\r\n"), b"example media file")
def test_environ(self):
with self.urlopen("/environ_view/?%s" % urlencode({"q": "тест"})) as f:
self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read())
@override_settings(ROOT_URLCONF="servers.urls")
class SingleThreadLiveServerViews(SingleThreadLiveServerTestCase):
available_apps = ["servers"]
def test_closes_connection_with_content_length(self):
"""
Contrast to
LiveServerViews.test_keep_alive_on_connection_with_content_length().
        Persistent connections require a threaded server.
"""
conn = HTTPConnection(
SingleThreadLiveServerViews.server_thread.host,
SingleThreadLiveServerViews.server_thread.port,
timeout=1,
)
try:
conn.request("GET", "/example_view/", headers={"Connection": "keep-alive"})
response = conn.getresponse()
self.assertTrue(response.will_close)
self.assertEqual(response.read(), b"example view")
self.assertEqual(response.status, 200)
self.assertEqual(response.getheader("Connection"), "close")
finally:
conn.close()
class LiveServerDatabase(LiveServerBase):
def test_fixtures_loaded(self):
"""
Fixtures are properly loaded and visible to the live server thread.
"""
with self.urlopen("/model_view/") as f:
self.assertCountEqual(f.read().splitlines(), [b"jane", b"robert"])
def test_database_writes(self):
"""
Data written to the database by a view can be read.
"""
with self.urlopen("/create_model_instance/"):
pass
self.assertQuerysetEqual(
Person.objects.order_by("pk"),
["jane", "robert", "emily"],
lambda b: b.name,
)
class LiveServerPort(LiveServerBase):
def test_port_bind(self):
"""
Each LiveServerTestCase binds to a unique port or fails to start a
server thread when run concurrently (#26011).
"""
TestCase = type("TestCase", (LiveServerBase,), {})
try:
TestCase._start_server_thread()
except OSError as e:
if e.errno == errno.EADDRINUSE:
# We're out of ports, LiveServerTestCase correctly fails with
# an OSError.
return
# Unexpected error.
raise
self.assertNotEqual(
self.live_server_url,
TestCase.live_server_url,
f"Acquired duplicate server addresses for server threads: "
f"{self.live_server_url}",
)
def test_specified_port_bind(self):
"""LiveServerTestCase.port customizes the server's port."""
TestCase = type("TestCase", (LiveServerBase,), {})
# Find an open port and tell TestCase to use it.
s = socket.socket()
s.bind(("", 0))
TestCase.port = s.getsockname()[1]
s.close()
TestCase._start_server_thread()
self.assertEqual(
TestCase.port,
TestCase.server_thread.port,
f"Did not use specified port for LiveServerTestCase thread: "
f"{TestCase.port}",
)
class LiveServerThreadedTests(LiveServerBase):
"""If LiveServerTestCase isn't threaded, these tests will hang."""
def test_view_calls_subview(self):
url = "/subview_calling_view/?%s" % urlencode({"url": self.live_server_url})
with self.urlopen(url) as f:
self.assertEqual(f.read(), b"subview calling view: subview")
def test_check_model_instance_from_subview(self):
url = "/check_model_instance_from_subview/?%s" % urlencode(
{
"url": self.live_server_url,
}
)
with self.urlopen(url) as f:
self.assertIn(b"emily", f.read())
|
a034c3027f0fc99196ab92dc74b41b8c2f90d83966dcd1d2ea5fd2fb6ed465ac | import datetime
import importlib
import io
import os
import shutil
import sys
from unittest import mock
from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import (
ConnectionHandler,
DatabaseError,
OperationalError,
connection,
connections,
models,
)
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.utils import truncate_name
from django.db.migrations.exceptions import InconsistentMigrationHistory
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, override_settings, skipUnlessDBFeature
from django.test.utils import captured_stdout
from django.utils import timezone
from django.utils.version import get_docs_version
from .models import UnicodeModel, UnserializableModel
from .routers import TestRouter
from .test_base import MigrationTestBase
HAS_BLACK = shutil.which("black")
class MigrateTests(MigrationTestBase):
"""
Tests running the migrate command.
"""
databases = {"default", "other"}
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate(self):
"""
Tests basic usage of the migrate command.
"""
# No tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run the migrations to 0001 only
stdout = io.StringIO()
call_command(
"migrate", "migrations", "0001", verbosity=2, stdout=stdout, no_color=True
)
stdout = stdout.getvalue()
self.assertIn(
"Target specific migration: 0001_initial, from migrations", stdout
)
self.assertIn("Applying migrations.0001_initial... OK", stdout)
self.assertIn("Running pre-migrate handlers for application migrations", stdout)
self.assertIn(
"Running post-migrate handlers for application migrations", stdout
)
# The correct tables exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run migrations all the way
call_command("migrate", verbosity=0)
# The correct tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Unmigrate everything
stdout = io.StringIO()
call_command(
"migrate", "migrations", "zero", verbosity=2, stdout=stdout, no_color=True
)
stdout = stdout.getvalue()
self.assertIn("Unapply all migrations: migrations", stdout)
self.assertIn("Unapplying migrations.0002_second... OK", stdout)
self.assertIn("Running pre-migrate handlers for application migrations", stdout)
self.assertIn(
"Running post-migrate handlers for application migrations", stdout
)
# Tables are gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"migrations.migrations_test_apps.migrated_app",
]
)
def test_migrate_with_system_checks(self):
out = io.StringIO()
call_command("migrate", skip_checks=False, no_color=True, stdout=out)
self.assertIn("Apply all migrations: migrated_app", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations",
"migrations.migrations_test_apps.unmigrated_app_syncdb",
]
)
def test_app_without_migrations(self):
msg = "App 'unmigrated_app_syncdb' does not have migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", app_label="unmigrated_app_syncdb")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_clashing_prefix"}
)
def test_ambiguous_prefix(self):
msg = (
"More than one migration matches 'a' in app 'migrations'. Please "
"be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", app_label="migrations", migration_name="a")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_unknown_prefix(self):
msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"migrate", app_label="migrations", migration_name="nonexistent"
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"}
)
def test_migrate_initial_false(self):
"""
`Migration.initial = False` skips fake-initial detection.
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Fake rollback
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure fake-initial detection does not run
with self.assertRaises(DatabaseError):
call_command(
"migrate", "migrations", "0001", fake_initial=True, verbosity=0
)
call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
# Real rollback
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
DATABASE_ROUTERS=["migrations.routers.TestRouter"],
)
def test_migrate_fake_initial(self):
"""
--fake-initial only works if all tables created in the initial
        migration of an app exist. Database routers must be obeyed when doing
that check.
"""
# Make sure no tables are created
for db in self.databases:
self.assertTableNotExists("migrations_author", using=db)
self.assertTableNotExists("migrations_tribble", using=db)
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
call_command("migrate", "migrations", "0001", verbosity=0, database="other")
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Also check the "other" database
self.assertTableNotExists("migrations_author", using="other")
self.assertTableExists("migrations_tribble", using="other")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
call_command(
"migrate", "migrations", "zero", fake=True, verbosity=0, database="other"
)
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble", using="other")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", "0001", verbosity=0)
# Run initial migration with an explicit --fake-initial
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: False
):
call_command(
"migrate",
"migrations",
"0001",
fake_initial=True,
stdout=out,
verbosity=1,
)
call_command(
"migrate",
"migrations",
"0001",
fake_initial=True,
verbosity=0,
database="other",
)
self.assertIn("migrations.0001_initial... faked", out.getvalue().lower())
try:
# Run migrations all the way.
call_command("migrate", verbosity=0)
call_command("migrate", verbosity=0, database="other")
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
self.assertTableNotExists("migrations_author", using="other")
self.assertTableNotExists("migrations_tribble", using="other")
self.assertTableNotExists("migrations_book", using="other")
# Fake a roll-back.
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
call_command(
"migrate",
"migrations",
"zero",
fake=True,
verbosity=0,
database="other",
)
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Run initial migration.
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", verbosity=0)
# Run initial migration with an explicit --fake-initial.
with self.assertRaises(DatabaseError):
# Fails because "migrations_tribble" does not exist but needs
# to in order to make --fake-initial work.
call_command("migrate", "migrations", fake_initial=True, verbosity=0)
# Fake an apply.
call_command("migrate", "migrations", fake=True, verbosity=0)
call_command(
"migrate", "migrations", fake=True, verbosity=0, database="other"
)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
call_command("migrate", "migrations", "zero", verbosity=0, database="other")
# Make sure it's all gone
for db in self.databases:
self.assertTableNotExists("migrations_author", using=db)
self.assertTableNotExists("migrations_tribble", using=db)
self.assertTableNotExists("migrations_book", using=db)
@skipUnlessDBFeature("ignores_table_name_case")
def test_migrate_fake_initial_case_insensitive(self):
with override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_fake_initial_case_insensitive.initial",
}
):
call_command("migrate", "migrations", "0001", verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
with override_settings(
MIGRATION_MODULES={
"migrations": (
"migrations.test_fake_initial_case_insensitive.fake_initial"
),
}
):
out = io.StringIO()
call_command(
"migrate",
"migrations",
"0001",
fake_initial=True,
stdout=out,
verbosity=1,
no_color=True,
)
self.assertIn(
"migrations.0001_initial... faked",
out.getvalue().lower(),
)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_fake_split_initial"
}
)
def test_migrate_fake_split_initial(self):
"""
Split initial migrations can be faked with --fake-initial.
"""
try:
call_command("migrate", "migrations", "0002", verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: False
):
call_command(
"migrate",
"migrations",
"0002",
fake_initial=True,
stdout=out,
verbosity=1,
)
value = out.getvalue().lower()
self.assertIn("migrations.0001_initial... faked", value)
self.assertIn("migrations.0002_second... faked", value)
finally:
# Fake an apply.
call_command("migrate", "migrations", fake=True, verbosity=0)
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}
)
def test_migrate_conflict_exit(self):
"""
migrate exits if it detects a conflict.
"""
msg = (
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (0002_conflicting_second, 0002_second in "
"migrations).\n"
"To fix them run 'python manage.py makemigrations --merge'"
)
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", "migrations")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
}
)
def test_migrate_check(self):
with self.assertRaises(SystemExit):
call_command("migrate", "migrations", "0001", check_unapplied=True)
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_plan",
}
)
def test_migrate_check_plan(self):
out = io.StringIO()
with self.assertRaises(SystemExit):
call_command(
"migrate",
"migrations",
"0001",
check_unapplied=True,
plan=True,
stdout=out,
no_color=True,
)
self.assertEqual(
"Planned operations:\n"
"migrations.0001_initial\n"
" Create model Salamander\n"
" Raw Python operation -> Grow salamander tail.\n",
out.getvalue(),
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_showmigrations_list(self):
"""
showmigrations --list displays migrations and whether or not they're
applied.
"""
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: True
):
call_command(
"showmigrations", format="list", stdout=out, verbosity=0, no_color=False
)
self.assertEqual(
"\x1b[1mmigrations\n\x1b[0m [ ] 0001_initial\n [ ] 0002_second\n",
out.getvalue().lower(),
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = io.StringIO()
        # Giving an explicit app_label tests selective `show_list` in the command.
call_command(
"showmigrations",
"migrations",
format="list",
stdout=out,
verbosity=0,
no_color=True,
)
self.assertEqual(
"migrations\n [x] 0001_initial\n [ ] 0002_second\n", out.getvalue().lower()
)
out = io.StringIO()
# Applied datetimes are displayed at verbosity 2+.
call_command(
"showmigrations", "migrations", stdout=out, verbosity=2, no_color=True
)
migration1 = MigrationRecorder(connection).migration_qs.get(
app="migrations", name="0001_initial"
)
self.assertEqual(
"migrations\n"
" [x] 0001_initial (applied at %s)\n"
" [ ] 0002_second\n" % migration1.applied.strftime("%Y-%m-%d %H:%M:%S"),
out.getvalue().lower(),
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_showmigrations_list_squashed(self):
out = io.StringIO()
call_command(
"showmigrations", format="list", stdout=out, verbosity=2, no_color=True
)
self.assertEqual(
"migrations\n [ ] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command(
"migrate",
"migrations",
"0001_squashed_0002",
stdout=out,
verbosity=2,
no_color=True,
)
try:
self.assertIn(
"operations to perform:\n"
" target specific migration: 0001_squashed_0002, from migrations\n"
"running pre-migrate handlers for application migrations\n"
"running migrations:\n"
" applying migrations.0001_squashed_0002... ok (",
out.getvalue().lower(),
)
out = io.StringIO()
call_command(
"showmigrations", format="list", stdout=out, verbosity=2, no_color=True
)
self.assertEqual(
"migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}
)
def test_showmigrations_plan(self):
"""
        Tests the --plan output of the showmigrations command.
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third\n"
"[ ] migrations.0002_second\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial, "
"migrations.0003_third)\n",
out.getvalue().lower(),
)
call_command("migrate", "migrations", "0003", verbosity=0)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third\n"
"[ ] migrations.0002_second\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial, "
"migrations.0003_third)\n",
out.getvalue().lower(),
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_plan"}
)
def test_migrate_plan(self):
"""Tests migrate --plan output."""
out = io.StringIO()
# Show the plan up to the third migration.
call_command(
"migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0001_initial\n"
" Create model Salamander\n"
" Raw Python operation -> Grow salamander tail.\n"
"migrations.0002_second\n"
" Create model Book\n"
" Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
"migrations.0003_third\n"
" Create model Author\n"
" Raw SQL operation -> ['SELECT * FROM migrations_author']\n",
out.getvalue(),
)
try:
# Migrate to the third migration.
call_command("migrate", "migrations", "0003", verbosity=0)
out = io.StringIO()
# Show the plan for when there is nothing to apply.
call_command(
"migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n No planned migration operations.\n",
out.getvalue(),
)
out = io.StringIO()
# Show the plan for reverse migration back to 0001.
call_command(
"migrate", "migrations", "0001", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0003_third\n"
" Undo Create model Author\n"
" Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
"migrations.0002_second\n"
" Undo Create model Book\n"
" Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n",
out.getvalue(),
)
out = io.StringIO()
# Show the migration plan to fourth, with truncated details.
call_command(
"migrate", "migrations", "0004", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0004_fourth\n"
" Raw SQL operation -> SELECT * FROM migrations_author WHE…\n",
out.getvalue(),
)
# Show the plan when an operation is irreversible.
# Migrate to the fourth migration.
call_command("migrate", "migrations", "0004", verbosity=0)
out = io.StringIO()
call_command(
"migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0004_fourth\n"
" Raw SQL operation -> IRREVERSIBLE\n",
out.getvalue(),
)
out = io.StringIO()
call_command(
"migrate", "migrations", "0005", plan=True, stdout=out, no_color=True
)
# Operation is marked as irreversible only in the revert plan.
self.assertEqual(
"Planned operations:\n"
"migrations.0005_fifth\n"
" Raw Python operation\n"
" Raw Python operation\n"
" Raw Python operation -> Feed salamander.\n",
out.getvalue(),
)
call_command("migrate", "migrations", "0005", verbosity=0)
out = io.StringIO()
call_command(
"migrate", "migrations", "0004", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0005_fifth\n"
" Raw Python operation -> IRREVERSIBLE\n"
" Raw Python operation -> IRREVERSIBLE\n"
" Raw Python operation\n",
out.getvalue(),
)
finally:
# Cleanup by unmigrating everything: fake the irreversible, then
# migrate all to zero.
call_command("migrate", "migrations", "0003", fake=True, verbosity=0)
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}
)
def test_showmigrations_no_migrations(self):
out = io.StringIO()
call_command("showmigrations", stdout=out, no_color=True)
self.assertEqual("migrations\n (no migrations)\n", out.getvalue().lower())
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app"]
)
def test_showmigrations_unmigrated_app(self):
out = io.StringIO()
call_command("showmigrations", "unmigrated_app", stdout=out, no_color=True)
try:
self.assertEqual(
"unmigrated_app\n (no migrations)\n", out.getvalue().lower()
)
finally:
# unmigrated_app.SillyModel has a foreign key to
# 'migrations.Tribble', but that model is only defined in a
# migration, so the global app registry never sees it and the
# reference is left dangling. Remove it to avoid problems in
# subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}
)
def test_showmigrations_plan_no_migrations(self):
"""
        Tests the --plan output of the showmigrations command without migrations.
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, no_color=True)
self.assertEqual("(no migrations)\n", out.getvalue().lower())
out = io.StringIO()
call_command(
"showmigrations", format="plan", stdout=out, verbosity=2, no_color=True
)
self.assertEqual("(no migrations)\n", out.getvalue().lower())
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}
)
def test_showmigrations_plan_squashed(self):
"""
        Tests the --plan output of the showmigrations command with squashed migrations.
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto\n"
"[ ] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto ... (migrations.1_auto)\n"
"[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower(),
)
call_command("migrate", "migrations", "3_squashed_5", verbosity=0)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto\n"
"[x] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto ... (migrations.1_auto)\n"
"[x] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower(),
)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.mutate_state_b",
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_showmigrations_plan_single_app_label(self):
"""
`showmigrations --plan app_label` output with a single app_label.
"""
# Single app with no dependencies on other apps.
out = io.StringIO()
call_command("showmigrations", "mutate_state_b", format="plan", stdout=out)
self.assertEqual(
"[ ] mutate_state_b.0001_initial\n[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
# Single app with dependencies.
out = io.StringIO()
call_command("showmigrations", "author_app", format="plan", stdout=out)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n",
out.getvalue(),
)
# Some migrations already applied.
call_command("migrate", "author_app", "0001", verbosity=0)
out = io.StringIO()
call_command("showmigrations", "author_app", format="plan", stdout=out)
self.assertEqual(
"[X] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n",
out.getvalue(),
)
# Cleanup by unmigrating author_app.
call_command("migrate", "author_app", "zero", verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.mutate_state_b",
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_showmigrations_plan_multiple_app_labels(self):
"""
`showmigrations --plan app_label` output with multiple app_labels.
"""
# Multiple apps: author_app depends on book_app; mutate_state_b doesn't
# depend on other apps.
out = io.StringIO()
call_command(
"showmigrations", "mutate_state_b", "author_app", format="plan", stdout=out
)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n"
"[ ] mutate_state_b.0001_initial\n"
"[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
# Multiple apps: args order shouldn't matter (the same result is
# expected as above).
out = io.StringIO()
call_command(
"showmigrations", "author_app", "mutate_state_b", format="plan", stdout=out
)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n"
"[ ] mutate_state_b.0001_initial\n"
"[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app"]
)
def test_showmigrations_plan_app_label_no_migrations(self):
out = io.StringIO()
call_command(
"showmigrations", "unmigrated_app", format="plan", stdout=out, no_color=True
)
try:
self.assertEqual("(no migrations)\n", out.getvalue())
finally:
# unmigrated_app.SillyModel has a foreign key to
# 'migrations.Tribble', but that model is only defined in a
# migration, so the global app registry never sees it and the
# reference is left dangling. Remove it to avoid problems in
# subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_forwards(self):
"""
        sqlmigrate outputs forward-looking SQL.
"""
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
lines = out.getvalue().splitlines()
if connection.features.can_rollback_ddl:
self.assertEqual(lines[0], connection.ops.start_transaction_sql())
self.assertEqual(lines[-1], connection.ops.end_transaction_sql())
lines = lines[1:-1]
self.assertEqual(
lines[:3],
[
"--",
"-- Create model Author",
"--",
],
)
self.assertIn(
"create table %s" % connection.ops.quote_name("migrations_author").lower(),
lines[3].lower(),
)
pos = lines.index("--", 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Create model Tribble",
"--",
],
)
self.assertIn(
"create table %s" % connection.ops.quote_name("migrations_tribble").lower(),
lines[pos + 3].lower(),
)
pos = lines.index("--", pos + 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Add field bool to tribble",
"--",
],
)
pos = lines.index("--", pos + 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Alter unique_together for author (1 constraint(s))",
"--",
],
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_backwards(self):
"""
        sqlmigrate outputs reverse-looking SQL.
"""
# Cannot generate the reverse SQL unless we've applied the migration.
call_command("migrate", "migrations", verbosity=0)
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True)
lines = out.getvalue().splitlines()
try:
if connection.features.can_rollback_ddl:
self.assertEqual(lines[0], connection.ops.start_transaction_sql())
self.assertEqual(lines[-1], connection.ops.end_transaction_sql())
lines = lines[1:-1]
self.assertEqual(
lines[:3],
[
"--",
"-- Alter unique_together for author (1 constraint(s))",
"--",
],
)
pos = lines.index("--", 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Add field bool to tribble",
"--",
],
)
pos = lines.index("--", pos + 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Create model Tribble",
"--",
],
)
next_pos = lines.index("--", pos + 3)
drop_table_sql = (
"drop table %s"
% connection.ops.quote_name("migrations_tribble").lower()
)
for line in lines[pos + 3 : next_pos]:
if drop_table_sql in line.lower():
break
else:
self.fail("DROP TABLE (tribble) not found.")
pos = next_pos
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Create model Author",
"--",
],
)
drop_table_sql = (
"drop table %s" % connection.ops.quote_name("migrations_author").lower()
)
for line in lines[pos + 3 :]:
if drop_table_sql in line.lower():
break
else:
self.fail("DROP TABLE (author) not found.")
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}
)
def test_sqlmigrate_for_non_atomic_migration(self):
"""
Transaction wrappers aren't shown for non-atomic migrations.
"""
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
queries = [q.strip() for q in output.splitlines()]
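        # start_transaction_sql() may be empty on some backends; only check
        # for the transaction wrapper when the backend defines one.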
if connection.ops.start_transaction_sql():
self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries)
self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_for_non_transactional_databases(self):
"""
Transaction wrappers aren't shown for databases that don't support
transactional DDL.
"""
out = io.StringIO()
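        # Pretend the backend can't roll back DDL, so sqlmigrate must omit
        # the transaction wrapper.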
with mock.patch.object(connection.features, "can_rollback_ddl", False):
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
queries = [q.strip() for q in output.splitlines()]
start_transaction_sql = connection.ops.start_transaction_sql()
if start_transaction_sql:
self.assertNotIn(start_transaction_sql.lower(), queries)
self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_ambiguous_prefix_squashed_migrations(self):
msg = (
"More than one migration matches '0001' in app 'migrations'. "
"Please be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("sqlmigrate", "migrations", "0001")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_squashed_migration(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_squashed_0002", stdout=out)
output = out.getvalue().lower()
self.assertIn("-- create model author", output)
self.assertIn("-- create model book", output)
self.assertNotIn("-- create model tribble", output)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_replaced_migration(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_initial", stdout=out)
output = out.getvalue().lower()
self.assertIn("-- create model author", output)
self.assertIn("-- create model tribble", output)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_operations"}
)
def test_sqlmigrate_no_operations(self):
err = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_initial", stderr=err)
self.assertEqual(err.getvalue(), "No operations found.\n")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_noop"}
)
def test_sqlmigrate_noop(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
lines = out.getvalue().splitlines()
if connection.features.can_rollback_ddl:
lines = lines[1:-1]
self.assertEqual(
lines,
[
"--",
"-- Raw SQL operation",
"--",
"-- (no-op)",
],
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_manual_porting"}
)
def test_sqlmigrate_unrepresentable(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0002", stdout=out)
lines = out.getvalue().splitlines()
if connection.features.can_rollback_ddl:
lines = lines[1:-1]
self.assertEqual(
lines,
[
"--",
"-- Raw Python operation",
"--",
"-- THIS OPERATION CANNOT BE WRITTEN AS SQL",
],
)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.unmigrated_app",
],
)
def test_regression_22823_unmigrated_fk_to_migrated_model(self):
"""
Assuming you have 3 apps, `A`, `B`, and `C`, such that:
* `A` has migrations
* `B` has a migration we want to apply
* `C` has no migrations, but has an FK to `A`
        When we try to migrate "B", an exception occurs because "B" was not
        included in the ProjectState that is used to detect soft-applied
        migrations (#22823).
"""
call_command("migrate", "migrated_unapplied_app", verbosity=0)
# unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble',
# but that model is only defined in a migration, so the global app
# registry never sees it and the reference is left dangling. Remove it
# to avoid problems in subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app_syncdb"]
)
def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self):
"""
For an app without migrations, editor.execute() is used for executing
the syncdb deferred SQL.
"""
stdout = io.StringIO()
with mock.patch.object(BaseDatabaseSchemaEditor, "execute") as execute:
call_command(
"migrate", run_syncdb=True, verbosity=1, stdout=stdout, no_color=True
)
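            # The app's models produce exactly two CREATE TABLE statements,
            # all issued through editor.execute().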
create_table_count = len(
[call for call in execute.mock_calls if "CREATE TABLE" in str(call)]
)
self.assertEqual(create_table_count, 2)
# There's at least one deferred SQL for creating the foreign key
# index.
self.assertGreater(len(execute.mock_calls), 2)
stdout = stdout.getvalue()
self.assertIn("Synchronize unmigrated apps: unmigrated_app_syncdb", stdout)
self.assertIn("Creating tables...", stdout)
table_name = truncate_name(
"unmigrated_app_syncdb_classroom", connection.ops.max_name_length()
)
self.assertIn("Creating table %s" % table_name, stdout)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_syncdb_app_with_migrations(self):
msg = "Can't use run_syncdb with app 'migrations' as it has migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", "migrations", run_syncdb=True, verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.unmigrated_app_syncdb",
"migrations.migrations_test_apps.unmigrated_app_simple",
]
)
def test_migrate_syncdb_app_label(self):
"""
Running migrate --run-syncdb with an app_label only creates tables for
the specified app.
"""
stdout = io.StringIO()
with mock.patch.object(BaseDatabaseSchemaEditor, "execute") as execute:
call_command(
"migrate", "unmigrated_app_syncdb", run_syncdb=True, stdout=stdout
)
create_table_count = len(
[call for call in execute.mock_calls if "CREATE TABLE" in str(call)]
)
self.assertEqual(create_table_count, 2)
self.assertGreater(len(execute.mock_calls), 2)
self.assertIn(
"Synchronize unmigrated app: unmigrated_app_syncdb", stdout.getvalue()
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_record_replaced(self):
"""
Running a single squashed migration should record all of the original
replaced migrations as run.
"""
recorder = MigrationRecorder(connection)
out = io.StringIO()
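        # Applying the squashed migration should also record the replaced
        # migrations (0001_initial and 0002_second) as applied.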
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
"migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
applied_migrations = recorder.applied_migrations()
self.assertIn(("migrations", "0001_initial"), applied_migrations)
self.assertIn(("migrations", "0002_second"), applied_migrations)
self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations)
# Rollback changes
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_record_squashed(self):
"""
Running migrate for a squashed migration should record as run
if all of the replaced migrations have been run (#25231).
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
out = io.StringIO()
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
"migrations\n"
" [-] 0001_squashed_0002 (2 squashed migrations) "
"run 'manage.py migrate' to finish recording.\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
"migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
self.assertIn(
("migrations", "0001_squashed_0002"), recorder.applied_migrations()
)
# No changes were actually applied so there is nothing to rollback
def test_migrate_partially_applied_squashed_migration(self):
"""
Migrating to a squashed migration specified by name should succeed
even if it is partially applied.
"""
with self.temporary_migration_module(module="migrations.test_migrations"):
recorder = MigrationRecorder(connection)
try:
call_command("migrate", "migrations", "0001_initial", verbosity=0)
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
verbosity=0,
)
call_command(
"migrate",
"migrations",
"0001_squashed_0002_second",
verbosity=0,
)
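                # Migrating to the squashed migration applies and records its
                # remaining unapplied part (0002_second).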
applied_migrations = recorder.applied_migrations()
self.assertIn(("migrations", "0002_second"), applied_migrations)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_backward_to_squashed_migration(self):
try:
call_command("migrate", "migrations", "0001_squashed_0002", verbosity=0)
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
call_command("migrate", "migrations", "0001_initial", verbosity=0)
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_book")
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_inconsistent_history(self):
"""
Running migrate with some migrations applied before their dependencies
should not be allowed.
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0002_second")
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial"
)
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
call_command("migrate")
applied_migrations = recorder.applied_migrations()
self.assertNotIn(("migrations", "0001_initial"), applied_migrations)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.migrated_app",
]
)
def test_migrate_not_reflected_changes(self):
class NewModel1(models.Model):
class Meta:
app_label = "migrated_app"
class NewModel2(models.Model):
class Meta:
app_label = "migrated_unapplied_app"
out = io.StringIO()
try:
call_command("migrate", verbosity=0)
call_command("migrate", stdout=out, no_color=True)
self.assertEqual(
"operations to perform:\n"
" apply all migrations: migrated_app, migrated_unapplied_app\n"
"running migrations:\n"
" no migrations to apply.\n"
" your models in app(s): 'migrated_app', "
"'migrated_unapplied_app' have changes that are not yet "
"reflected in a migration, and so won't be applied.\n"
" run 'manage.py makemigrations' to make new migrations, and "
"then re-run 'manage.py migrate' to apply them.\n",
out.getvalue().lower(),
)
finally:
# Unmigrate everything.
call_command("migrate", "migrated_app", "zero", verbosity=0)
call_command("migrate", "migrated_unapplied_app", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_squashed_no_replaces",
}
)
def test_migrate_prune(self):
"""
With prune=True, references to migration files deleted from the
migrations module (such as after being squashed) are removed from the
django_migrations table.
"""
recorder = MigrationRecorder(connection)
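        # Record the squashed migration and two entries whose files don't
        # exist in the module; --prune should remove only the stale entries.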
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
recorder.record_applied("migrations", "0001_squashed_0002")
out = io.StringIO()
try:
call_command("migrate", "migrations", prune=True, stdout=out, no_color=True)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n"
" Pruning migrations.0001_initial OK\n"
" Pruning migrations.0002_second OK\n",
)
applied_migrations = [
migration
for migration in recorder.applied_migrations()
if migration[0] == "migrations"
]
self.assertEqual(applied_migrations, [("migrations", "0001_squashed_0002")])
finally:
recorder.record_unapplied("migrations", "0001_initial")
recorder.record_unapplied("migrations", "0001_second")
recorder.record_unapplied("migrations", "0001_squashed_0002")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_prune_deleted_squashed_migrations_in_replaces(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_squashed"
) as migration_dir:
try:
call_command("migrate", "migrations", verbosity=0)
# Delete the replaced migrations.
os.remove(os.path.join(migration_dir, "0001_initial.py"))
os.remove(os.path.join(migration_dir, "0002_second.py"))
# --prune cannot be used before removing the "replaces"
# attribute.
call_command(
"migrate",
"migrations",
prune=True,
stdout=out,
no_color=True,
)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n"
" Cannot use --prune because the following squashed "
"migrations have their 'replaces' attributes and may not "
"be recorded as applied:\n"
" migrations.0001_squashed_0002\n"
" Re-run 'manage.py migrate' if they are not marked as "
"applied, and remove 'replaces' attributes in their "
"Migration classes.\n",
)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_prune_no_migrations_to_prune(self):
out = io.StringIO()
call_command("migrate", "migrations", prune=True, stdout=out, no_color=True)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n No migrations to prune.\n",
)
out = io.StringIO()
call_command(
"migrate",
"migrations",
prune=True,
stdout=out,
no_color=True,
verbosity=0,
)
self.assertEqual(out.getvalue(), "")
def test_prune_no_app_label(self):
msg = "Migrations can be pruned only when an app is specified."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", prune=True)


class MakeMigrationsTests(MigrationTestBase):
"""
Tests running the makemigrations command.
"""
def setUp(self):
super().setUp()
self._old_models = apps.app_configs["migrations"].models.copy()
def tearDown(self):
apps.app_configs["migrations"].models = self._old_models
apps.all_models["migrations"] = self._old_models
apps.clear_cache()
super().tearDown()
def test_files_content(self):
self.assertTableNotExists("migrations_unicodemodel")
apps.register_model("migrations", UnicodeModel)
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
# Check for empty __init__.py file in migrations folder
init_file = os.path.join(migration_dir, "__init__.py")
self.assertTrue(os.path.exists(init_file))
with open(init_file) as fp:
content = fp.read()
self.assertEqual(content, "")
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with open(initial_file, encoding="utf-8") as fp:
content = fp.read()
self.assertIn("migrations.CreateModel", content)
self.assertIn("initial = True", content)
self.assertIn("úñí©óðé µóðéø", content) # Meta.verbose_name
self.assertIn("úñí©óðé µóðéøß", content) # Meta.verbose_name_plural
self.assertIn("ÚÑÍ¢ÓÐÉ", content) # title.verbose_name
self.assertIn("“Ðjáñgó”", content) # title.default
def test_makemigrations_order(self):
"""
makemigrations should recognize number-only migrations (0001.py).
"""
module = "migrations.test_migrations_order"
with self.temporary_migration_module(module=module) as migration_dir:
if hasattr(importlib, "invalidate_caches"):
# importlib caches os.listdir() on some platforms like macOS
# (#23850).
importlib.invalidate_caches()
call_command(
"makemigrations", "migrations", "--empty", "-n", "a", "-v", "0"
)
self.assertTrue(os.path.exists(os.path.join(migration_dir, "0002_a.py")))
def test_makemigrations_empty_connections(self):
empty_connections = ConnectionHandler({"default": {}})
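        # No usable database is configured, so makemigrations can't check
        # migration-history consistency; it should still run without failing.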
with mock.patch(
"django.core.management.commands.makemigrations.connections",
new=empty_connections,
):
# with no apps
out = io.StringIO()
call_command("makemigrations", stdout=out)
self.assertIn("No changes detected", out.getvalue())
# with an app
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
init_file = os.path.join(migration_dir, "__init__.py")
self.assertTrue(os.path.exists(init_file))
@override_settings(INSTALLED_APPS=["migrations", "migrations2"])
def test_makemigrations_consistency_checks_respect_routers(self):
"""
The history consistency checks in makemigrations respect
settings.DATABASE_ROUTERS.
"""
def patched_has_table(migration_recorder):
if migration_recorder.connection is connections["other"]:
raise Exception("Other connection")
else:
return mock.DEFAULT
self.assertTableNotExists("migrations_unicodemodel")
apps.register_model("migrations", UnicodeModel)
with mock.patch.object(
MigrationRecorder, "has_table", autospec=True, side_effect=patched_has_table
) as has_table:
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
self.assertEqual(has_table.call_count, 1) # 'default' is checked
# Router says not to migrate 'other' so consistency shouldn't
# be checked.
with self.settings(DATABASE_ROUTERS=["migrations.routers.TestRouter"]):
call_command("makemigrations", "migrations", verbosity=0)
self.assertEqual(has_table.call_count, 2) # 'default' again
# With a router that doesn't prohibit migrating 'other',
# consistency is checked.
with self.settings(
DATABASE_ROUTERS=["migrations.routers.DefaultOtherRouter"]
):
with self.assertRaisesMessage(Exception, "Other connection"):
call_command("makemigrations", "migrations", verbosity=0)
self.assertEqual(has_table.call_count, 4) # 'default' and 'other'
# With a router that doesn't allow migrating on any database,
# no consistency checks are made.
with self.settings(DATABASE_ROUTERS=["migrations.routers.TestRouter"]):
with mock.patch.object(
TestRouter, "allow_migrate", return_value=False
) as allow_migrate:
call_command("makemigrations", "migrations", verbosity=0)
allow_migrate.assert_any_call(
"other", "migrations", model_name="UnicodeModel"
)
# allow_migrate() is called with the correct arguments.
self.assertGreater(len(allow_migrate.mock_calls), 0)
called_aliases = set()
for mock_call in allow_migrate.mock_calls:
_, call_args, call_kwargs = mock_call
connection_alias, app_name = call_args
called_aliases.add(connection_alias)
# Raises an error if invalid app_name/model_name occurs.
apps.get_app_config(app_name).get_model(call_kwargs["model_name"])
self.assertEqual(called_aliases, set(connections))
self.assertEqual(has_table.call_count, 4)
def test_failing_migration(self):
        # If a migration fails to serialize, it shouldn't generate an empty
        # file (#21280).
apps.register_model("migrations", UnserializableModel)
with self.temporary_migration_module() as migration_dir:
with self.assertRaisesMessage(ValueError, "Cannot serialize"):
call_command("makemigrations", "migrations", verbosity=0)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertFalse(os.path.exists(initial_file))
def test_makemigrations_conflict_exit(self):
"""
makemigrations exits if it detects a conflict.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
):
with self.assertRaises(CommandError) as context:
call_command("makemigrations")
self.assertEqual(
str(context.exception),
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (0002_conflicting_second, 0002_second in "
"migrations).\n"
"To fix them run 'python manage.py makemigrations --merge'",
)
def test_makemigrations_merge_no_conflict(self):
"""
        makemigrations exits cleanly in merge mode when there are no conflicts.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command("makemigrations", merge=True, stdout=out)
self.assertIn("No conflicts detected to merge.", out.getvalue())
def test_makemigrations_empty_no_app_specified(self):
"""
makemigrations exits if no app is specified with 'empty' mode.
"""
msg = "You must supply at least one app label when using --empty."
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", empty=True)
def test_makemigrations_empty_migration(self):
"""
makemigrations properly constructs an empty migration.
"""
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", empty=True, verbosity=0)
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with open(initial_file, encoding="utf-8") as fp:
content = fp.read()
# Remove all whitespace to check for empty dependencies and operations
content = content.replace(" ", "")
self.assertIn(
"dependencies=[]" if HAS_BLACK else "dependencies=[\n]", content
)
self.assertIn(
"operations=[]" if HAS_BLACK else "operations=[\n]", content
)
@override_settings(MIGRATION_MODULES={"migrations": None})
def test_makemigrations_disabled_migrations_for_app(self):
"""
makemigrations raises a nice error when migrations are disabled for an
app.
"""
msg = (
"Django can't create migrations for app 'migrations' because migrations "
"have been disabled via the MIGRATION_MODULES setting."
)
with self.assertRaisesMessage(ValueError, msg):
call_command("makemigrations", "migrations", empty=True, verbosity=0)
def test_makemigrations_no_changes_no_apps(self):
"""
makemigrations exits when there are no changes and no apps are specified.
"""
out = io.StringIO()
call_command("makemigrations", stdout=out)
self.assertIn("No changes detected", out.getvalue())
def test_makemigrations_no_changes(self):
"""
makemigrations exits when there are no changes to an app.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("No changes detected in app 'migrations'", out.getvalue())
def test_makemigrations_no_apps_initial(self):
"""
        makemigrations should detect that an initial migration is needed on
        empty migration modules if no app is provided.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_empty"):
call_command("makemigrations", stdout=out)
self.assertIn("0001_initial.py", out.getvalue())
def test_makemigrations_no_init(self):
"""Migration directories without an __init__.py file are allowed."""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_init"
):
call_command("makemigrations", stdout=out)
self.assertIn("0001_initial.py", out.getvalue())
def test_makemigrations_migrations_announce(self):
"""
makemigrations announces the migration at the default verbosity level.
"""
out = io.StringIO()
with self.temporary_migration_module():
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("Migrations for 'migrations'", out.getvalue())
def test_makemigrations_no_common_ancestor(self):
"""
makemigrations fails to merge migrations with no common ancestor.
"""
with self.assertRaises(ValueError) as context:
with self.temporary_migration_module(
module="migrations.test_migrations_no_ancestor"
):
call_command("makemigrations", "migrations", merge=True)
exception_message = str(context.exception)
self.assertIn("Could not find common ancestor of", exception_message)
self.assertIn("0002_second", exception_message)
self.assertIn("0002_conflicting_second", exception_message)
def test_makemigrations_interactive_reject(self):
"""
makemigrations enters and exits interactive mode properly.
"""
# Monkeypatch interactive questioner to auto reject
with mock.patch("builtins.input", mock.Mock(return_value="N")):
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
with captured_stdout():
call_command(
"makemigrations",
"migrations",
name="merge",
merge=True,
interactive=True,
verbosity=0,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
def test_makemigrations_interactive_accept(self):
"""
makemigrations enters interactive mode and merges properly.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch("builtins.input", mock.Mock(return_value="y")):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
merge=True,
interactive=True,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertTrue(os.path.exists(merge_file))
self.assertIn("Created new merge migration", out.getvalue())
def test_makemigrations_default_merge_name(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
merge=True,
interactive=False,
stdout=out,
)
merge_file = os.path.join(
migration_dir,
"0003_merge_0002_conflicting_second_0002_second.py",
)
self.assertIs(os.path.exists(merge_file), True)
with open(merge_file, encoding="utf-8") as fp:
content = fp.read()
if HAS_BLACK:
target_str = '("migrations", "0002_conflicting_second")'
else:
target_str = "('migrations', '0002_conflicting_second')"
self.assertIn(target_str, content)
self.assertIn("Created new merge migration %s" % merge_file, out.getvalue())
@mock.patch("django.db.migrations.utils.datetime")
def test_makemigrations_auto_merge_name(self, mock_datetime):
mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4)
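        # The default merge name (which lists both branch names) would be too
        # long here, so a timestamp from the mocked now() is used instead.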
with mock.patch("builtins.input", mock.Mock(return_value="y")):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict_long_name"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
merge=True,
interactive=True,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge_20160102_0304.py")
self.assertTrue(os.path.exists(merge_file))
self.assertIn("Created new merge migration", out.getvalue())
def test_makemigrations_non_interactive_not_null_addition(self):
"""
Non-interactive makemigrations fails when a default is missing on a
new not-null field.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_int = models.IntegerField()
class Meta:
app_label = "migrations"
with self.assertRaises(SystemExit):
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
self.assertIn(
"Field 'silly_int' on model 'sillymodel' not migrated: it is "
"impossible to add a non-nullable field without specifying a "
"default.",
out.getvalue(),
)
def test_makemigrations_interactive_not_null_addition(self):
"""
makemigrations messages when adding a NOT NULL field in interactive
mode.
"""
class Author(models.Model):
silly_field = models.BooleanField(null=False)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to add a non-nullable field 'silly_field' to "
"author without specifying a default. This is because the "
"database needs something to populate existing rows.\n"
"Please select a fix:\n"
" 1) Provide a one-off default now (will be set on all existing "
"rows with a null value for this column)\n"
" 2) Quit and manually define a default value in models.py."
)
with self.temporary_migration_module(module="migrations.test_migrations"):
# 2 - quit.
with mock.patch("builtins.input", return_value="2"):
with captured_stdout() as out, self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
self.assertIn(input_msg, out.getvalue())
# 1 - provide a default.
with mock.patch("builtins.input", return_value="1"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
output = out.getvalue()
self.assertIn(input_msg, output)
self.assertIn("Please enter the default value as valid Python.", output)
self.assertIn(
"The datetime and django.utils.timezone modules are "
"available, so it is possible to provide e.g. timezone.now as "
"a value",
output,
)
self.assertIn("Type 'exit' to exit this prompt", output)
def test_makemigrations_non_interactive_not_null_alteration(self):
"""
Non-interactive makemigrations fails when a default is missing on a
field changed to not-null.
"""
class Author(models.Model):
name = models.CharField(max_length=255)
slug = models.SlugField()
age = models.IntegerField(default=0)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
self.assertIn("Alter field slug on author", out.getvalue())
self.assertIn(
"Field 'slug' on model 'author' given a default of NOT PROVIDED "
"and must be corrected.",
out.getvalue(),
)
def test_makemigrations_interactive_not_null_alteration(self):
"""
makemigrations messages when changing a NULL field to NOT NULL in
interactive mode.
"""
class Author(models.Model):
slug = models.SlugField(null=False)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to change a nullable field 'slug' on author to "
"non-nullable without providing a default. This is because the "
"database needs something to populate existing rows.\n"
"Please select a fix:\n"
" 1) Provide a one-off default now (will be set on all existing "
"rows with a null value for this column)\n"
" 2) Ignore for now. Existing rows that contain NULL values will "
"have to be handled manually, for example with a RunPython or "
"RunSQL operation.\n"
" 3) Quit and manually define a default value in models.py."
)
with self.temporary_migration_module(module="migrations.test_migrations"):
# No message appears if --dry-run.
with captured_stdout() as out:
call_command(
"makemigrations",
"migrations",
interactive=True,
dry_run=True,
)
self.assertNotIn(input_msg, out.getvalue())
# 3 - quit.
with mock.patch("builtins.input", return_value="3"):
with captured_stdout() as out, self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
self.assertIn(input_msg, out.getvalue())
# 1 - provide a default.
with mock.patch("builtins.input", return_value="1"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
output = out.getvalue()
self.assertIn(input_msg, output)
self.assertIn("Please enter the default value as valid Python.", output)
self.assertIn(
"The datetime and django.utils.timezone modules are "
"available, so it is possible to provide e.g. timezone.now as "
"a value",
output,
)
self.assertIn("Type 'exit' to exit this prompt", output)
def test_makemigrations_non_interactive_no_model_rename(self):
"""
        In non-interactive mode, makemigrations handles a possible model
        rename as a model deletion plus a model creation.
"""
class RenamedModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
self.assertIn("Delete model SillyModel", out.getvalue())
self.assertIn("Create model RenamedModel", out.getvalue())
def test_makemigrations_non_interactive_no_field_rename(self):
"""
        In non-interactive mode, makemigrations handles a possible field
        rename as a field removal plus a field addition.
"""
class SillyModel(models.Model):
silly_rename = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
self.assertIn("Remove field silly_field from sillymodel", out.getvalue())
self.assertIn("Add field silly_rename to sillymodel", out.getvalue())
@mock.patch("builtins.input", return_value="Y")
def test_makemigrations_model_rename_interactive(self, mock_input):
class RenamedModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(
module="migrations.test_migrations_no_default",
):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
self.assertIn("Rename model SillyModel to RenamedModel", out.getvalue())
@mock.patch("builtins.input", return_value="Y")
def test_makemigrations_field_rename_interactive(self, mock_input):
class SillyModel(models.Model):
silly_rename = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(
module="migrations.test_migrations_no_default",
):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
self.assertIn(
"Rename field silly_field on sillymodel to silly_rename",
out.getvalue(),
)
def test_makemigrations_handle_merge(self):
"""
makemigrations properly merges the conflicting migrations with --noinput.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
merge=True,
interactive=False,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertTrue(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertIn("Created new merge migration", output)
def test_makemigration_merge_dry_run(self):
"""
        makemigrations respects the --dry-run option when fixing migration
conflicts (#24427).
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
dry_run=True,
merge=True,
interactive=False,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
def test_makemigration_merge_dry_run_verbosity_3(self):
"""
`makemigrations --merge --dry-run` writes the merge migration file to
stdout with `verbosity == 3` (#24427).
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
dry_run=True,
merge=True,
interactive=False,
stdout=out,
verbosity=3,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
# Additional output caused by verbosity 3
# The complete merge migration file that would be written
self.assertIn("class Migration(migrations.Migration):", output)
self.assertIn("dependencies = [", output)
self.assertIn("('migrations', '0002_second')", output)
self.assertIn("('migrations', '0002_conflicting_second')", output)
self.assertIn("operations = [", output)
self.assertIn("]", output)
def test_makemigrations_dry_run(self):
"""
`makemigrations --dry-run` should not ask for defaults.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_date = models.DateField() # Added field without a default
silly_auto_now = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command("makemigrations", "migrations", dry_run=True, stdout=out)
# Output the expected changes directly, without asking for defaults
self.assertIn("Add field silly_date to sillymodel", out.getvalue())
def test_makemigrations_dry_run_verbosity_3(self):
"""
Allow `makemigrations --dry-run` to output the migrations file to
stdout (with verbosity == 3).
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_char = models.CharField(default="")
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command(
"makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3
)
# Normal --dry-run output
self.assertIn("- Add field silly_char to sillymodel", out.getvalue())
# Additional output caused by verbosity 3
# The complete migrations file that would be written
self.assertIn("class Migration(migrations.Migration):", out.getvalue())
self.assertIn("dependencies = [", out.getvalue())
self.assertIn("('migrations', '0001_initial'),", out.getvalue())
self.assertIn("migrations.AddField(", out.getvalue())
self.assertIn("model_name='sillymodel',", out.getvalue())
self.assertIn("name='silly_char',", out.getvalue())
def test_makemigrations_scriptable(self):
"""
With scriptable=True, log output is diverted to stderr, and only the
paths of generated migration files are written to stdout.
"""
out = io.StringIO()
err = io.StringIO()
with self.temporary_migration_module(
module="migrations.migrations.test_migrations",
) as migration_dir:
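            # The referenced module doesn't exist, so the temporary migrations
            # package starts out empty and a fresh 0001_initial.py is created.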
call_command(
"makemigrations",
"migrations",
scriptable=True,
stdout=out,
stderr=err,
)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertEqual(out.getvalue(), f"{initial_file}\n")
self.assertIn(" - Create model ModelWithCustomBase\n", err.getvalue())
@mock.patch("builtins.input", return_value="Y")
def test_makemigrations_scriptable_merge(self, mock_input):
out = io.StringIO()
err = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict",
) as migration_dir:
call_command(
"makemigrations",
"migrations",
merge=True,
name="merge",
scriptable=True,
stdout=out,
stderr=err,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertEqual(out.getvalue(), f"{merge_file}\n")
self.assertIn(f"Created new merge migration {merge_file}", err.getvalue())
def test_makemigrations_migrations_modules_path_not_exist(self):
"""
makemigrations creates migrations when specifying a custom location
for migration files using MIGRATION_MODULES if the custom path
doesn't already exist.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar"
with self.temporary_migration_module(module=migration_module) as migration_dir:
call_command("makemigrations", "migrations", stdout=out)
# Migrations file is actually created in the expected path.
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
# Command output indicates the migration is created.
self.assertIn(" - Create model SillyModel", out.getvalue())
@override_settings(MIGRATION_MODULES={"migrations": "some.nonexistent.path"})
def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self):
msg = (
"Could not locate an appropriate location to create migrations "
"package some.nonexistent.path. Make sure the toplevel package "
"exists and can be imported."
)
with self.assertRaisesMessage(ValueError, msg):
call_command("makemigrations", "migrations", empty=True, verbosity=0)
def test_makemigrations_interactive_by_default(self):
"""
The user is prompted to merge by default if there are conflicts and
        merge is True. The test answers negatively to differentiate this from
        the behavior when --noinput is specified.
"""
# Monkeypatch interactive questioner to auto reject
out = io.StringIO()
with mock.patch("builtins.input", mock.Mock(return_value="N")):
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations", "migrations", name="merge", merge=True, stdout=out
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
# This will fail if interactive is False by default
self.assertFalse(os.path.exists(merge_file))
self.assertNotIn("Created new merge migration", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations",
"migrations.migrations_test_apps.unspecified_app_with_conflict",
]
)
def test_makemigrations_unspecified_app_with_conflict_no_merge(self):
"""
makemigrations does not raise a CommandError when an unspecified app
has conflicting migrations.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
call_command("makemigrations", "migrations", merge=False, verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.unspecified_app_with_conflict",
]
)
def test_makemigrations_unspecified_app_with_conflict_merge(self):
"""
makemigrations does not create a merge for an unspecified app even if
it has conflicting migrations.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch("builtins.input", mock.Mock(return_value="y")):
out = io.StringIO()
with self.temporary_migration_module(
app_label="migrated_app"
) as migration_dir:
call_command(
"makemigrations",
"migrated_app",
name="merge",
merge=True,
interactive=True,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
self.assertIn("No conflicts detected to merge.", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.conflicting_app_with_dependencies",
]
)
def test_makemigrations_merge_dont_output_dependency_operations(self):
"""
        makemigrations --merge does not output any operations from apps other
        than the given app.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch("builtins.input", mock.Mock(return_value="N")):
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: False
):
call_command(
"makemigrations",
"conflicting_app_with_dependencies",
merge=True,
interactive=True,
stdout=out,
)
self.assertEqual(
out.getvalue().lower(),
"merging conflicting_app_with_dependencies\n"
" branch 0002_conflicting_second\n"
" - create model something\n"
" branch 0002_second\n"
" - delete model tribble\n"
" - remove field silly_field from author\n"
" - add field rating to author\n"
" - create model book\n"
"\n"
"merging will only work if the operations printed above do not "
"conflict\n"
"with each other (working on different fields or models)\n"
"should these migration branches be merged? [y/n] ",
)
def test_makemigrations_with_custom_name(self):
"""
        makemigrations --name generates migrations with a custom name.
"""
with self.temporary_migration_module() as migration_dir:
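            # Helper: run makemigrations with --name and return the generated
            # file's content with spaces stripped for easier assertions.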
def cmd(migration_count, migration_name, *args):
call_command(
"makemigrations",
"migrations",
"--verbosity",
"0",
"--name",
migration_name,
*args,
)
migration_file = os.path.join(
migration_dir, "%s_%s.py" % (migration_count, migration_name)
)
# Check for existing migration file in migration folder
self.assertTrue(os.path.exists(migration_file))
with open(migration_file, encoding="utf-8") as fp:
content = fp.read()
content = content.replace(" ", "")
return content
# generate an initial migration
migration_name_0001 = "my_initial_migration"
content = cmd("0001", migration_name_0001)
self.assertIn(
"dependencies=[]" if HAS_BLACK else "dependencies=[\n]", content
)
# importlib caches os.listdir() on some platforms like macOS
# (#23850).
if hasattr(importlib, "invalidate_caches"):
importlib.invalidate_caches()
# generate an empty migration
migration_name_0002 = "my_custom_migration"
content = cmd("0002", migration_name_0002, "--empty")
if HAS_BLACK:
template_str = 'dependencies=[\n("migrations","0001_%s"),\n]'
else:
template_str = "dependencies=[\n('migrations','0001_%s'),\n]"
self.assertIn(
template_str % migration_name_0001,
content,
)
self.assertIn("operations=[]" if HAS_BLACK else "operations=[\n]", content)
def test_makemigrations_with_invalid_custom_name(self):
msg = "The migration name must be a valid Python identifier."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"makemigrations", "migrations", "--name", "invalid name", "--empty"
)
def test_makemigrations_check(self):
"""
makemigrations --check should exit with a non-zero status when
there are changes to an app requiring migrations.
"""
with self.temporary_migration_module():
with self.assertRaises(SystemExit):
call_command("makemigrations", "--check", "migrations", verbosity=0)
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
call_command("makemigrations", "--check", "migrations", verbosity=0)
def test_makemigrations_migration_path_output(self):
"""
makemigrations should print the relative paths to the migrations unless
they are outside of the current tree, in which case the absolute path
should be shown.
"""
out = io.StringIO()
apps.register_model("migrations", UnicodeModel)
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", stdout=out)
self.assertIn(
os.path.join(migration_dir, "0001_initial.py"), out.getvalue()
)
def test_makemigrations_migration_path_output_valueerror(self):
"""
makemigrations prints the absolute path if os.path.relpath() raises a
ValueError when it's impossible to obtain a relative path, e.g. on
Windows if Django is installed on a different drive than where the
migration files are created.
"""
out = io.StringIO()
with self.temporary_migration_module() as migration_dir:
with mock.patch("os.path.relpath", side_effect=ValueError):
call_command("makemigrations", "migrations", stdout=out)
self.assertIn(os.path.join(migration_dir, "0001_initial.py"), out.getvalue())
def test_makemigrations_inconsistent_history(self):
"""
makemigrations should raise InconsistentMigrationHistory exception if
there are some migrations applied before their dependencies.
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0002_second")
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial"
)
with self.temporary_migration_module(module="migrations.test_migrations"):
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
call_command("makemigrations")
def test_makemigrations_inconsistent_history_db_failure(self):
msg = (
"Got an error checking a consistent migration history performed "
"for database connection 'default': could not connect to server"
)
with mock.patch(
"django.db.migrations.loader.MigrationLoader.check_consistent_history",
side_effect=OperationalError("could not connect to server"),
):
with self.temporary_migration_module():
with self.assertWarns(RuntimeWarning) as cm:
call_command("makemigrations", verbosity=0)
self.assertEqual(str(cm.warning), msg)
@mock.patch("builtins.input", return_value="1")
@mock.patch(
"django.db.migrations.questioner.sys.stdin",
mock.MagicMock(encoding=sys.getdefaultencoding()),
)
def test_makemigrations_auto_now_add_interactive(self, *args):
"""
makemigrations prompts the user when adding auto_now_add to an existing
model.
"""
class Entry(models.Model):
title = models.CharField(max_length=255)
creation_date = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to add the field 'creation_date' with "
"'auto_now_add=True' to entry without providing a default. This "
"is because the database needs something to populate existing "
"rows.\n"
" 1) Provide a one-off default now which will be set on all "
"existing rows\n"
" 2) Quit and manually define a default value in models.py."
)
# Monkeypatch interactive questioner to auto accept
prompt_stdout = io.StringIO()
with self.temporary_migration_module(module="migrations.test_auto_now_add"):
call_command(
"makemigrations", "migrations", interactive=True, stdout=prompt_stdout
)
prompt_output = prompt_stdout.getvalue()
self.assertIn(input_msg, prompt_output)
self.assertIn("Please enter the default value as valid Python.", prompt_output)
self.assertIn(
"Accept the default 'timezone.now' by pressing 'Enter' or provide "
"another value.",
prompt_output,
)
self.assertIn("Type 'exit' to exit this prompt", prompt_output)
self.assertIn("Add field creation_date to entry", prompt_output)
@mock.patch("builtins.input", return_value="2")
def test_makemigrations_auto_now_add_interactive_quit(self, mock_input):
class Author(models.Model):
publishing_date = models.DateField(auto_now_add=True)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout():
with self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
def test_makemigrations_non_interactive_auto_now_add_addition(self):
"""
Non-interactive makemigrations fails when a default is missing on a
new field when auto_now_add=True.
"""
class Entry(models.Model):
creation_date = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_auto_now_add"):
with self.assertRaises(SystemExit), captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
self.assertIn(
"Field 'creation_date' on model 'entry' not migrated: it is "
"impossible to add a field with 'auto_now_add=True' without "
"specifying a default.",
out.getvalue(),
)
def test_makemigrations_interactive_unique_callable_default_addition(self):
"""
makemigrations prompts the user when adding a unique field with
a callable default.
"""
class Book(models.Model):
created = models.DateTimeField(unique=True, default=timezone.now)
class Meta:
app_label = "migrations"
version = get_docs_version()
input_msg = (
f"Callable default on unique field book.created will not generate "
f"unique values upon migrating.\n"
f"Please choose how to proceed:\n"
f" 1) Continue making this migration as the first step in writing "
f"a manual migration to generate unique values described here: "
f"https://docs.djangoproject.com/en/{version}/howto/"
f"writing-migrations/#migrations-that-add-unique-fields.\n"
f" 2) Quit and edit field options in models.py.\n"
)
with self.temporary_migration_module(module="migrations.test_migrations"):
# 2 - quit.
with mock.patch("builtins.input", return_value="2"):
with captured_stdout() as out, self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
out_value = out.getvalue()
self.assertIn(input_msg, out_value)
self.assertNotIn("Add field created to book", out_value)
# 1 - continue.
with mock.patch("builtins.input", return_value="1"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
out_value = out.getvalue()
self.assertIn(input_msg, out_value)
self.assertIn("Add field created to book", out_value)
def test_makemigrations_non_interactive_unique_callable_default_addition(self):
class Book(models.Model):
created = models.DateTimeField(unique=True, default=timezone.now)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
out_value = out.getvalue()
self.assertIn("Add field created to book", out_value)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"},
)
def test_makemigrations_continues_number_sequence_after_squash(self):
with self.temporary_migration_module(
module="migrations.test_migrations_squashed"
):
with captured_stdout() as out:
call_command(
"makemigrations",
"migrations",
interactive=False,
empty=True,
)
out_value = out.getvalue()
self.assertIn("0003_auto", out_value)


class SquashMigrationsTests(MigrationTestBase):
"""
Tests running the squashmigrations command.
"""
def test_squashmigrations_squashes(self):
"""
squashmigrations squashes migrations.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
stdout=out,
no_color=True,
)
squashed_migration_file = os.path.join(
migration_dir, "0001_squashed_0002_second.py"
)
self.assertTrue(os.path.exists(squashed_migration_file))
self.assertEqual(
out.getvalue(),
"Will squash the following migrations:\n"
" - 0001_initial\n"
" - 0002_second\n"
"Optimizing...\n"
" Optimized from 8 operations to 2 operations.\n"
"Created new squashed migration %s\n"
" You should commit this migration but leave the old ones in place;\n"
" the new migration will be used for new installs. Once you are sure\n"
" all instances of the codebase have applied the migrations you "
"squashed,\n"
" you can delete them.\n" % squashed_migration_file,
)
def test_squashmigrations_initial_attribute(self):
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations", "migrations", "0002", interactive=False, verbosity=0
)
squashed_migration_file = os.path.join(
migration_dir, "0001_squashed_0002_second.py"
)
with open(squashed_migration_file, encoding="utf-8") as fp:
content = fp.read()
self.assertIn("initial = True", content)
def test_squashmigrations_optimizes(self):
"""
squashmigrations optimizes operations.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
verbosity=1,
stdout=out,
)
self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue())
def test_ticket_23799_squashmigrations_no_optimize(self):
"""
squashmigrations --no-optimize doesn't optimize operations.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
verbosity=1,
no_optimize=True,
stdout=out,
)
self.assertIn("Skipping optimization", out.getvalue())
def test_squashmigrations_valid_start(self):
"""
squashmigrations accepts a starting migration.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0002",
"0003",
interactive=False,
verbosity=1,
stdout=out,
)
squashed_migration_file = os.path.join(
migration_dir, "0002_second_squashed_0003_third.py"
)
with open(squashed_migration_file, encoding="utf-8") as fp:
content = fp.read()
if HAS_BLACK:
test_str = ' ("migrations", "0001_initial")'
else:
test_str = " ('migrations', '0001_initial')"
self.assertIn(test_str, content)
self.assertNotIn("initial = True", content)
out = out.getvalue()
self.assertNotIn(" - 0001_initial", out)
self.assertIn(" - 0002_second", out)
self.assertIn(" - 0003_third", out)
def test_squashmigrations_invalid_start(self):
"""
squashmigrations doesn't accept a starting migration after the ending migration.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
msg = (
"The migration 'migrations.0003_third' cannot be found. Maybe "
"it comes after the migration 'migrations.0002_second'"
)
with self.assertRaisesMessage(CommandError, msg):
call_command(
"squashmigrations",
"migrations",
"0003",
"0002",
interactive=False,
verbosity=0,
)
def test_squashed_name_with_start_migration_name(self):
"""--squashed-name specifies the new migration's name."""
squashed_name = "squashed_name"
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0001",
"0002",
squashed_name=squashed_name,
interactive=False,
verbosity=0,
)
squashed_migration_file = os.path.join(
migration_dir, "0001_%s.py" % squashed_name
)
self.assertTrue(os.path.exists(squashed_migration_file))
def test_squashed_name_without_start_migration_name(self):
"""--squashed-name also works if a start migration is omitted."""
squashed_name = "squashed_name"
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0001",
squashed_name=squashed_name,
interactive=False,
verbosity=0,
)
squashed_migration_file = os.path.join(
migration_dir, "0001_%s.py" % squashed_name
)
self.assertTrue(os.path.exists(squashed_migration_file))
def test_squashed_name_exists(self):
msg = "Migration 0001_initial already exists. Use a different name."
with self.temporary_migration_module(module="migrations.test_migrations"):
with self.assertRaisesMessage(CommandError, msg):
call_command(
"squashmigrations",
"migrations",
"0001",
"0002",
squashed_name="initial",
interactive=False,
verbosity=0,
)
def test_squashmigrations_manual_porting(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_manual_porting",
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
stdout=out,
no_color=True,
)
squashed_migration_file = os.path.join(
migration_dir,
"0001_squashed_0002_second.py",
)
self.assertTrue(os.path.exists(squashed_migration_file))
black_warning = ""
if HAS_BLACK:
black_warning = (
"Squashed migration couldn't be formatted using the "
'"black" command. You can call it manually.\n'
)
self.assertEqual(
out.getvalue(),
f"Will squash the following migrations:\n"
f" - 0001_initial\n"
f" - 0002_second\n"
f"Optimizing...\n"
f" No optimizations possible.\n"
f"Created new squashed migration {squashed_migration_file}\n"
f" You should commit this migration but leave the old ones in place;\n"
f" the new migration will be used for new installs. Once you are sure\n"
f" all instances of the codebase have applied the migrations you "
f"squashed,\n"
f" you can delete them.\n"
f"Manual porting required\n"
f" Your migrations contained functions that must be manually copied "
f"over,\n"
f" as we could not safely copy their implementation.\n"
f" See the comment at the top of the squashed migration for details.\n"
+ black_warning,
)
class AppLabelErrorTests(TestCase):
"""
This class inherits TestCase because MigrationTestBase uses
`available_apps = ['migrations']` which means that it's the only installed
app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these
tests.
"""
nonexistent_app_error = "No installed app with label 'nonexistent_app'."
did_you_mean_auth_error = (
"No installed app with label 'django.contrib.auth'. Did you mean 'auth'?"
)
def test_makemigrations_nonexistent_app_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("makemigrations", "nonexistent_app", stderr=err)
self.assertIn(self.nonexistent_app_error, err.getvalue())
def test_makemigrations_app_name_specified_as_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("makemigrations", "django.contrib.auth", stderr=err)
self.assertIn(self.did_you_mean_auth_error, err.getvalue())
def test_migrate_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("migrate", "nonexistent_app")
def test_migrate_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("migrate", "django.contrib.auth")
def test_showmigrations_nonexistent_app_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("showmigrations", "nonexistent_app", stderr=err)
self.assertIn(self.nonexistent_app_error, err.getvalue())
def test_showmigrations_app_name_specified_as_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("showmigrations", "django.contrib.auth", stderr=err)
self.assertIn(self.did_you_mean_auth_error, err.getvalue())
def test_sqlmigrate_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("sqlmigrate", "nonexistent_app", "0002")
def test_sqlmigrate_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("sqlmigrate", "django.contrib.auth", "0002")
def test_squashmigrations_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("squashmigrations", "nonexistent_app", "0002")
def test_squashmigrations_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("squashmigrations", "django.contrib.auth", "0002")
def test_optimizemigration_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("optimizemigration", "nonexistent_app", "0002")
def test_optimizemigration_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("optimizemigration", "django.contrib.auth", "0002")
class OptimizeMigrationTests(MigrationTestBase):
def test_no_optimization_possible(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"optimizemigration", "migrations", "0002", stdout=out, no_color=True
)
migration_file = os.path.join(migration_dir, "0002_second.py")
self.assertTrue(os.path.exists(migration_file))
call_command(
"optimizemigration",
"migrations",
"0002",
stdout=out,
no_color=True,
verbosity=0,
)
self.assertEqual(out.getvalue(), "No optimizations possible.\n")
def test_optimization(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"optimizemigration", "migrations", "0001", stdout=out, no_color=True
)
initial_migration_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_migration_file))
with open(initial_migration_file) as fp:
content = fp.read()
self.assertIn(
'("bool", models.BooleanField'
if HAS_BLACK
else "('bool', models.BooleanField",
content,
)
self.assertEqual(
out.getvalue(),
f"Optimizing from 4 operations to 2 operations.\n"
f"Optimized migration {initial_migration_file}\n",
)
def test_optimization_no_verbosity(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"optimizemigration",
"migrations",
"0001",
stdout=out,
no_color=True,
verbosity=0,
)
initial_migration_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_migration_file))
with open(initial_migration_file) as fp:
content = fp.read()
self.assertIn(
'("bool", models.BooleanField'
if HAS_BLACK
else "('bool', models.BooleanField",
content,
)
self.assertEqual(out.getvalue(), "")
def test_creates_replace_migration_manual_porting(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_manual_porting"
) as migration_dir:
call_command(
"optimizemigration", "migrations", "0003", stdout=out, no_color=True
)
optimized_migration_file = os.path.join(
migration_dir, "0003_third_optimized.py"
)
self.assertTrue(os.path.exists(optimized_migration_file))
with open(optimized_migration_file) as fp:
content = fp.read()
self.assertIn("replaces = [", content)
black_warning = ""
if HAS_BLACK:
black_warning = (
"Optimized migration couldn't be formatted using the "
'"black" command. You can call it manually.\n'
)
self.assertEqual(
out.getvalue(),
"Optimizing from 3 operations to 2 operations.\n"
"Manual porting required\n"
" Your migrations contained functions that must be manually copied over,\n"
" as we could not safely copy their implementation.\n"
" See the comment at the top of the optimized migration for details.\n"
+ black_warning
+ f"Optimized migration {optimized_migration_file}\n",
)
def test_fails_squash_migration_manual_porting(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_manual_porting"
) as migration_dir:
msg = (
"Migration will require manual porting but is already a squashed "
"migration.\nTransition to a normal migration first: "
"https://docs.djangoproject.com/en/dev/topics/migrations/"
"#squashing-migrations"
)
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "migrations", "0004", stdout=out)
optimized_migration_file = os.path.join(
migration_dir, "0004_fourth_optimized.py"
)
self.assertFalse(os.path.exists(optimized_migration_file))
self.assertEqual(
out.getvalue(), "Optimizing from 3 operations to 2 operations.\n"
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_optimizemigration_check(self):
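        # With --check, the command exits via SystemExit when the migration
        # can be optimized, and succeeds when no optimization is possible.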
with self.assertRaises(SystemExit):
call_command(
"optimizemigration", "--check", "migrations", "0001", verbosity=0
)
call_command("optimizemigration", "--check", "migrations", "0002", verbosity=0)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app_simple"],
)
def test_app_without_migrations(self):
msg = "App 'unmigrated_app_simple' does not have migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "unmigrated_app_simple", "0001")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_clashing_prefix"},
)
    def test_ambiguous_prefix(self):
msg = (
"More than one migration matches 'a' in app 'migrations'. Please "
"be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "migrations", "a")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_unknown_prefix(self):
msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "migrations", "nonexistent")
|
a13c27fc05070b93da41e62b27c22d9a91353958b4d022525751c81a827c7a01 | import os
import shutil
import tempfile
from contextlib import contextmanager
from importlib import import_module
from django.apps import apps
from django.db import connection, connections, migrations, models
from django.db.migrations.migration import Migration
from django.db.migrations.recorder import MigrationRecorder
from django.db.migrations.state import ProjectState
from django.test import TransactionTestCase
from django.test.utils import extend_sys_path
from django.utils.module_loading import module_dir
class MigrationTestBase(TransactionTestCase):
"""
    Contains an extended set of assertions for testing migrations and schema
    operations.
"""
available_apps = ["migrations"]
databases = {"default", "other"}
def tearDown(self):
# Reset applied-migrations state.
for db in self.databases:
recorder = MigrationRecorder(connections[db])
recorder.migration_qs.filter(app="migrations").delete()
def get_table_description(self, table, using="default"):
with connections[using].cursor() as cursor:
return connections[using].introspection.get_table_description(cursor, table)
def assertTableExists(self, table, using="default"):
with connections[using].cursor() as cursor:
self.assertIn(table, connections[using].introspection.table_names(cursor))
def assertTableNotExists(self, table, using="default"):
with connections[using].cursor() as cursor:
self.assertNotIn(
table, connections[using].introspection.table_names(cursor)
)
def assertColumnExists(self, table, column, using="default"):
self.assertIn(
column, [c.name for c in self.get_table_description(table, using=using)]
)
def assertColumnNotExists(self, table, column, using="default"):
self.assertNotIn(
column, [c.name for c in self.get_table_description(table, using=using)]
)
def _get_column_allows_null(self, table, column, using):
return [
c.null_ok
for c in self.get_table_description(table, using=using)
if c.name == column
][0]
def assertColumnNull(self, table, column, using="default"):
self.assertTrue(self._get_column_allows_null(table, column, using))
def assertColumnNotNull(self, table, column, using="default"):
self.assertFalse(self._get_column_allows_null(table, column, using))
def assertIndexExists(
self, table, columns, value=True, using="default", index_type=None
):
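        # Asserts that a non-unique index covering exactly `columns` (and, if
        # given, of `index_type`) exists when value=True, or is absent when
        # value=False.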
with connections[using].cursor() as cursor:
self.assertEqual(
value,
any(
c["index"]
for c in connections[using]
.introspection.get_constraints(cursor, table)
.values()
if (
c["columns"] == list(columns)
and (index_type is None or c["type"] == index_type)
and not c["unique"]
)
),
)
def assertIndexNotExists(self, table, columns):
return self.assertIndexExists(table, columns, False)
def assertIndexNameExists(self, table, index, using="default"):
with connections[using].cursor() as cursor:
self.assertIn(
index,
                connections[using].introspection.get_constraints(cursor, table),
)
def assertIndexNameNotExists(self, table, index, using="default"):
with connections[using].cursor() as cursor:
self.assertNotIn(
index,
                connections[using].introspection.get_constraints(cursor, table),
)
def assertConstraintExists(self, table, name, value=True, using="default"):
with connections[using].cursor() as cursor:
constraints = (
connections[using].introspection.get_constraints(cursor, table).items()
)
self.assertEqual(
value,
any(c["check"] for n, c in constraints if n == name),
)
def assertConstraintNotExists(self, table, name):
return self.assertConstraintExists(table, name, False)
    def assertUniqueConstraintExists(
        self, table, columns, value=True, using="default"
    ):
with connections[using].cursor() as cursor:
constraints = (
connections[using].introspection.get_constraints(cursor, table).values()
)
self.assertEqual(
value,
any(c["unique"] for c in constraints if c["columns"] == list(columns)),
)
def assertFKExists(self, table, columns, to, value=True, using="default"):
if not connections[using].features.can_introspect_foreign_keys:
return
with connections[using].cursor() as cursor:
self.assertEqual(
value,
any(
c["foreign_key"] == to
for c in connections[using]
.introspection.get_constraints(cursor, table)
.values()
if c["columns"] == list(columns)
),
)
def assertFKNotExists(self, table, columns, to):
return self.assertFKExists(table, columns, to, False)
@contextmanager
def temporary_migration_module(self, app_label="migrations", module=None):
"""
        Allows testing management commands in a temporary migrations module.
        Wrap all invocations of makemigrations and squashmigrations with this
        context manager in order to avoid inadvertently creating migration
        files in your source tree.
        Takes the application label that will be passed to makemigrations or
        squashmigrations and the Python path to a migrations module.
        The migrations module is used as a template for creating the temporary
        migrations module. If it isn't provided, the application's own
        migrations module is used, when it exists.
        Returns the filesystem path to the temporary migrations module.
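        Example (a minimal sketch of typical usage in a test; assumes
        call_command is imported from django.core.management):
            with self.temporary_migration_module(
                module="migrations.test_migrations"
            ) as migration_dir:
                call_command("makemigrations", "migrations", verbosity=0)
                # Inspect files written under migration_dir here.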
"""
with tempfile.TemporaryDirectory() as temp_dir:
target_dir = tempfile.mkdtemp(dir=temp_dir)
with open(os.path.join(target_dir, "__init__.py"), "w"):
pass
target_migrations_dir = os.path.join(target_dir, "migrations")
if module is None:
module = apps.get_app_config(app_label).name + ".migrations"
try:
source_migrations_dir = module_dir(import_module(module))
except (ImportError, ValueError):
pass
else:
shutil.copytree(source_migrations_dir, target_migrations_dir)
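            # Make the temporary package importable and point the app's
            # MIGRATION_MODULES setting at its migrations module.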
with extend_sys_path(temp_dir):
new_module = os.path.basename(target_dir) + ".migrations"
with self.settings(MIGRATION_MODULES={app_label: new_module}):
yield target_migrations_dir
class OperationTestBase(MigrationTestBase):
"""Common functions to help test operations."""
@classmethod
def setUpClass(cls):
super().setUpClass()
cls._initial_table_names = frozenset(connection.introspection.table_names())
def tearDown(self):
self.cleanup_test_tables()
super().tearDown()
def cleanup_test_tables(self):
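        # Drop every table created during the test; constraint checks are
        # disabled below so the drop order doesn't matter.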
table_names = (
frozenset(connection.introspection.table_names())
- self._initial_table_names
)
with connection.schema_editor() as editor:
with connection.constraint_checks_disabled():
for table_name in table_names:
editor.execute(
editor.sql_delete_table
% {
"table": editor.quote_name(table_name),
}
)
def apply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration("name", app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.apply(project_state, editor)
def unapply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration("name", app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.unapply(project_state, editor)
def make_test_state(self, app_label, operation, **kwargs):
"""
Makes a test state using set_up_test_model and returns the
original state and the state after the migration is applied.
"""
project_state = self.set_up_test_model(app_label, **kwargs)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
return project_state, new_state
def set_up_test_model(
self,
app_label,
second_model=False,
third_model=False,
index=False,
multicol_index=False,
related_model=False,
mti_model=False,
proxy_model=False,
manager_model=False,
unique_together=False,
options=False,
db_table=None,
index_together=False,
constraints=None,
indexes=None,
):
"""Creates a test model state and database table."""
# Make the "current" state.
model_options = {
"swappable": "TEST_SWAP_MODEL",
"index_together": [["weight", "pink"]] if index_together else [],
"unique_together": [["pink", "weight"]] if unique_together else [],
}
if options:
model_options["permissions"] = [("can_groom", "Can groom")]
if db_table:
model_options["db_table"] = db_table
operations = [
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options=model_options,
)
]
if index:
operations.append(
migrations.AddIndex(
"Pony",
models.Index(fields=["pink"], name="pony_pink_idx"),
)
)
if multicol_index:
operations.append(
migrations.AddIndex(
"Pony",
models.Index(fields=["pink", "weight"], name="pony_test_idx"),
)
)
if indexes:
for index in indexes:
operations.append(migrations.AddIndex("Pony", index))
if constraints:
for constraint in constraints:
operations.append(migrations.AddConstraint("Pony", constraint))
if second_model:
operations.append(
migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
],
)
)
if third_model:
operations.append(
migrations.CreateModel(
"Van",
[
("id", models.AutoField(primary_key=True)),
],
)
)
if related_model:
operations.append(
migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
(
"friend",
models.ForeignKey("self", models.CASCADE, null=True),
),
],
)
)
if mti_model:
operations.append(
migrations.CreateModel(
"ShetlandPony",
fields=[
(
"pony_ptr",
models.OneToOneField(
"Pony",
models.CASCADE,
auto_created=True,
parent_link=True,
primary_key=True,
to_field="id",
serialize=False,
),
),
("cuteness", models.IntegerField(default=1)),
],
bases=["%s.Pony" % app_label],
)
)
if proxy_model:
operations.append(
migrations.CreateModel(
"ProxyPony",
fields=[],
options={"proxy": True},
bases=["%s.Pony" % app_label],
)
)
if manager_model:
from .models import FoodManager, FoodQuerySet
operations.append(
migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
)
return self.apply_operations(app_label, ProjectState(), operations)
|
05a995f93e94c7c080e66ce2dcbb9101ca42553660c17d45c197022abe4f36eb | from pathlib import Path
from template_tests.test_response import test_processor_name
from django.template import Context, EngineHandler, RequestContext
from django.template.backends.django import DjangoTemplates
from django.template.library import InvalidTemplateLibrary
from django.test import RequestFactory, override_settings
from .test_dummy import TemplateStringsTests
class DjangoTemplatesTests(TemplateStringsTests):
engine_class = DjangoTemplates
backend_name = "django"
request_factory = RequestFactory()
def test_context_has_priority_over_template_context_processors(self):
# See ticket #23789.
engine = DjangoTemplates(
{
"DIRS": [],
"APP_DIRS": False,
"NAME": "django",
"OPTIONS": {
"context_processors": [test_processor_name],
},
}
)
template = engine.from_string("{{ processors }}")
request = self.request_factory.get("/")
# Context processors run
content = template.render({}, request)
self.assertEqual(content, "yes")
# Context overrides context processors
content = template.render({"processors": "no"}, request)
self.assertEqual(content, "no")
def test_render_requires_dict(self):
"""django.Template.render() requires a dict."""
engine = DjangoTemplates(
{
"DIRS": [],
"APP_DIRS": False,
"NAME": "django",
"OPTIONS": {},
}
)
template = engine.from_string("")
context = Context()
request_context = RequestContext(self.request_factory.get("/"), {})
msg = "context must be a dict rather than Context."
with self.assertRaisesMessage(TypeError, msg):
template.render(context)
msg = "context must be a dict rather than RequestContext."
with self.assertRaisesMessage(TypeError, msg):
template.render(request_context)
@override_settings(INSTALLED_APPS=["template_backends.apps.good"])
def test_templatetag_discovery(self):
engine = DjangoTemplates(
{
"DIRS": [],
"APP_DIRS": False,
"NAME": "django",
"OPTIONS": {
"libraries": {
"alternate": (
"template_backends.apps.good.templatetags.good_tags"
),
"override": (
"template_backends.apps.good.templatetags.good_tags"
),
},
},
}
)
# libraries are discovered from installed applications
self.assertEqual(
engine.engine.libraries["good_tags"],
"template_backends.apps.good.templatetags.good_tags",
)
self.assertEqual(
engine.engine.libraries["subpackage.tags"],
"template_backends.apps.good.templatetags.subpackage.tags",
)
# libraries are discovered from django.templatetags
self.assertEqual(
engine.engine.libraries["static"],
"django.templatetags.static",
)
# libraries passed in OPTIONS are registered
self.assertEqual(
engine.engine.libraries["alternate"],
"template_backends.apps.good.templatetags.good_tags",
)
# libraries passed in OPTIONS take precedence over discovered ones
self.assertEqual(
engine.engine.libraries["override"],
"template_backends.apps.good.templatetags.good_tags",
)
@override_settings(INSTALLED_APPS=["template_backends.apps.importerror"])
def test_templatetag_discovery_import_error(self):
"""
Import errors in tag modules should be reraised with a helpful message.
"""
with self.assertRaisesMessage(
InvalidTemplateLibrary,
"ImportError raised when trying to load "
"'template_backends.apps.importerror.templatetags.broken_tags'",
) as cm:
DjangoTemplates(
{
"DIRS": [],
"APP_DIRS": False,
"NAME": "django",
"OPTIONS": {},
}
)
self.assertIsInstance(cm.exception.__cause__, ImportError)
def test_builtins_discovery(self):
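        # Builtins passed in OPTIONS are appended after Django's default
        # builtin template libraries.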
engine = DjangoTemplates(
{
"DIRS": [],
"APP_DIRS": False,
"NAME": "django",
"OPTIONS": {
"builtins": ["template_backends.apps.good.templatetags.good_tags"],
},
}
)
self.assertEqual(
engine.engine.builtins,
[
"django.template.defaulttags",
"django.template.defaultfilters",
"django.template.loader_tags",
"template_backends.apps.good.templatetags.good_tags",
],
)
def test_autoescape_off(self):
templates = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {"autoescape": False},
}
]
engines = EngineHandler(templates=templates)
self.assertEqual(
engines["django"]
.from_string("Hello, {{ name }}")
.render({"name": "Bob & Jim"}),
"Hello, Bob & Jim",
)
def test_autoescape_default(self):
templates = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
}
]
engines = EngineHandler(templates=templates)
self.assertEqual(
engines["django"]
.from_string("Hello, {{ name }}")
.render({"name": "Bob & Jim"}),
"Hello, Bob & Jim",
)
def test_default_template_loaders(self):
"""The cached template loader is always enabled by default."""
for debug in (True, False):
with self.subTest(DEBUG=debug), self.settings(DEBUG=debug):
engine = DjangoTemplates(
{"DIRS": [], "APP_DIRS": True, "NAME": "django", "OPTIONS": {}}
)
self.assertEqual(
engine.engine.loaders,
[
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
],
)
def test_dirs_pathlib(self):
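        # DIRS entries may be pathlib.Path objects rather than strings.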
engine = DjangoTemplates(
{
"DIRS": [Path(__file__).parent / "templates" / "template_backends"],
"APP_DIRS": False,
"NAME": "django",
"OPTIONS": {},
}
)
template = engine.get_template("hello.html")
self.assertEqual(template.render({"name": "Joe"}), "Hello Joe!\n")
|
bb6a2812f1f4b0451eaae45efc6ab7abc41f3a89a7769e6dc1f7cbc93281474e | from unittest import mock
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.test import TestCase, override_settings
from django.urls import path, reverse
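# Minimal database router for these tests: reads and writes both go to
# whichever alias is currently assigned to Router.target_db.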
class Router:
target_db = None
def db_for_read(self, model, **hints):
return self.target_db
db_for_write = db_for_read
def allow_relation(self, obj1, obj2, **hints):
return True
site = admin.AdminSite(name="test_adminsite")
site.register(User, admin_class=UserAdmin)
urlpatterns = [
path("admin/", site.urls),
]
@override_settings(ROOT_URLCONF=__name__, DATABASE_ROUTERS=["%s.Router" % __name__])
class MultiDatabaseTests(TestCase):
databases = {"default", "other"}
@classmethod
def setUpTestData(cls):
cls.superusers = {}
for db in cls.databases:
Router.target_db = db
cls.superusers[db] = User.objects.create_superuser(
username="admin",
password="something",
email="[email protected]",
)
@mock.patch("django.contrib.auth.admin.transaction")
def test_add_view(self, mock):
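        # django.contrib.auth.admin.transaction is mocked above so the test
        # can assert which database alias the add view passes to atomic().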
for db in self.databases:
with self.subTest(db_connection=db):
Router.target_db = db
self.client.force_login(self.superusers[db])
self.client.post(
reverse("test_adminsite:auth_user_add"),
{
"username": "some_user",
"password1": "helloworld",
"password2": "helloworld",
},
)
mock.atomic.assert_called_with(using=db)
|
401aaa68b8f875fb8a0dcda2bda70f3d3926b27119905d12d1d58ade30434645 | import datetime
import itertools
import re
from importlib import import_module
from unittest import mock
from urllib.parse import quote, urljoin
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import BACKEND_SESSION_KEY, REDIRECT_FIELD_NAME, SESSION_KEY
from django.contrib.auth.forms import (
AuthenticationForm,
PasswordChangeForm,
SetPasswordForm,
)
from django.contrib.auth.models import Permission, User
from django.contrib.auth.views import (
INTERNAL_RESET_SESSION_TOKEN,
LoginView,
RedirectURLMixin,
logout_then_login,
redirect_to_login,
)
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from django.http import HttpRequest, HttpResponse
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import Client, TestCase, ignore_warnings, override_settings
from django.test.client import RedirectCycleError
from django.urls import NoReverseMatch, reverse, reverse_lazy
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.http import urlsafe_base64_encode
from .client import PasswordResetConfirmClient
from .models import CustomUser, UUIDUser
from .settings import AUTH_TEMPLATES
class RedirectURLMixinTests(TestCase):
@override_settings(ROOT_URLCONF="auth_tests.urls")
def test_get_default_redirect_url_next_page(self):
class RedirectURLView(RedirectURLMixin):
next_page = "/custom/"
self.assertEqual(RedirectURLView().get_default_redirect_url(), "/custom/")
def test_get_default_redirect_url_no_next_page(self):
msg = "No URL to redirect to. Provide a next_page."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
RedirectURLMixin().get_default_redirect_url()
@override_settings(
LANGUAGES=[("en", "English")],
LANGUAGE_CODE="en",
TEMPLATES=AUTH_TEMPLATES,
ROOT_URLCONF="auth_tests.urls",
)
class AuthViewsTestCase(TestCase):
"""
Helper base class for the test classes that follow.
"""
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(
username="testclient", password="password", email="[email protected]"
)
cls.u3 = User.objects.create_user(
username="staff", password="password", email="[email protected]"
)
def login(self, username="testclient", password="password", url="/login/"):
response = self.client.post(
url,
{
"username": username,
"password": password,
},
)
self.assertIn(SESSION_KEY, self.client.session)
return response
def logout(self):
response = self.client.post("/admin/logout/")
self.assertEqual(response.status_code, 200)
self.assertNotIn(SESSION_KEY, self.client.session)
def assertFormError(self, response, error):
"""Assert that error is found in response.context['form'] errors"""
form_errors = list(itertools.chain(*response.context["form"].errors.values()))
self.assertIn(str(error), form_errors)
@override_settings(ROOT_URLCONF="django.contrib.auth.urls")
class AuthViewNamedURLTests(AuthViewsTestCase):
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
("login", [], {}),
("logout", [], {}),
("password_change", [], {}),
("password_change_done", [], {}),
("password_reset", [], {}),
("password_reset_done", [], {}),
(
"password_reset_confirm",
[],
{
"uidb64": "aaaaaaa",
"token": "1111-aaaaa",
},
),
("password_reset_complete", [], {}),
]
for name, args, kwargs in expected_named_urls:
with self.subTest(name=name):
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail(
"Reversal of url named '%s' failed with NoReverseMatch" % name
)
class PasswordResetTest(AuthViewsTestCase):
def setUp(self):
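        # PasswordResetConfirmClient automatically follows the redirect from
        # the token URL to the set-password page.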
self.client = PasswordResetConfirmClient()
def test_email_not_found(self):
"""If the provided email is not registered, don't raise any error but
also don't send any email."""
response = self.client.get("/password_reset/")
self.assertEqual(response.status_code, 200)
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("http://", mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
        # Optional multipart text/html email support exists; make sure the
        # original, plain-text-only default behavior is unchanged.
self.assertFalse(mail.outbox[0].message().is_multipart())
def test_extra_email_context(self):
"""
extra_email_context should be available in the email template context.
"""
response = self.client.post(
"/password_reset_extra_email_context/",
{"email": "[email protected]"},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)
self.assertIn("http://custom.example.com/reset/", mail.outbox[0].body)
def test_html_mail_template(self):
"""
A multipart email with text/plain and text/html is sent
if the html_email_template parameter is passed to the view
"""
response = self.client.post(
"/password_reset/html_email_template/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), "text/plain")
self.assertEqual(message.get_payload(1).get_content_type(), "text/html")
self.assertNotIn("<html>", message.get_payload(0).get_payload())
self.assertIn("<html>", message.get_payload(1).get_payload())
def test_email_found_custom_from(self):
"""
Email is sent if a valid email address is provided for password reset
when a custom from_email is provided.
"""
response = self.client.post(
"/password_reset_from_email/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("[email protected]", mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
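        # For example, "www.example:[email protected]" parses with userinfo
        # "www.example:dr.frankenstein" and host "evil.com".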
with self.assertLogs("django.security.DisallowedHost", "ERROR"):
response = self.client.post(
"/password_reset/",
{"email": "[email protected]"},
HTTP_HOST="www.example:[email protected]",
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with self.assertLogs("django.security.DisallowedHost", "ERROR"):
response = self.client.post(
"/admin_password_reset/",
{"email": "[email protected]"},
HTTP_HOST="www.example:[email protected]",
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
def _test_confirm_start(self):
# Start by creating the email
self.client.post("/password_reset/", {"email": "[email protected]"})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
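        # Return the full matched URL and the path-only portion (group 1).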
return urlmatch[0], urlmatch[1]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0" * 4) + path[-1]
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_user(self):
# A nonexistent user returns a 200 response, not a 404.
response = self.client.get("/reset/123456/1-1/")
self.assertContains(response, "The password reset link was invalid")
def test_confirm_overflow_user(self):
# A base36 user id that overflows int returns a 200 response.
response = self.client.get("/reset/zzzzzzzzzzzzz/1-1/")
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": " anewpassword",
},
)
# Check the password has not been changed
u = User.objects.get(email="[email protected]")
self.assertTrue(not u.check_password("anewpassword"))
def test_confirm_invalid_hash(self):
"""A POST with an invalid token is rejected."""
u = User.objects.get(email="[email protected]")
original_password = u.password
url, path = self._test_confirm_start()
path_parts = path.split("-")
path_parts[-1] = ("0") * 20 + "/"
path = "-".join(path_parts)
response = self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": "anewpassword",
},
)
self.assertIs(response.context["validlink"], False)
u.refresh_from_db()
self.assertEqual(original_password, u.password) # password hasn't changed
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
# Check the password has been changed
u = User.objects.get(email="[email protected]")
self.assertTrue(u.check_password("anewpassword"))
# The reset token is deleted from the session.
self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session)
# Check we can't use the link again
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "x"}
)
self.assertFormError(
response, SetPasswordForm.error_messages["password_mismatch"]
)
def test_reset_redirect_default(self):
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertRedirects(
response, "/password_reset/done/", fetch_redirect_response=False
)
def test_reset_custom_redirect(self):
response = self.client.post(
"/password_reset/custom_redirect/", {"email": "[email protected]"}
)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_reset_custom_redirect_named(self):
response = self.client.post(
"/password_reset/custom_redirect/named/",
{"email": "[email protected]"},
)
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
def test_confirm_redirect_default(self):
url, path = self._test_confirm_start()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
def test_confirm_redirect_custom(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_confirm_redirect_custom_named(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/named/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
def test_confirm_custom_reset_url_token(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/token/")
self.client.reset_url_token = "set-passwordcustom"
response = self.client.post(
path,
{"new_password1": "anewpassword", "new_password2": "anewpassword"},
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
def test_confirm_login_post_reset(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/post_reset_login/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.ModelBackend",
"django.contrib.auth.backends.AllowAllUsersModelBackend",
]
)
def test_confirm_login_post_reset_custom_backend(self):
# This backend is specified in the URL pattern.
backend = "django.contrib.auth.backends.AllowAllUsersModelBackend"
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/post_reset_login_custom_backend/")
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
self.assertEqual(self.client.session[BACKEND_SESSION_KEY], backend)
def test_confirm_login_post_reset_already_logged_in(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/post_reset_login/")
self.login()
response = self.client.post(
path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
)
self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
def test_confirm_display_user_from_form(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
        # The password_reset_confirm() view passes the user object to the
        # SetPasswordForm, even on GET requests (#16919). For this test,
        # {{ form.user }} is rendered in the template
        # registration/password_reset_confirm.html.
username = User.objects.get(email="[email protected]").username
self.assertContains(response, "Hello, %s." % username)
# However, the view should NOT pass any user object on a form if the
# password reset link was invalid.
response = self.client.get("/reset/zzzzzzzzzzzzz/1-1/")
self.assertContains(response, "Hello, .")
def test_confirm_link_redirects_to_set_password_page(self):
url, path = self._test_confirm_start()
# Don't use PasswordResetConfirmClient (self.client) here which
# automatically fetches the redirect page.
client = Client()
response = client.get(path)
token = response.resolver_match.kwargs["token"]
uuidb64 = response.resolver_match.kwargs["uidb64"]
self.assertRedirects(response, "/reset/%s/set-password/" % uuidb64)
self.assertEqual(client.session["_password_reset_token"], token)
def test_confirm_custom_reset_url_token_link_redirects_to_set_password_page(self):
url, path = self._test_confirm_start()
path = path.replace("/reset/", "/reset/custom/token/")
client = Client()
response = client.get(path)
token = response.resolver_match.kwargs["token"]
uuidb64 = response.resolver_match.kwargs["uidb64"]
self.assertRedirects(
response, "/reset/custom/token/%s/set-passwordcustom/" % uuidb64
)
self.assertEqual(client.session["_password_reset_token"], token)
def test_invalid_link_if_going_directly_to_the_final_reset_password_url(self):
url, path = self._test_confirm_start()
_, uuidb64, _ = path.strip("/").split("/")
response = Client().get("/reset/%s/set-password/" % uuidb64)
self.assertContains(response, "The password reset link was invalid")
def test_missing_kwargs(self):
msg = "The URL path must contain 'uidb64' and 'token' parameters."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/reset/missing_parameters/")
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
class CustomUserPasswordResetTest(AuthViewsTestCase):
user_email = "[email protected]"
@classmethod
def setUpTestData(cls):
cls.u1 = CustomUser.custom_objects.create(
email="[email protected]",
date_of_birth=datetime.date(1976, 11, 8),
)
cls.u1.set_password("password")
cls.u1.save()
def setUp(self):
self.client = PasswordResetConfirmClient()
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post("/password_reset/", {"email": self.user_email})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch[0], urlmatch[1]
def test_confirm_valid_custom_user(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
# then submit a new password
response = self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": "anewpassword",
},
)
self.assertRedirects(response, "/reset/done/")
@override_settings(AUTH_USER_MODEL="auth_tests.UUIDUser")
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
def _test_confirm_start(self):
# instead of fixture
UUIDUser.objects.create_user(
email=self.user_email,
username="foo",
password="foo",
)
return super()._test_confirm_start()
def test_confirm_invalid_uuid(self):
"""A uidb64 that decodes to a non-UUID doesn't crash."""
_, path = self._test_confirm_start()
invalid_uidb64 = urlsafe_base64_encode(b"INVALID_UUID")
first, _uuidb64_, second = path.strip("/").split("/")
response = self.client.get(
"/" + "/".join((first, invalid_uidb64, second)) + "/"
)
self.assertContains(response, "The password reset link was invalid")
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self):
response = self.client.post(
"/login/",
{
"username": "testclient",
"password": "password",
},
)
self.assertFormError(
response,
AuthenticationForm.error_messages["invalid_login"]
% {"username": User._meta.get_field("username").verbose_name},
)
def logout(self):
self.client.post("/logout/")
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "donuts",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertFormError(
response, PasswordChangeForm.error_messages["password_incorrect"]
)
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "donuts",
},
)
self.assertFormError(
response, SetPasswordForm.error_messages["password_mismatch"]
)
def test_password_change_succeeds(self):
self.login()
self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.fail_login()
self.login(password="password1")
def test_password_change_done_succeeds(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_change/done/", fetch_redirect_response=False
)
@override_settings(LOGIN_URL="/login/")
def test_password_change_done_fails(self):
response = self.client.get("/password_change/done/")
self.assertRedirects(
response,
"/login/?next=/password_change/done/",
fetch_redirect_response=False,
)
def test_password_change_redirect_default(self):
self.login()
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_change/done/", fetch_redirect_response=False
)
def test_password_change_redirect_custom(self):
self.login()
response = self.client.post(
"/password_change/custom/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_password_change_redirect_custom_named(self):
self.login()
response = self.client.post(
"/password_change/custom/named/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
class SessionAuthenticationTests(AuthViewsTestCase):
def test_user_password_change_updates_session(self):
"""
#21649 - Ensure contrib.auth.views.password_change updates the user's
session auth hash after a password change so the session isn't logged out.
"""
self.login()
original_session_key = self.client.session.session_key
response = self.client.post(
"/password_change/",
{
"old_password": "password",
"new_password1": "password1",
"new_password2": "password1",
},
)
        # If the hash isn't updated, retrieving the redirect page will fail.
self.assertRedirects(response, "/password_change/done/")
# The session key is rotated.
self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse("login"))
self.assertEqual(response.status_code, 200)
if apps.is_installed("django.contrib.sites"):
Site = apps.get_model("sites.Site")
site = Site.objects.get_current()
self.assertEqual(response.context["site"], site)
self.assertEqual(response.context["site_name"], site.name)
else:
self.assertIsInstance(response.context["site"], RequestSite)
self.assertIsInstance(response.context["form"], AuthenticationForm)
def test_security_check(self):
login_url = reverse("login")
# These URLs should not pass the security check.
bad_urls = (
"http://example.com",
"http:///example.com",
"https://example.com",
"ftp://example.com",
"///example.com",
"//example.com",
'javascript:alert("XSS")',
)
for bad_url in bad_urls:
with self.subTest(bad_url=bad_url):
nasty_url = "%(url)s?%(next)s=%(bad_url)s" % {
"url": login_url,
"next": REDIRECT_FIELD_NAME,
"bad_url": quote(bad_url),
}
response = self.client.post(
nasty_url,
{
"username": "testclient",
"password": "password",
},
)
self.assertEqual(response.status_code, 302)
self.assertNotIn(
bad_url, response.url, "%s should be blocked" % bad_url
)
# These URLs should pass the security check.
good_urls = (
"/view/?param=http://example.com",
"/view/?param=https://example.com",
"/view?param=ftp://example.com",
"view/?param=//example.com",
"https://testserver/",
"HTTPS://testserver/",
"//testserver/",
"/url%20with%20spaces/",
)
for good_url in good_urls:
with self.subTest(good_url=good_url):
safe_url = "%(url)s?%(next)s=%(good_url)s" % {
"url": login_url,
"next": REDIRECT_FIELD_NAME,
"good_url": quote(good_url),
}
response = self.client.post(
safe_url,
{
"username": "testclient",
"password": "password",
},
)
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
def test_security_check_https(self):
login_url = reverse("login")
non_https_next_url = "http://testserver/path"
not_secured_url = "%(url)s?%(next)s=%(next_url)s" % {
"url": login_url,
"next": REDIRECT_FIELD_NAME,
"next_url": quote(non_https_next_url),
}
post_data = {
"username": "testclient",
"password": "password",
}
response = self.client.post(not_secured_url, post_data, secure=True)
self.assertEqual(response.status_code, 302)
self.assertNotEqual(response.url, non_https_next_url)
self.assertEqual(response.url, settings.LOGIN_REDIRECT_URL)
def test_login_form_contains_request(self):
# The custom authentication form for this login requires a request to
# initialize it.
response = self.client.post(
"/custom_request_auth_login/",
{
"username": "testclient",
"password": "password",
},
)
# The login was successful.
self.assertRedirects(
response, settings.LOGIN_REDIRECT_URL, fetch_redirect_response=False
)
def test_login_csrf_rotate(self):
"""
Makes sure that a login rotates the currently-used CSRF token.
"""
def get_response(request):
return HttpResponse()
# Do a GET to establish a CSRF token
# The test client isn't used here as it's a test for middleware.
req = HttpRequest()
CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
# get_token() triggers CSRF token inclusion in the response
get_token(req)
resp = CsrfViewMiddleware(LoginView.as_view())(req)
csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
token1 = csrf_cookie.coded_value
# Prepare the POST request
req = HttpRequest()
req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
req.method = "POST"
req.POST = {
"username": "testclient",
"password": "password",
"csrfmiddlewaretoken": token1,
}
# Use POST request to log in
SessionMiddleware(get_response).process_request(req)
CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
req.META[
"SERVER_NAME"
] = "testserver" # Required to have redirect work in login view
req.META["SERVER_PORT"] = 80
resp = CsrfViewMiddleware(LoginView.as_view())(req)
csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
token2 = csrf_cookie.coded_value
# Check the CSRF token switched
self.assertNotEqual(token1, token2)
def test_session_key_flushed_on_login(self):
"""
To avoid reusing another user's session, ensure a new, empty session is
created if the existing session corresponds to a different authenticated
user.
"""
self.login()
original_session_key = self.client.session.session_key
self.login(username="staff")
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_session_key_flushed_on_login_after_password_change(self):
"""
As above, but same user logging in after a password change.
"""
self.login()
original_session_key = self.client.session.session_key
# If no password change, session key should not be flushed.
self.login()
self.assertEqual(original_session_key, self.client.session.session_key)
user = User.objects.get(username="testclient")
user.set_password("foobar")
user.save()
self.login(password="foobar")
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_login_session_without_hash_session_key(self):
"""
Session without django.contrib.auth.HASH_SESSION_KEY should login
without an exception.
"""
user = User.objects.get(username="testclient")
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore()
session[SESSION_KEY] = user.id
session.save()
original_session_key = session.session_key
self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key
self.login()
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_login_get_default_redirect_url(self):
response = self.login(url="/login/get_default_redirect_url/")
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_login_next_page(self):
response = self.login(url="/login/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
def test_login_named_next_page_named(self):
response = self.login(url="/login/next_page/named/")
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
@override_settings(LOGIN_REDIRECT_URL="/custom/")
def test_login_next_page_overrides_login_redirect_url_setting(self):
response = self.login(url="/login/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
def test_login_redirect_url_overrides_next_page(self):
response = self.login(url="/login/next_page/?next=/test/")
self.assertRedirects(response, "/test/", fetch_redirect_response=False)
def test_login_redirect_url_overrides_get_default_redirect_url(self):
response = self.login(url="/login/get_default_redirect_url/?next=/test/")
self.assertRedirects(response, "/test/", fetch_redirect_response=False)
class LoginURLSettings(AuthViewsTestCase):
"""Tests for settings.LOGIN_URL."""
def assertLoginURLEquals(self, url):
response = self.client.get("/login_required/")
self.assertRedirects(response, url, fetch_redirect_response=False)
@override_settings(LOGIN_URL="/login/")
def test_standard_login_url(self):
self.assertLoginURLEquals("/login/?next=/login_required/")
@override_settings(LOGIN_URL="login")
def test_named_login_url(self):
self.assertLoginURLEquals("/login/?next=/login_required/")
@override_settings(LOGIN_URL="http://remote.example.com/login")
def test_remote_login_url(self):
quoted_next = quote("http://testserver/login_required/")
expected = "http://remote.example.com/login?next=%s" % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL="https:///login/")
def test_https_login_url(self):
quoted_next = quote("http://testserver/login_required/")
expected = "https:///login/?next=%s" % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL="/login/?pretty=1")
def test_login_url_with_querystring(self):
self.assertLoginURLEquals("/login/?pretty=1&next=/login_required/")
@override_settings(LOGIN_URL="http://remote.example.com/login/?next=/default/")
def test_remote_login_url_with_next_querystring(self):
quoted_next = quote("http://testserver/login_required/")
expected = "http://remote.example.com/login/?next=%s" % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL=reverse_lazy("login"))
def test_lazy_login_url(self):
self.assertLoginURLEquals("/login/?next=/login_required/")
class LoginRedirectUrlTest(AuthViewsTestCase):
"""Tests for settings.LOGIN_REDIRECT_URL."""
def assertLoginRedirectURLEqual(self, url):
response = self.login()
self.assertRedirects(response, url, fetch_redirect_response=False)
def test_default(self):
self.assertLoginRedirectURLEqual("/accounts/profile/")
@override_settings(LOGIN_REDIRECT_URL="/custom/")
def test_custom(self):
self.assertLoginRedirectURLEqual("/custom/")
@override_settings(LOGIN_REDIRECT_URL="password_reset")
def test_named(self):
self.assertLoginRedirectURLEqual("/password_reset/")
@override_settings(LOGIN_REDIRECT_URL="http://remote.example.com/welcome/")
def test_remote(self):
self.assertLoginRedirectURLEqual("http://remote.example.com/welcome/")
class RedirectToLoginTests(AuthViewsTestCase):
"""Tests for the redirect_to_login view"""
@override_settings(LOGIN_URL=reverse_lazy("login"))
def test_redirect_to_login_with_lazy(self):
login_redirect_response = redirect_to_login(next="/else/where/")
expected = "/login/?next=/else/where/"
self.assertEqual(expected, login_redirect_response.url)
@override_settings(LOGIN_URL=reverse_lazy("login"))
def test_redirect_to_login_with_lazy_and_unicode(self):
login_redirect_response = redirect_to_login(next="/else/where/झ/")
expected = "/login/?next=/else/where/%E0%A4%9D/"
self.assertEqual(expected, login_redirect_response.url)
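    # For reference, redirect_to_login() also accepts an explicit login_url
    # and redirect_field_name. A minimal sketch (hypothetical values):
    #
    #     redirect_to_login(
    #         next="/else/where/",
    #         login_url="/accounts/login/",
    #         redirect_field_name="goto",
    #     )
    #
    # would redirect to "/accounts/login/?goto=/else/where/".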
class LogoutThenLoginTests(AuthViewsTestCase):
"""Tests for the logout_then_login view"""
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
@override_settings(LOGIN_URL="/login/")
def test_default_logout_then_login(self):
self.login()
req = HttpRequest()
req.method = "POST"
csrf_token = get_token(req)
req.COOKIES[settings.CSRF_COOKIE_NAME] = csrf_token
req.POST = {"csrfmiddlewaretoken": csrf_token}
req.META["SERVER_NAME"] = "testserver"
req.META["SERVER_PORT"] = 80
req.session = self.client.session
response = logout_then_login(req)
self.confirm_logged_out()
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
def test_logout_then_login_with_custom_login(self):
self.login()
req = HttpRequest()
req.method = "POST"
csrf_token = get_token(req)
req.COOKIES[settings.CSRF_COOKIE_NAME] = csrf_token
req.POST = {"csrfmiddlewaretoken": csrf_token}
req.META["SERVER_NAME"] = "testserver"
req.META["SERVER_PORT"] = 80
req.session = self.client.session
response = logout_then_login(req, login_url="/custom/")
self.confirm_logged_out()
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
@ignore_warnings(category=RemovedInDjango50Warning)
@override_settings(LOGIN_URL="/login/")
def test_default_logout_then_login_get(self):
self.login()
req = HttpRequest()
req.method = "GET"
req.META["SERVER_NAME"] = "testserver"
req.META["SERVER_PORT"] = 80
req.session = self.client.session
response = logout_then_login(req)
# RemovedInDjango50Warning: When the deprecation ends, replace with
# self.assertEqual(response.status_code, 405)
self.confirm_logged_out()
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
class LoginRedirectAuthenticatedUser(AuthViewsTestCase):
dont_redirect_url = "/login/redirect_authenticated_user_default/"
do_redirect_url = "/login/redirect_authenticated_user/"
def test_default(self):
"""Stay on the login page by default."""
self.login()
response = self.client.get(self.dont_redirect_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["next"], "")
def test_guest(self):
"""If not logged in, stay on the same page."""
response = self.client.get(self.do_redirect_url)
self.assertEqual(response.status_code, 200)
def test_redirect(self):
"""If logged in, go to default redirected URL."""
self.login()
response = self.client.get(self.do_redirect_url)
self.assertRedirects(
response, "/accounts/profile/", fetch_redirect_response=False
)
@override_settings(LOGIN_REDIRECT_URL="/custom/")
def test_redirect_url(self):
"""If logged in, go to custom redirected URL."""
self.login()
response = self.client.get(self.do_redirect_url)
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
def test_redirect_param(self):
"""If next is specified as a GET parameter, go there."""
self.login()
url = self.do_redirect_url + "?next=/custom_next/"
response = self.client.get(url)
self.assertRedirects(response, "/custom_next/", fetch_redirect_response=False)
def test_redirect_loop(self):
"""
Detect a redirect loop if LOGIN_REDIRECT_URL is not correctly set,
with and without custom parameters.
"""
self.login()
msg = (
"Redirection loop for authenticated user detected. Check that "
"your LOGIN_REDIRECT_URL doesn't point to a login page."
)
with self.settings(LOGIN_REDIRECT_URL=self.do_redirect_url):
with self.assertRaisesMessage(ValueError, msg):
self.client.get(self.do_redirect_url)
url = self.do_redirect_url + "?bla=2"
with self.assertRaisesMessage(ValueError, msg):
self.client.get(url)
def test_permission_required_not_logged_in(self):
# Not logged in ...
with self.settings(LOGIN_URL=self.do_redirect_url):
# redirected to login.
response = self.client.get("/permission_required_redirect/", follow=True)
self.assertEqual(response.status_code, 200)
# exception raised.
response = self.client.get("/permission_required_exception/", follow=True)
self.assertEqual(response.status_code, 403)
# redirected to login.
response = self.client.get(
"/login_and_permission_required_exception/", follow=True
)
self.assertEqual(response.status_code, 200)
def test_permission_required_logged_in(self):
self.login()
# Already logged in...
with self.settings(LOGIN_URL=self.do_redirect_url):
# redirect loop encountered.
with self.assertRaisesMessage(
RedirectCycleError, "Redirect loop detected."
):
self.client.get("/permission_required_redirect/", follow=True)
# exception raised.
response = self.client.get("/permission_required_exception/", follow=True)
self.assertEqual(response.status_code, 403)
# exception raised.
response = self.client.get(
"/login_and_permission_required_exception/", follow=True
)
self.assertEqual(response.status_code, 403)
class LoginSuccessURLAllowedHostsTest(AuthViewsTestCase):
def test_success_url_allowed_hosts_same_host(self):
response = self.client.post(
"/login/allowed_hosts/",
{
"username": "testclient",
"password": "password",
"next": "https://testserver/home",
},
)
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(
response, "https://testserver/home", fetch_redirect_response=False
)
def test_success_url_allowed_hosts_safe_host(self):
response = self.client.post(
"/login/allowed_hosts/",
{
"username": "testclient",
"password": "password",
"next": "https://otherserver/home",
},
)
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(
response, "https://otherserver/home", fetch_redirect_response=False
)
def test_success_url_allowed_hosts_unsafe_host(self):
response = self.client.post(
"/login/allowed_hosts/",
{
"username": "testclient",
"password": "password",
"next": "https://evil/home",
},
)
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(
response, "/accounts/profile/", fetch_redirect_response=False
)
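    # These tests assume a URLconf entry along the lines of (illustrative):
    #
    #     path(
    #         "login/allowed_hosts/",
    #         LoginView.as_view(success_url_allowed_hosts={"otherserver"}),
    #     )
    #
    # so "otherserver" counts as a safe redirect target in addition to the
    # request's own host, while "evil" does not.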
class LogoutTest(AuthViewsTestCase):
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.post("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
def test_logout_with_post(self):
self.login()
response = self.client.post("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
def test_logout_with_get_raises_deprecation_warning(self):
self.login()
msg = (
"Log out via GET requests is deprecated and will be removed in Django 5.0. "
"Use POST requests for logging out."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
response = self.client.get("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.post("/logout/")
self.assertIn("site", response.context)
def test_logout_doesnt_cache(self):
"""
The logout() view should send "no-cache" headers for reasons described
in #25490.
"""
response = self.client.post("/logout/")
self.assertIn("no-store", response.headers["Cache-Control"])
def test_logout_with_overridden_redirect_url(self):
# Bug 11223
self.login()
response = self.client.post("/logout/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
response = self.client.post("/logout/next_page/?next=/login/")
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.post("/logout/next_page/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.post("/logout/?next=/login/")
self.assertRedirects(response, "/login/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.post("/logout/custom_query/?follow=/somewhere/")
self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_named_redirect(self):
"Logout resolves names or URLs passed as next_page."
self.login()
response = self.client.post("/logout/next_page/named/")
self.assertRedirects(
response, "/password_reset/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_success_url_allowed_hosts_same_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://testserver/")
self.assertRedirects(
response, "https://testserver/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_success_url_allowed_hosts_safe_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://otherserver/")
self.assertRedirects(
response, "https://otherserver/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_success_url_allowed_hosts_unsafe_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://evil/")
self.assertRedirects(
response, "/logout/allowed_hosts/", fetch_redirect_response=False
)
self.confirm_logged_out()
def test_security_check(self):
logout_url = reverse("logout")
# These URLs should not pass the security check.
bad_urls = (
"http://example.com",
"http:///example.com",
"https://example.com",
"ftp://example.com",
"///example.com",
"//example.com",
'javascript:alert("XSS")',
)
for bad_url in bad_urls:
with self.subTest(bad_url=bad_url):
nasty_url = "%(url)s?%(next)s=%(bad_url)s" % {
"url": logout_url,
"next": REDIRECT_FIELD_NAME,
"bad_url": quote(bad_url),
}
self.login()
response = self.client.post(nasty_url)
self.assertEqual(response.status_code, 302)
self.assertNotIn(
bad_url, response.url, "%s should be blocked" % bad_url
)
self.confirm_logged_out()
# These URLs should pass the security check.
good_urls = (
"/view/?param=http://example.com",
"/view/?param=https://example.com",
"/view?param=ftp://example.com",
"view/?param=//example.com",
"https://testserver/",
"HTTPS://testserver/",
"//testserver/",
"/url%20with%20spaces/",
)
for good_url in good_urls:
with self.subTest(good_url=good_url):
safe_url = "%(url)s?%(next)s=%(good_url)s" % {
"url": logout_url,
"next": REDIRECT_FIELD_NAME,
"good_url": quote(good_url),
}
self.login()
response = self.client.post(safe_url)
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
self.confirm_logged_out()
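    # The check exercised above is performed by
    # django.utils.http.url_has_allowed_host_and_scheme(), which rejects
    # redirects to other hosts (e.g. "//example.com") and unsafe schemes
    # such as javascript:, while plain paths and same-host URLs pass.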
def test_security_check_https(self):
logout_url = reverse("logout")
non_https_next_url = "http://testserver/"
url = "%(url)s?%(next)s=%(next_url)s" % {
"url": logout_url,
"next": REDIRECT_FIELD_NAME,
"next_url": quote(non_https_next_url),
}
self.login()
response = self.client.post(url, secure=True)
self.assertRedirects(response, logout_url, fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_preserve_language(self):
"""Language is preserved after logout."""
self.login()
self.client.post("/setlang/", {"language": "pl"})
self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "pl")
self.client.post("/logout/")
self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "pl")
@override_settings(LOGOUT_REDIRECT_URL="/custom/")
def test_logout_redirect_url_setting(self):
self.login()
response = self.client.post("/logout/")
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
@override_settings(LOGOUT_REDIRECT_URL="/custom/")
def test_logout_redirect_url_setting_allowed_hosts_unsafe_host(self):
self.login()
response = self.client.post("/logout/allowed_hosts/?next=https://evil/")
self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
@override_settings(LOGOUT_REDIRECT_URL="logout")
def test_logout_redirect_url_named_setting(self):
self.login()
response = self.client.post("/logout/")
self.assertContains(response, "Logged out")
self.confirm_logged_out()
def get_perm(Model, perm):
ct = ContentType.objects.get_for_model(Model)
return Permission.objects.get(content_type=ct, codename=perm)
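# Example usage (illustrative): grant a user the built-in "change_user"
# permission on the User model:
#
#     user.user_permissions.add(get_perm(User, "change_user"))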
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@override_settings(ROOT_URLCONF="auth_tests.urls_admin")
class ChangelistTests(AuthViewsTestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
# Make me a superuser before logging in.
User.objects.filter(username="testclient").update(
is_staff=True, is_superuser=True
)
def setUp(self):
self.login()
# Get the latest last_login value.
self.admin = User.objects.get(pk=self.u1.pk)
def get_user_data(self, user):
return {
"username": user.username,
"password": user.password,
"email": user.email,
"is_active": user.is_active,
"is_staff": user.is_staff,
"is_superuser": user.is_superuser,
"last_login_0": user.last_login.strftime("%Y-%m-%d"),
"last_login_1": user.last_login.strftime("%H:%M:%S"),
"initial-last_login_0": user.last_login.strftime("%Y-%m-%d"),
"initial-last_login_1": user.last_login.strftime("%H:%M:%S"),
"date_joined_0": user.date_joined.strftime("%Y-%m-%d"),
"date_joined_1": user.date_joined.strftime("%H:%M:%S"),
"initial-date_joined_0": user.date_joined.strftime("%Y-%m-%d"),
"initial-date_joined_1": user.date_joined.strftime("%H:%M:%S"),
"first_name": user.first_name,
"last_name": user.last_name,
}
# #20078 - users shouldn't be allowed to guess password hashes via
# repeated password__startswith queries.
def test_changelist_disallows_password_lookups(self):
# A lookup that tries to filter on password isn't OK
with self.assertLogs("django.security.DisallowedModelAdminLookup", "ERROR"):
response = self.client.get(
reverse("auth_test_admin:auth_user_changelist")
+ "?password__startswith=sha1$"
)
self.assertEqual(response.status_code, 400)
def test_user_change_email(self):
data = self.get_user_data(self.admin)
data["email"] = "new_" + data["email"]
response = self.client.post(
reverse("auth_test_admin:auth_user_change", args=(self.admin.pk,)), data
)
self.assertRedirects(response, reverse("auth_test_admin:auth_user_changelist"))
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "Changed Email address.")
def test_user_not_change(self):
response = self.client.post(
reverse("auth_test_admin:auth_user_change", args=(self.admin.pk,)),
self.get_user_data(self.admin),
)
self.assertRedirects(response, reverse("auth_test_admin:auth_user_changelist"))
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "No fields changed.")
def test_user_change_password(self):
user_change_url = reverse(
"auth_test_admin:auth_user_change", args=(self.admin.pk,)
)
password_change_url = reverse(
"auth_test_admin:auth_user_password_change", args=(self.admin.pk,)
)
response = self.client.get(user_change_url)
# Test the link inside password field help_text.
rel_link = re.search(
r'you can change the password using <a href="([^"]*)">this form</a>',
response.content.decode(),
)[1]
self.assertEqual(urljoin(user_change_url, rel_link), password_change_url)
response = self.client.post(
password_change_url,
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest("id")
self.assertEqual(row.get_change_message(), "Changed password.")
self.logout()
self.login(password="password1")
def test_user_change_different_user_password(self):
u = User.objects.get(email="[email protected]")
response = self.client.post(
reverse("auth_test_admin:auth_user_password_change", args=(u.pk,)),
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(
response, reverse("auth_test_admin:auth_user_change", args=(u.pk,))
)
row = LogEntry.objects.latest("id")
self.assertEqual(row.user_id, self.admin.pk)
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), "Changed password.")
def test_password_change_bad_url(self):
response = self.client.get(
reverse("auth_test_admin:auth_user_password_change", args=("foobar",))
)
self.assertEqual(response.status_code, 404)
@mock.patch("django.contrib.auth.admin.UserAdmin.has_change_permission")
def test_user_change_password_passes_user_to_has_change_permission(
self, has_change_permission
):
url = reverse(
"auth_test_admin:auth_user_password_change", args=(self.admin.pk,)
)
self.client.post(url, {"password1": "password1", "password2": "password1"})
(_request, user), _kwargs = has_change_permission.call_args
self.assertEqual(user.pk, self.admin.pk)
def test_view_user_password_is_readonly(self):
u = User.objects.get(username="testclient")
u.is_superuser = False
u.save()
original_password = u.password
u.user_permissions.add(get_perm(User, "view_user"))
response = self.client.get(
reverse("auth_test_admin:auth_user_change", args=(u.pk,)),
)
algo, salt, hash_string = u.password.split("$")
self.assertContains(response, '<div class="readonly">testclient</div>')
# ReadOnlyPasswordHashWidget is used to render the field.
self.assertContains(
response,
"<strong>algorithm</strong>: %s\n\n"
"<strong>salt</strong>: %s********************\n\n"
"<strong>hash</strong>: %s**************************\n\n"
% (
algo,
salt[:2],
hash_string[:6],
),
html=True,
)
# Value in POST data is ignored.
data = self.get_user_data(u)
data["password"] = "shouldnotchange"
change_url = reverse("auth_test_admin:auth_user_change", args=(u.pk,))
response = self.client.post(change_url, data)
self.assertEqual(response.status_code, 403)
u.refresh_from_db()
self.assertEqual(u.password, original_password)
@override_settings(
AUTH_USER_MODEL="auth_tests.UUIDUser",
ROOT_URLCONF="auth_tests.urls_custom_user_admin",
)
class UUIDUserTests(TestCase):
def test_admin_password_change(self):
u = UUIDUser.objects.create_superuser(
username="uuid", email="[email protected]", password="test"
)
self.assertTrue(self.client.login(username="uuid", password="test"))
user_change_url = reverse(
"custom_user_admin:auth_tests_uuiduser_change", args=(u.pk,)
)
response = self.client.get(user_change_url)
self.assertEqual(response.status_code, 200)
password_change_url = reverse(
"custom_user_admin:auth_user_password_change", args=(u.pk,)
)
response = self.client.get(password_change_url)
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="uuiduser_form">')
        # A LogEntry is created with pk=1, which breaks a FK constraint on MySQL.
with connection.constraint_checks_disabled():
response = self.client.post(
password_change_url,
{
"password1": "password1",
"password2": "password1",
},
)
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest("id")
        self.assertEqual(row.user_id, 1)  # hardcoded in CustomUserAdmin.log_change()
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), "Changed password.")
# The LogEntry.user column isn't altered to a UUID type so it's set to
# an integer manually in CustomUserAdmin to avoid an error. To avoid a
# constraint error, delete the entry before constraints are checked
# after the test.
row.delete()
|
5d8a7b51e02c9f058824f79c3f7236ab9862ffb966c369679dc23c36444dc7d4 | import threading
import time
from unittest import mock
from multiple_database.routers import TestRouter
from django.core.exceptions import FieldError
from django.db import (
DatabaseError,
NotSupportedError,
connection,
connections,
router,
transaction,
)
from django.test import (
TransactionTestCase,
override_settings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext
from .models import (
City,
CityCountryProxy,
Country,
EUCity,
EUCountry,
Person,
PersonProfile,
)
class SelectForUpdateTests(TransactionTestCase):
available_apps = ["select_for_update"]
def setUp(self):
# This is executed in autocommit mode so that code in
# run_select_for_update can see this data.
self.country1 = Country.objects.create(name="Belgium")
self.country2 = Country.objects.create(name="France")
self.city1 = City.objects.create(name="Liberchies", country=self.country1)
self.city2 = City.objects.create(name="Samois-sur-Seine", country=self.country2)
self.person = Person.objects.create(
name="Reinhardt", born=self.city1, died=self.city2
)
self.person_profile = PersonProfile.objects.create(person=self.person)
        # We need another database connection in a transaction to test that
        # one connection issuing a SELECT ... FOR UPDATE will block.
self.new_connection = connection.copy()
def tearDown(self):
try:
self.end_blocking_transaction()
except (DatabaseError, AttributeError):
pass
self.new_connection.close()
def start_blocking_transaction(self):
self.new_connection.set_autocommit(False)
# Start a blocking transaction. At some point,
# end_blocking_transaction() should be called.
self.cursor = self.new_connection.cursor()
sql = "SELECT * FROM %(db_table)s %(for_update)s;" % {
"db_table": Person._meta.db_table,
"for_update": self.new_connection.ops.for_update_sql(),
}
self.cursor.execute(sql, ())
self.cursor.fetchone()
def end_blocking_transaction(self):
# Roll back the blocking transaction.
self.cursor.close()
self.new_connection.rollback()
self.new_connection.set_autocommit(True)
def has_for_update_sql(self, queries, **kwargs):
        # Examine the SQL that was executed to determine whether it
        # contains the 'SELECT ... FOR UPDATE' stanza.
for_update_sql = connection.ops.for_update_sql(**kwargs)
return any(for_update_sql in query["sql"] for query in queries)
@skipUnlessDBFeature("has_select_for_update")
def test_for_update_sql_generated(self):
"""
The backend's FOR UPDATE variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update())
self.assertTrue(self.has_for_update_sql(ctx.captured_queries))
@skipUnlessDBFeature("has_select_for_update_nowait")
def test_for_update_sql_generated_nowait(self):
"""
The backend's FOR UPDATE NOWAIT variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update(nowait=True))
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, nowait=True))
@skipUnlessDBFeature("has_select_for_update_skip_locked")
def test_for_update_sql_generated_skip_locked(self):
"""
The backend's FOR UPDATE SKIP LOCKED variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update(skip_locked=True))
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, skip_locked=True))
@skipUnlessDBFeature("has_select_for_no_key_update")
def test_update_sql_generated_no_key(self):
"""
The backend's FOR NO KEY UPDATE variant appears in generated SQL when
select_for_update() is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update(no_key=True))
self.assertIs(self.has_for_update_sql(ctx.captured_queries, no_key=True), True)
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_generated_of(self):
"""
The backend's FOR UPDATE OF variant appears in the generated SQL when
select_for_update() is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
Person.objects.select_related(
"born__country",
)
.select_for_update(
of=("born__country",),
)
.select_for_update(of=("self", "born__country"))
)
features = connections["default"].features
if features.select_for_update_of_column:
expected = [
'select_for_update_person"."id',
'select_for_update_country"."entity_ptr_id',
]
else:
expected = ["select_for_update_person", "select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_inheritance_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(EUCountry.objects.select_for_update(of=("self",)))
if connection.features.select_for_update_of_column:
expected = ['select_for_update_eucountry"."country_ptr_id']
else:
expected = ["select_for_update_eucountry"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_inheritance_ptr_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCountry.objects.select_for_update(
of=(
"self",
"country_ptr",
)
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_eucountry"."country_ptr_id',
'select_for_update_country"."entity_ptr_id',
]
else:
expected = ["select_for_update_eucountry", "select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_related_model_inheritance_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCity.objects.select_related("country").select_for_update(
of=("self", "country"),
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_eucity"."id',
'select_for_update_eucountry"."country_ptr_id',
]
else:
expected = ["select_for_update_eucity", "select_for_update_eucountry"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_inheritance_nested_ptr_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCity.objects.select_related("country").select_for_update(
of=(
"self",
"country__country_ptr",
),
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_eucity"."id',
'select_for_update_country"."entity_ptr_id',
]
else:
expected = ["select_for_update_eucity", "select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_multilevel_model_inheritance_ptr_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCountry.objects.select_for_update(
of=("country_ptr", "country_ptr__entity_ptr"),
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_country"."entity_ptr_id',
'select_for_update_entity"."id',
]
else:
expected = ["select_for_update_country", "select_for_update_entity"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_proxy_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
CityCountryProxy.objects.select_related("country",).select_for_update(
of=("country",),
)
)
if connection.features.select_for_update_of_column:
expected = ['select_for_update_country"."entity_ptr_id']
else:
expected = ["select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_of_followed_by_values(self):
with transaction.atomic():
values = list(Person.objects.select_for_update(of=("self",)).values("pk"))
self.assertEqual(values, [{"pk": self.person.pk}])
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_of_followed_by_values_list(self):
with transaction.atomic():
values = list(
Person.objects.select_for_update(of=("self",)).values_list("pk")
)
self.assertEqual(values, [(self.person.pk,)])
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_of_self_when_self_is_not_selected(self):
"""
select_for_update(of=['self']) when the only columns selected are from
related tables.
"""
with transaction.atomic():
values = list(
Person.objects.select_related("born")
.select_for_update(of=("self",))
.values("born__name")
)
self.assertEqual(values, [{"born__name": self.city1.name}])
@skipUnlessDBFeature(
"has_select_for_update_of",
"supports_select_for_update_with_limit",
)
def test_for_update_of_with_exists(self):
with transaction.atomic():
qs = Person.objects.select_for_update(of=("self", "born"))
self.assertIs(qs.exists(), True)
@skipUnlessDBFeature("has_select_for_update_nowait", "supports_transactions")
def test_nowait_raises_error_on_block(self):
"""
If nowait is specified, we expect an error to be raised rather
than blocking.
"""
self.start_blocking_transaction()
status = []
thread = threading.Thread(
target=self.run_select_for_update,
args=(status,),
kwargs={"nowait": True},
)
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.assertIsInstance(status[-1], DatabaseError)
@skipUnlessDBFeature("has_select_for_update_skip_locked", "supports_transactions")
def test_skip_locked_skips_locked_rows(self):
"""
        If skip_locked is specified, the locked row is skipped, resulting in
Person.DoesNotExist.
"""
self.start_blocking_transaction()
status = []
thread = threading.Thread(
target=self.run_select_for_update,
args=(status,),
kwargs={"skip_locked": True},
)
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.assertIsInstance(status[-1], Person.DoesNotExist)
@skipIfDBFeature("has_select_for_update_nowait")
@skipUnlessDBFeature("has_select_for_update")
def test_unsupported_nowait_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE NOWAIT is run on
a database backend that supports FOR UPDATE but not NOWAIT.
"""
with self.assertRaisesMessage(
NotSupportedError, "NOWAIT is not supported on this database backend."
):
with transaction.atomic():
Person.objects.select_for_update(nowait=True).get()
@skipIfDBFeature("has_select_for_update_skip_locked")
@skipUnlessDBFeature("has_select_for_update")
def test_unsupported_skip_locked_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE SKIP LOCKED is run
on a database backend that supports FOR UPDATE but not SKIP LOCKED.
"""
with self.assertRaisesMessage(
NotSupportedError, "SKIP LOCKED is not supported on this database backend."
):
with transaction.atomic():
Person.objects.select_for_update(skip_locked=True).get()
@skipIfDBFeature("has_select_for_update_of")
@skipUnlessDBFeature("has_select_for_update")
def test_unsupported_of_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE OF... is run on
a database backend that supports FOR UPDATE but not OF.
"""
msg = "FOR UPDATE OF is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
Person.objects.select_for_update(of=("self",)).get()
@skipIfDBFeature("has_select_for_no_key_update")
@skipUnlessDBFeature("has_select_for_update")
    def test_unsupported_no_key_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR NO KEY UPDATE... is run
on a database backend that supports FOR UPDATE but not NO KEY.
"""
msg = "FOR NO KEY UPDATE is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
Person.objects.select_for_update(no_key=True).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_unrelated_of_argument_raises_error(self):
"""
FieldError is raised if a non-relation field is specified in of=(...).
"""
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): %s. "
"Only relational fields followed in the query are allowed. "
"Choices are: self, born, born__country, "
"born__country__entity_ptr."
)
invalid_of = [
("nonexistent",),
("name",),
("born__nonexistent",),
("born__name",),
("born__nonexistent", "born__name"),
]
for of in invalid_of:
with self.subTest(of=of):
with self.assertRaisesMessage(FieldError, msg % ", ".join(of)):
with transaction.atomic():
Person.objects.select_related(
"born__country"
).select_for_update(of=of).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_related_but_unselected_of_argument_raises_error(self):
"""
FieldError is raised if a relation field that is not followed in the
query is specified in of=(...).
"""
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): %s. "
"Only relational fields followed in the query are allowed. "
"Choices are: self, born, profile."
)
for name in ["born__country", "died", "died__country"]:
with self.subTest(name=name):
with self.assertRaisesMessage(FieldError, msg % name):
with transaction.atomic():
Person.objects.select_related("born", "profile",).exclude(
profile=None
).select_for_update(of=(name,)).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_model_inheritance_of_argument_raises_error_ptr_in_choices(self):
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): "
"name. Only relational fields followed in the query are allowed. "
"Choices are: self, %s."
)
with self.assertRaisesMessage(
FieldError,
msg % "country, country__country_ptr, country__country_ptr__entity_ptr",
):
with transaction.atomic():
EUCity.objects.select_related(
"country",
).select_for_update(of=("name",)).get()
with self.assertRaisesMessage(
FieldError, msg % "country_ptr, country_ptr__entity_ptr"
):
with transaction.atomic():
EUCountry.objects.select_for_update(of=("name",)).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_model_proxy_of_argument_raises_error_proxy_field_in_choices(self):
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): "
"name. Only relational fields followed in the query are allowed. "
"Choices are: self, country, country__entity_ptr."
)
with self.assertRaisesMessage(FieldError, msg):
with transaction.atomic():
CityCountryProxy.objects.select_related(
"country",
).select_for_update(of=("name",)).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_reverse_one_to_one_of_arguments(self):
"""
Reverse OneToOneFields may be included in of=(...) as long as NULLs
are excluded because LEFT JOIN isn't allowed in SELECT FOR UPDATE.
"""
with transaction.atomic():
person = (
Person.objects.select_related(
"profile",
)
.exclude(profile=None)
.select_for_update(of=("profile",))
.get()
)
self.assertEqual(person.profile, self.person_profile)
@skipUnlessDBFeature("has_select_for_update")
def test_for_update_after_from(self):
features_class = connections["default"].features.__class__
attribute_to_patch = "%s.%s.for_update_after_from" % (
features_class.__module__,
features_class.__name__,
)
with mock.patch(attribute_to_patch, return_value=True):
with transaction.atomic():
self.assertIn(
"FOR UPDATE WHERE",
str(Person.objects.filter(name="foo").select_for_update().query),
)
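    # for_update_after_from is a BaseDatabaseFeatures flag meant for backends
    # (e.g. third-party SQL Server backends) that emit the locking clause
    # immediately after FROM rather than at the end of the statement.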
@skipUnlessDBFeature("has_select_for_update", "supports_transactions")
def test_for_update_requires_transaction(self):
"""
A TransactionManagementError is raised
when a select_for_update query is executed outside of a transaction.
"""
msg = "select_for_update cannot be used outside of a transaction."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
list(Person.objects.select_for_update())
@skipUnlessDBFeature("has_select_for_update", "supports_transactions")
def test_for_update_requires_transaction_only_in_execution(self):
"""
No TransactionManagementError is raised
when select_for_update is invoked outside of a transaction -
only when the query is executed.
"""
people = Person.objects.select_for_update()
msg = "select_for_update cannot be used outside of a transaction."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
list(people)
@skipUnlessDBFeature("supports_select_for_update_with_limit")
def test_select_for_update_with_limit(self):
other = Person.objects.create(name="Grappeli", born=self.city1, died=self.city2)
with transaction.atomic():
qs = list(Person.objects.order_by("pk").select_for_update()[1:2])
self.assertEqual(qs[0], other)
@skipIfDBFeature("supports_select_for_update_with_limit")
def test_unsupported_select_for_update_with_limit(self):
msg = (
"LIMIT/OFFSET is not supported with select_for_update on this database "
"backend."
)
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
list(Person.objects.order_by("pk").select_for_update()[1:2])
def run_select_for_update(self, status, **kwargs):
"""
Utility method that runs a SELECT FOR UPDATE against all
Person instances. After the select_for_update, it attempts
to update the name of the only record, save, and commit.
This function expects to run in a separate thread.
"""
status.append("started")
try:
            # We need to enter transaction management again, as this is done
            # on a per-thread basis.
with transaction.atomic():
person = Person.objects.select_for_update(**kwargs).get()
person.name = "Fred"
person.save()
except (DatabaseError, Person.DoesNotExist) as e:
status.append(e)
finally:
# This method is run in a separate thread. It uses its own
# database connection. Close it without waiting for the GC.
connection.close()
@skipUnlessDBFeature("has_select_for_update")
@skipUnlessDBFeature("supports_transactions")
def test_block(self):
"""
A thread running a select_for_update that accesses rows being touched
by a similar operation on another connection blocks correctly.
"""
# First, let's start the transaction in our thread.
self.start_blocking_transaction()
# Now, try it again using the ORM's select_for_update
# facility. Do this in a separate thread.
status = []
thread = threading.Thread(target=self.run_select_for_update, args=(status,))
# The thread should immediately block, but we'll sleep
# for a bit to make sure.
thread.start()
sanity_count = 0
while len(status) != 1 and sanity_count < 10:
sanity_count += 1
time.sleep(1)
if sanity_count >= 10:
raise ValueError("Thread did not run and block")
# Check the person hasn't been updated. Since this isn't
# using FOR UPDATE, it won't block.
p = Person.objects.get(pk=self.person.pk)
self.assertEqual("Reinhardt", p.name)
# When we end our blocking transaction, our thread should
# be able to continue.
self.end_blocking_transaction()
thread.join(5.0)
# Check the thread has finished. Assuming it has, we should
# find that it has updated the person's name.
self.assertFalse(thread.is_alive())
# We must commit the transaction to ensure that MySQL gets a fresh read,
# since by default it runs in REPEATABLE READ mode
transaction.commit()
p = Person.objects.get(pk=self.person.pk)
self.assertEqual("Fred", p.name)
@skipUnlessDBFeature("has_select_for_update", "supports_transactions")
def test_raw_lock_not_available(self):
"""
Running a raw query which can't obtain a FOR UPDATE lock raises
        the correct exception.
"""
self.start_blocking_transaction()
def raw(status):
try:
list(
Person.objects.raw(
"SELECT * FROM %s %s"
% (
Person._meta.db_table,
connection.ops.for_update_sql(nowait=True),
)
)
)
except DatabaseError as e:
status.append(e)
finally:
# This method is run in a separate thread. It uses its own
# database connection. Close it without waiting for the GC.
# Connection cannot be closed on Oracle because cursor is still
# open.
if connection.vendor != "oracle":
connection.close()
status = []
thread = threading.Thread(target=raw, kwargs={"status": status})
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.assertIsInstance(status[-1], DatabaseError)
@skipUnlessDBFeature("has_select_for_update")
@override_settings(DATABASE_ROUTERS=[TestRouter()])
def test_select_for_update_on_multidb(self):
query = Person.objects.select_for_update()
self.assertEqual(router.db_for_write(Person), query.db)
@skipUnlessDBFeature("has_select_for_update")
def test_select_for_update_with_get(self):
with transaction.atomic():
person = Person.objects.select_for_update().get(name="Reinhardt")
self.assertEqual(person.name, "Reinhardt")
def test_nowait_and_skip_locked(self):
with self.assertRaisesMessage(
ValueError, "The nowait option cannot be used with skip_locked."
):
Person.objects.select_for_update(nowait=True, skip_locked=True)
def test_ordered_select_for_update(self):
"""
Subqueries should respect ordering as an ORDER BY clause may be useful
to specify a row locking order to prevent deadlocks (#27193).
"""
with transaction.atomic():
qs = Person.objects.filter(
id__in=Person.objects.order_by("-id").select_for_update()
)
self.assertIn("ORDER BY", str(qs.query))
|
30e59dfe6713e99759566a1b75fdcdd42f95c3ea287181cd9710951dbd10d295 | import datetime
import math
import re
from decimal import Decimal
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
Avg,
Case,
Count,
DateField,
DateTimeField,
DecimalField,
DurationField,
Exists,
F,
FloatField,
IntegerField,
Max,
Min,
OuterRef,
Q,
StdDev,
Subquery,
Sum,
TimeField,
Value,
Variance,
When,
)
from django.db.models.expressions import Func, RawSQL
from django.db.models.functions import (
Cast,
Coalesce,
Greatest,
Now,
Pi,
TruncDate,
TruncHour,
)
from django.test import TestCase
from django.test.testcases import skipUnlessDBFeature
from django.test.utils import Approximate, CaptureQueriesContext
from django.utils import timezone
from .models import Author, Book, Publisher, Store
class NowUTC(Now):
template = "CURRENT_TIMESTAMP"
output_field = DateTimeField()
def as_sql(self, compiler, connection, **extra_context):
if connection.features.test_now_utc_template:
extra_context["template"] = connection.features.test_now_utc_template
return super().as_sql(compiler, connection, **extra_context)
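# NowUTC is purely a test helper: it renders as CURRENT_TIMESTAMP unless the
# backend supplies a test_now_utc_template override. A minimal usage sketch
# (illustrative):
#
#     Book.objects.annotate(now=NowUTC())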
class AggregateTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="Brad Dayley", age=45)
cls.a4 = Author.objects.create(name="James Bennett", age=29)
cls.a5 = Author.objects.create(name="Jeffrey Forcier", age=37)
cls.a6 = Author.objects.create(name="Paul Bissex", age=29)
cls.a7 = Author.objects.create(name="Wesley J. Chun", age=25)
cls.a8 = Author.objects.create(name="Peter Norvig", age=57)
cls.a9 = Author.objects.create(name="Stuart Russell", age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(
name="Apress", num_awards=3, duration=datetime.timedelta(days=1)
)
cls.p2 = Publisher.objects.create(
name="Sams", num_awards=1, duration=datetime.timedelta(days=2)
)
cls.p3 = Publisher.objects.create(name="Prentice Hall", num_awards=7)
cls.p4 = Publisher.objects.create(name="Morgan Kaufmann", num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn="159059725",
name="The Definitive Guide to Django: Web Development Done Right",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn="067232959",
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
rating=3.0,
price=Decimal("23.09"),
contact=cls.a3,
publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn="159059996",
name="Practical Django Projects",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a4,
publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.b4 = Book.objects.create(
isbn="013235613",
name="Python Web Development with Django",
pages=350,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a5,
publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3),
)
cls.b5 = Book.objects.create(
isbn="013790395",
name="Artificial Intelligence: A Modern Approach",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a8,
publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15),
)
cls.b6 = Book.objects.create(
isbn="155860191",
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a8,
publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
s2 = Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30),
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def test_empty_aggregate(self):
self.assertEqual(Author.objects.aggregate(), {})
def test_aggregate_in_order_by(self):
msg = (
"Using an aggregate in order_by() without also including it in "
"annotate() is not allowed: Avg(F(book__rating)"
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.values("age").order_by(Avg("book__rating"))
def test_single_aggregate(self):
vals = Author.objects.aggregate(Avg("age"))
self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
def test_multiple_aggregates(self):
vals = Author.objects.aggregate(Sum("age"), Avg("age"))
self.assertEqual(
vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)}
)
def test_filter_aggregate(self):
vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
self.assertEqual(vals, {"age__sum": 254})
def test_related_aggregate(self):
vals = Author.objects.aggregate(Avg("friends__age"))
self.assertEqual(vals, {"friends__age__avg": Approximate(34.07, places=2)})
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
self.assertEqual(vals, {"authors__age__avg": Approximate(38.2857, places=2)})
vals = Author.objects.filter(name__contains="a").aggregate(Avg("book__rating"))
self.assertEqual(vals, {"book__rating__avg": 4.0})
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
self.assertEqual(vals, {"publisher__num_awards__sum": 30})
vals = Publisher.objects.aggregate(Sum("book__price"))
self.assertEqual(vals, {"book__price__sum": Decimal("270.27")})
def test_aggregate_multi_join(self):
vals = Store.objects.aggregate(Max("books__authors__age"))
self.assertEqual(vals, {"books__authors__age__max": 57})
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
self.assertEqual(vals, {"book__publisher__num_awards__min": 1})
def test_aggregate_alias(self):
vals = Store.objects.filter(name="Amazon.com").aggregate(
amazon_mean=Avg("books__rating")
)
self.assertEqual(vals, {"amazon_mean": Approximate(4.08, places=2)})
def test_aggregate_transform(self):
vals = Store.objects.aggregate(min_month=Min("original_opening__month"))
self.assertEqual(vals, {"min_month": 3})
def test_aggregate_join_transform(self):
vals = Publisher.objects.aggregate(min_year=Min("book__pubdate__year"))
self.assertEqual(vals, {"min_year": 1991})
def test_annotate_basic(self):
self.assertQuerysetEqual(
Book.objects.annotate().order_by("pk"),
[
"The Definitive Guide to Django: Web Development Done Right",
"Sams Teach Yourself Django in 24 Hours",
"Practical Django Projects",
"Python Web Development with Django",
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
],
lambda b: b.name,
)
books = Book.objects.annotate(mean_age=Avg("authors__age"))
b = books.get(pk=self.b1.pk)
self.assertEqual(
b.name, "The Definitive Guide to Django: Web Development Done Right"
)
self.assertEqual(b.mean_age, 34.5)
def test_annotate_defer(self):
qs = (
Book.objects.annotate(page_sum=Sum("pages"))
.defer("name")
.filter(pk=self.b1.pk)
)
rows = [
(
self.b1.id,
"159059725",
447,
"The Definitive Guide to Django: Web Development Done Right",
)
]
self.assertQuerysetEqual(
qs.order_by("pk"), rows, lambda r: (r.id, r.isbn, r.page_sum, r.name)
)
def test_annotate_defer_select_related(self):
qs = (
Book.objects.select_related("contact")
.annotate(page_sum=Sum("pages"))
.defer("name")
.filter(pk=self.b1.pk)
)
rows = [
(
self.b1.id,
"159059725",
447,
"Adrian Holovaty",
"The Definitive Guide to Django: Web Development Done Right",
)
]
self.assertQuerysetEqual(
qs.order_by("pk"),
rows,
lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name),
)
def test_annotate_m2m(self):
books = (
Book.objects.filter(rating__lt=4.5)
.annotate(Avg("authors__age"))
.order_by("name")
)
self.assertQuerysetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 51.5),
("Practical Django Projects", 29.0),
("Python Web Development with Django", Approximate(30.3, places=1)),
("Sams Teach Yourself Django in 24 Hours", 45.0),
],
lambda b: (b.name, b.authors__age__avg),
)
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
self.assertQuerysetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 2),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
1,
),
("Practical Django Projects", 1),
("Python Web Development with Django", 3),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 2),
],
lambda b: (b.name, b.num_authors),
)
def test_backwards_m2m_annotate(self):
authors = (
Author.objects.filter(name__contains="a")
.annotate(Avg("book__rating"))
.order_by("name")
)
self.assertQuerysetEqual(
authors,
[
("Adrian Holovaty", 4.5),
("Brad Dayley", 3.0),
("Jacob Kaplan-Moss", 4.5),
("James Bennett", 4.0),
("Paul Bissex", 4.0),
("Stuart Russell", 4.0),
],
lambda a: (a.name, a.book__rating__avg),
)
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
self.assertQuerysetEqual(
authors,
[
("Adrian Holovaty", 1),
("Brad Dayley", 1),
("Jacob Kaplan-Moss", 1),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 1),
("Peter Norvig", 2),
("Stuart Russell", 1),
("Wesley J. Chun", 1),
],
lambda a: (a.name, a.num_books),
)
def test_reverse_fkey_annotate(self):
books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
self.assertQuerysetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 7),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
9,
),
("Practical Django Projects", 3),
("Python Web Development with Django", 7),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 3),
],
lambda b: (b.name, b.publisher__num_awards__sum),
)
publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
self.assertQuerysetEqual(
publishers,
[
("Apress", Decimal("59.69")),
("Jonno's House of Books", None),
("Morgan Kaufmann", Decimal("75.00")),
("Prentice Hall", Decimal("112.49")),
("Sams", Decimal("23.09")),
],
lambda p: (p.name, p.book__price__sum),
)
def test_annotate_values(self):
books = list(
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values()
)
self.assertEqual(
books,
[
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values("pk", "isbn", "mean_age")
)
self.assertEqual(
list(books),
[
{
"pk": self.b1.pk,
"isbn": "159059725",
"mean_age": 34.5,
}
],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values("name")
)
self.assertEqual(
list(books),
[{"name": "The Definitive Guide to Django: Web Development Done Right"}],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.values()
.annotate(mean_age=Avg("authors__age"))
)
self.assertEqual(
list(books),
[
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
],
)
books = (
Book.objects.values("rating")
.annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age"))
.order_by("rating")
)
self.assertEqual(
list(books),
[
{
"rating": 3.0,
"n_authors": 1,
"mean_age": 45.0,
},
{
"rating": 4.0,
"n_authors": 6,
"mean_age": Approximate(37.16, places=1),
},
{
"rating": 4.5,
"n_authors": 2,
"mean_age": 34.5,
},
{
"rating": 5.0,
"n_authors": 1,
"mean_age": 57.0,
},
],
)
authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
self.assertQuerysetEqual(
authors,
[
("Adrian Holovaty", 32.0),
("Brad Dayley", None),
("Jacob Kaplan-Moss", 29.5),
("James Bennett", 34.0),
("Jeffrey Forcier", 27.0),
("Paul Bissex", 31.0),
("Peter Norvig", 46.0),
("Stuart Russell", 57.0),
("Wesley J. Chun", Approximate(33.66, places=1)),
],
lambda a: (a.name, a.friends__age__avg),
)
def test_count(self):
vals = Book.objects.aggregate(Count("rating"))
self.assertEqual(vals, {"rating__count": 6})
def test_count_star(self):
with self.assertNumQueries(1) as ctx:
Book.objects.aggregate(n=Count("*"))
sql = ctx.captured_queries[0]["sql"]
self.assertIn("SELECT COUNT(*) ", sql)
def test_count_distinct_expression(self):
aggs = Book.objects.aggregate(
distinct_ratings=Count(
Case(When(pages__gt=300, then="rating")), distinct=True
),
)
self.assertEqual(aggs["distinct_ratings"], 4)
def test_distinct_on_aggregate(self):
for aggregate, expected_result in (
(Avg, 4.125),
(Count, 4),
(Sum, 16.5),
):
with self.subTest(aggregate=aggregate.__name__):
books = Book.objects.aggregate(
ratings=aggregate("rating", distinct=True)
)
self.assertEqual(books["ratings"], expected_result)
def test_non_grouped_annotation_not_in_group_by(self):
"""
An annotation not included in values() before an aggregate should be
excluded from the group by clause.
"""
qs = (
Book.objects.annotate(xprice=F("price"))
.filter(rating=4.0)
.values("rating")
.annotate(count=Count("publisher_id", distinct=True))
.values("count", "rating")
.order_by("count")
)
self.assertEqual(list(qs), [{"rating": 4.0, "count": 2}])
def test_grouped_annotation_in_group_by(self):
"""
An annotation included in values() before an aggregate should be
included in the group by clause.
"""
qs = (
Book.objects.annotate(xprice=F("price"))
.filter(rating=4.0)
.values("rating", "xprice")
.annotate(count=Count("publisher_id", distinct=True))
.values("count", "rating")
.order_by("count")
)
self.assertEqual(
list(qs),
[
{"rating": 4.0, "count": 1},
{"rating": 4.0, "count": 2},
],
)
def test_fkey_aggregate(self):
explicit = list(Author.objects.annotate(Count("book__id")))
implicit = list(Author.objects.annotate(Count("book")))
self.assertCountEqual(explicit, implicit)
def test_annotate_ordering(self):
books = (
Book.objects.values("rating")
.annotate(oldest=Max("authors__age"))
.order_by("oldest", "rating")
)
self.assertEqual(
list(books),
[
{"rating": 4.5, "oldest": 35},
{"rating": 3.0, "oldest": 45},
{"rating": 4.0, "oldest": 57},
{"rating": 5.0, "oldest": 57},
],
)
books = (
Book.objects.values("rating")
.annotate(oldest=Max("authors__age"))
.order_by("-oldest", "-rating")
)
self.assertEqual(
list(books),
[
{"rating": 5.0, "oldest": 57},
{"rating": 4.0, "oldest": 57},
{"rating": 3.0, "oldest": 45},
{"rating": 4.5, "oldest": 35},
],
)
def test_aggregate_annotation(self):
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(
Avg("num_authors")
)
self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_avg_duration_field(self):
# Explicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg("duration", output_field=DurationField())),
{"duration__avg": datetime.timedelta(days=1, hours=12)},
)
# Implicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg("duration")),
{"duration__avg": datetime.timedelta(days=1, hours=12)},
)
def test_sum_duration_field(self):
self.assertEqual(
Publisher.objects.aggregate(Sum("duration", output_field=DurationField())),
{"duration__sum": datetime.timedelta(days=3)},
)
def test_sum_distinct_aggregate(self):
"""
Sum on a distinct() QuerySet should aggregate only the distinct items.
"""
authors = Author.objects.filter(book__in=[self.b5, self.b6])
self.assertEqual(authors.count(), 3)
distinct_authors = authors.distinct()
self.assertEqual(distinct_authors.count(), 2)
# Selected author ages are 57 and 46
age_sum = distinct_authors.aggregate(Sum("age"))
self.assertEqual(age_sum["age__sum"], 103)
def test_filtering(self):
p = Publisher.objects.create(name="Expensive Publisher", num_awards=0)
Book.objects.create(
name="ExpensiveBook1",
pages=1,
isbn="111",
rating=3.5,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 1),
)
Book.objects.create(
name="ExpensiveBook2",
pages=1,
isbn="222",
rating=4.0,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 2),
)
Book.objects.create(
name="ExpensiveBook3",
pages=1,
isbn="333",
rating=4.5,
price=Decimal("35"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 3),
)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
["Apress", "Prentice Hall", "Expensive Publisher"],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by(
"pk"
)
self.assertQuerysetEqual(
publishers,
[
"Apress",
"Apress",
"Sams",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1, book__price__lt=Decimal("40.0"))
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
["Apress", "Prentice Hall", "Expensive Publisher"],
lambda p: p.name,
)
publishers = (
Publisher.objects.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerysetEqual(publishers, ["Apress"], lambda p: p.name)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__range=[1, 3])
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
[
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__range=[1, 2])
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
["Apress", "Sams", "Prentice Hall", "Morgan Kaufmann"],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__in=[1, 3])
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
["Sams", "Morgan Kaufmann", "Expensive Publisher"],
lambda p: p.name,
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(
num_books__isnull=True
)
self.assertEqual(len(publishers), 0)
def test_annotation(self):
vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id"))
self.assertEqual(vals, {"friends__id__count": 2})
books = (
Book.objects.annotate(num_authors=Count("authors__name"))
.filter(num_authors__exact=2)
.order_by("pk")
)
self.assertQuerysetEqual(
books,
[
"The Definitive Guide to Django: Web Development Done Right",
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name,
)
authors = (
Author.objects.annotate(num_friends=Count("friends__id", distinct=True))
.filter(num_friends=0)
.order_by("pk")
)
self.assertQuerysetEqual(authors, ["Brad Dayley"], lambda a: a.name)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerysetEqual(
publishers, ["Apress", "Prentice Hall"], lambda p: p.name
)
publishers = (
Publisher.objects.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
)
self.assertQuerysetEqual(publishers, ["Apress"], lambda p: p.name)
books = Book.objects.annotate(num_authors=Count("authors__id")).filter(
authors__name__contains="Norvig", num_authors__gt=1
)
self.assertQuerysetEqual(
books, ["Artificial Intelligence: A Modern Approach"], lambda b: b.name
)
def test_more_aggregation(self):
a = Author.objects.get(name__contains="Norvig")
b = Book.objects.get(name__contains="Done Right")
b.authors.add(a)
b.save()
vals = (
Book.objects.annotate(num_authors=Count("authors__id"))
.filter(authors__name__contains="Norvig", num_authors__gt=1)
.aggregate(Avg("rating"))
)
self.assertEqual(vals, {"rating__avg": 4.25})
def test_even_more_aggregate(self):
publishers = (
Publisher.objects.annotate(
earliest_book=Min("book__pubdate"),
)
.exclude(earliest_book=None)
.order_by("earliest_book")
.values(
"earliest_book",
"num_awards",
"id",
"name",
)
)
self.assertEqual(
list(publishers),
[
{
"earliest_book": datetime.date(1991, 10, 15),
"num_awards": 9,
"id": self.p4.id,
"name": "Morgan Kaufmann",
},
{
"earliest_book": datetime.date(1995, 1, 15),
"num_awards": 7,
"id": self.p3.id,
"name": "Prentice Hall",
},
{
"earliest_book": datetime.date(2007, 12, 6),
"num_awards": 3,
"id": self.p1.id,
"name": "Apress",
},
{
"earliest_book": datetime.date(2008, 3, 3),
"num_awards": 1,
"id": self.p2.id,
"name": "Sams",
},
],
)
vals = Store.objects.aggregate(
Max("friday_night_closing"), Min("original_opening")
)
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
)
def test_annotate_values_list(self):
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("pk", "isbn", "mean_age")
)
self.assertEqual(list(books), [(self.b1.id, "159059725", 34.5)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("isbn")
)
self.assertEqual(list(books), [("159059725",)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("mean_age")
)
self.assertEqual(list(books), [(34.5,)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("mean_age", flat=True)
)
self.assertEqual(list(books), [34.5])
books = (
Book.objects.values_list("price")
.annotate(count=Count("price"))
.order_by("-count", "price")
)
self.assertEqual(
list(books),
[
(Decimal("29.69"), 2),
(Decimal("23.09"), 1),
(Decimal("30"), 1),
(Decimal("75"), 1),
(Decimal("82.8"), 1),
],
)
def test_dates_with_aggregation(self):
"""
.dates() returns a distinct set of dates when applied to a
QuerySet with aggregation.
Refs #18056. Previously, .dates() would return distinct (date_kind,
aggregation) sets, in this case (year, num_authors), so 2008 would be
returned twice because there are books from 2008 with a different
number of authors.
"""
dates = Book.objects.annotate(num_authors=Count("authors")).dates(
"pubdate", "year"
)
self.assertSequenceEqual(
dates,
[
datetime.date(1991, 1, 1),
datetime.date(1995, 1, 1),
datetime.date(2007, 1, 1),
datetime.date(2008, 1, 1),
],
)
def test_values_aggregation(self):
# Refs #20782
max_rating = Book.objects.values("rating").aggregate(max_rating=Max("rating"))
self.assertEqual(max_rating["max_rating"], 5)
max_books_per_rating = (
Book.objects.values("rating")
.annotate(books_per_rating=Count("id"))
.aggregate(Max("books_per_rating"))
)
self.assertEqual(max_books_per_rating, {"books_per_rating__max": 3})
def test_ticket17424(self):
"""
Doing exclude() on a foreign model after annotate() doesn't crash.
"""
all_books = list(Book.objects.values_list("pk", flat=True).order_by("pk"))
annotated_books = Book.objects.order_by("pk").annotate(one=Count("id"))
        # The value doesn't matter; we just need any negative
        # constraint on a related model that's a no-op.
excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__")
# Try to generate query tree
str(excluded_books.query)
self.assertQuerysetEqual(excluded_books, all_books, lambda x: x.pk)
# Check internal state
self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type)
self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type)
def test_ticket12886(self):
"""
Aggregation over sliced queryset works correctly.
"""
qs = Book.objects.order_by("-rating")[0:3]
vals = qs.aggregate(average_top3_rating=Avg("rating"))["average_top3_rating"]
self.assertAlmostEqual(vals, 4.5, places=2)
def test_ticket11881(self):
"""
Subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE or
select_related() stuff.
"""
qs = (
Book.objects.select_for_update()
.order_by("pk")
.select_related("publisher")
.annotate(max_pk=Max("pk"))
)
with CaptureQueriesContext(connection) as captured_queries:
qs.aggregate(avg_pk=Avg("max_pk"))
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]["sql"].lower()
self.assertNotIn("for update", qstr)
forced_ordering = connection.ops.force_no_ordering()
if forced_ordering:
            # If the backend needs to force an ordering, make sure it's the
            # only ORDER BY clause present in the query.
self.assertEqual(
re.findall(r"order by (\w+)", qstr),
[", ".join(f[1][0] for f in forced_ordering).lower()],
)
else:
self.assertNotIn("order by", qstr)
self.assertEqual(qstr.count(" join "), 0)
def test_decimal_max_digits_has_no_effect(self):
Book.objects.all().delete()
a1 = Author.objects.first()
p1 = Publisher.objects.first()
thedate = timezone.now()
for i in range(10):
Book.objects.create(
isbn="abcde{}".format(i),
name="none",
pages=10,
rating=4.0,
price=9999.98,
contact=a1,
publisher=p1,
pubdate=thedate,
)
book = Book.objects.aggregate(price_sum=Sum("price"))
self.assertEqual(book["price_sum"], Decimal("99999.80"))
def test_nonaggregate_aggregation_throws(self):
with self.assertRaisesMessage(TypeError, "fail is not an aggregate expression"):
Book.objects.aggregate(fail=F("price"))
def test_nonfield_annotation(self):
book = Book.objects.annotate(val=Max(Value(2))).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(
val=Max(Value(2), output_field=IntegerField())
).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
def test_annotation_expressions(self):
authors = Author.objects.annotate(
combined_ages=Sum(F("age") + F("friends__age"))
).order_by("name")
authors2 = Author.objects.annotate(
combined_ages=Sum("age") + Sum("friends__age")
).order_by("name")
for qs in (authors, authors2):
self.assertQuerysetEqual(
qs,
[
("Adrian Holovaty", 132),
("Brad Dayley", None),
("Jacob Kaplan-Moss", 129),
("James Bennett", 63),
("Jeffrey Forcier", 128),
("Paul Bissex", 120),
("Peter Norvig", 103),
("Stuart Russell", 103),
("Wesley J. Chun", 176),
],
lambda a: (a.name, a.combined_ages),
)
def test_aggregation_expressions(self):
a1 = Author.objects.aggregate(av_age=Sum("age") / Count("*"))
a2 = Author.objects.aggregate(av_age=Sum("age") / Count("age"))
a3 = Author.objects.aggregate(av_age=Avg("age"))
self.assertEqual(a1, {"av_age": 37})
self.assertEqual(a2, {"av_age": 37})
self.assertEqual(a3, {"av_age": Approximate(37.4, places=1)})
def test_avg_decimal_field(self):
v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg("price")))[
"avg_price"
]
self.assertIsInstance(v, Decimal)
self.assertEqual(v, Approximate(Decimal("47.39"), places=2))
def test_order_of_precedence(self):
p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg("price") + 2) * 3)
self.assertEqual(p1, {"avg_price": Approximate(Decimal("148.18"), places=2)})
p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg("price") + 2 * 3)
self.assertEqual(p2, {"avg_price": Approximate(Decimal("53.39"), places=2)})
def test_combine_different_types(self):
msg = (
"Cannot infer type of '+' expression involving these types: FloatField, "
"DecimalField. You must set output_field."
)
qs = Book.objects.annotate(sums=Sum("rating") + Sum("pages") + Sum("price"))
with self.assertRaisesMessage(FieldError, msg):
qs.first()
with self.assertRaisesMessage(FieldError, msg):
qs.first()
b1 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=IntegerField())
).get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
b2 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=FloatField())
).get(pk=self.b4.pk)
self.assertEqual(b2.sums, 383.69)
b3 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=DecimalField())
).get(pk=self.b4.pk)
self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2))
def test_complex_aggregations_require_kwarg(self):
with self.assertRaisesMessage(
TypeError, "Complex annotations require an alias"
):
Author.objects.annotate(Sum(F("age") + F("friends__age")))
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Author.objects.aggregate(Sum("age") / Count("age"))
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Author.objects.aggregate(Sum(1))
def test_aggregate_over_complex_annotation(self):
qs = Author.objects.annotate(combined_ages=Sum(F("age") + F("friends__age")))
age = qs.aggregate(max_combined_age=Max("combined_ages"))
self.assertEqual(age["max_combined_age"], 176)
age = qs.aggregate(max_combined_age_doubled=Max("combined_ages") * 2)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages")
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages"),
sum_combined_age=Sum("combined_ages"),
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
self.assertEqual(age["sum_combined_age"], 954)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages"),
sum_combined_age_doubled=Sum("combined_ages") + Sum("combined_ages"),
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
self.assertEqual(age["sum_combined_age_doubled"], 954 * 2)
def test_values_annotation_with_expression(self):
        # Ensure the F() is promoted to the GROUP BY clause.
qs = Author.objects.values("name").annotate(another_age=Sum("age") + F("age"))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a["another_age"], 68)
qs = qs.annotate(friend_count=Count("friends"))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a["friend_count"], 2)
qs = (
qs.annotate(combined_age=Sum("age") + F("friends__age"))
.filter(name="Adrian Holovaty")
.order_by("-combined_age")
)
self.assertEqual(
list(qs),
[
{
"name": "Adrian Holovaty",
"another_age": 68,
"friend_count": 1,
"combined_age": 69,
},
{
"name": "Adrian Holovaty",
"another_age": 68,
"friend_count": 1,
"combined_age": 63,
},
],
)
vals = qs.values("name", "combined_age")
self.assertEqual(
list(vals),
[
{"name": "Adrian Holovaty", "combined_age": 69},
{"name": "Adrian Holovaty", "combined_age": 63},
],
)
def test_annotate_values_aggregate(self):
alias_age = (
Author.objects.annotate(age_alias=F("age"))
.values(
"age_alias",
)
.aggregate(sum_age=Sum("age_alias"))
)
age = Author.objects.values("age").aggregate(sum_age=Sum("age"))
self.assertEqual(alias_age["sum_age"], age["sum_age"])
def test_annotate_over_annotate(self):
author = (
Author.objects.annotate(age_alias=F("age"))
.annotate(sum_age=Sum("age_alias"))
.get(name="Adrian Holovaty")
)
other_author = Author.objects.annotate(sum_age=Sum("age")).get(
name="Adrian Holovaty"
)
self.assertEqual(author.sum_age, other_author.sum_age)
def test_aggregate_over_aggregate(self):
msg = "Cannot compute Avg('age'): 'age' is an aggregate"
with self.assertRaisesMessage(FieldError, msg):
Author.objects.annotate(age_alias=F("age"),).aggregate(
age=Sum(F("age")),
avg_age=Avg(F("age")),
)
def test_annotated_aggregate_over_annotated_aggregate(self):
with self.assertRaisesMessage(
FieldError, "Cannot compute Sum('id__max'): 'id__max' is an aggregate"
):
Book.objects.annotate(Max("id")).annotate(Sum("id__max"))
class MyMax(Max):
def as_sql(self, compiler, connection):
self.set_source_expressions(self.get_source_expressions()[0:1])
return super().as_sql(compiler, connection)
with self.assertRaisesMessage(
FieldError, "Cannot compute Max('id__max'): 'id__max' is an aggregate"
):
Book.objects.annotate(Max("id")).annotate(my_max=MyMax("id__max", "price"))
def test_multi_arg_aggregate(self):
class MyMax(Max):
output_field = DecimalField()
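            # Keep only the first source expression so that the
            # multi-argument aggregate still compiles to valid SQL.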
def as_sql(self, compiler, connection):
copy = self.copy()
copy.set_source_expressions(copy.get_source_expressions()[0:1])
return super(MyMax, copy).as_sql(compiler, connection)
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Book.objects.aggregate(MyMax("pages", "price"))
with self.assertRaisesMessage(
TypeError, "Complex annotations require an alias"
):
Book.objects.annotate(MyMax("pages", "price"))
Book.objects.aggregate(max_field=MyMax("pages", "price"))
def test_add_implementation(self):
class MySum(Sum):
pass
# test completely changing how the output is rendered
def lower_case_function_override(self, compiler, connection):
sql, params = compiler.compile(self.source_expressions[0])
substitutions = {
"function": self.function.lower(),
"expressions": sql,
"distinct": "",
}
substitutions.update(self.extra)
return self.template % substitutions, params
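        # Registering the function as "as_<vendor>" makes the SQL compiler
        # use it for the current database backend only.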
setattr(MySum, "as_" + connection.vendor, lower_case_function_override)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("sum("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test changing the dict and delegating
def lower_case_function_super(self, compiler, connection):
self.extra["function"] = self.function.lower()
return super(MySum, self).as_sql(compiler, connection)
setattr(MySum, "as_" + connection.vendor, lower_case_function_super)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("sum("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test overriding all parts of the template
def be_evil(self, compiler, connection):
substitutions = {"function": "MAX", "expressions": "2", "distinct": ""}
substitutions.update(self.extra)
return self.template % substitutions, ()
setattr(MySum, "as_" + connection.vendor, be_evil)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("MAX("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 2)
def test_complex_values_aggregation(self):
max_rating = Book.objects.values("rating").aggregate(
double_max_rating=Max("rating") + Max("rating")
)
self.assertEqual(max_rating["double_max_rating"], 5 * 2)
max_books_per_rating = (
Book.objects.values("rating")
.annotate(books_per_rating=Count("id") + 5)
.aggregate(Max("books_per_rating"))
)
self.assertEqual(max_books_per_rating, {"books_per_rating__max": 3 + 5})
def test_expression_on_aggregation(self):
qs = (
Publisher.objects.annotate(
price_or_median=Greatest(
Avg("book__rating", output_field=DecimalField()), Avg("book__price")
)
)
.filter(price_or_median__gte=F("num_awards"))
.order_by("num_awards")
)
self.assertQuerysetEqual(qs, [1, 3, 7, 9], lambda v: v.num_awards)
qs2 = (
Publisher.objects.annotate(
rating_or_num_awards=Greatest(
Avg("book__rating"), F("num_awards"), output_field=FloatField()
)
)
.filter(rating_or_num_awards__gt=F("num_awards"))
.order_by("num_awards")
)
self.assertQuerysetEqual(qs2, [1, 3], lambda v: v.num_awards)
def test_arguments_must_be_expressions(self):
msg = "QuerySet.aggregate() received non-expression(s): %s."
with self.assertRaisesMessage(TypeError, msg % FloatField()):
Book.objects.aggregate(FloatField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.aggregate(is_book=True)
with self.assertRaisesMessage(
TypeError, msg % ", ".join([str(FloatField()), "True"])
):
Book.objects.aggregate(FloatField(), Avg("price"), is_book=True)
def test_aggregation_subquery_annotation(self):
"""Subquery annotations are excluded from the GROUP BY if they are
not explicitly grouped against."""
latest_book_pubdate_qs = (
Book.objects.filter(publisher=OuterRef("pk"))
.order_by("-pubdate")
.values("pubdate")[:1]
)
publisher_qs = Publisher.objects.annotate(
latest_book_pubdate=Subquery(latest_book_pubdate_qs),
).annotate(count=Count("book"))
with self.assertNumQueries(1) as ctx:
list(publisher_qs)
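        # Two SELECTs: the outer query plus the subquery annotation; the
        # subquery must not be repeated in the GROUP BY clause.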
self.assertEqual(ctx[0]["sql"].count("SELECT"), 2)
# The GROUP BY should not be by alias either.
self.assertEqual(ctx[0]["sql"].lower().count("latest_book_pubdate"), 1)
def test_aggregation_subquery_annotation_exists(self):
latest_book_pubdate_qs = (
Book.objects.filter(publisher=OuterRef("pk"))
.order_by("-pubdate")
.values("pubdate")[:1]
)
publisher_qs = Publisher.objects.annotate(
latest_book_pubdate=Subquery(latest_book_pubdate_qs),
count=Count("book"),
)
self.assertTrue(publisher_qs.exists())
def test_aggregation_filter_exists(self):
publishers_having_more_than_one_book_qs = (
Book.objects.values("publisher")
.annotate(cnt=Count("isbn"))
.filter(cnt__gt=1)
)
query = publishers_having_more_than_one_book_qs.query.exists(
using=connection.alias
)
_, _, group_by = query.get_compiler(connection=connection).pre_sql_setup()
self.assertEqual(len(group_by), 1)
def test_aggregation_exists_annotation(self):
published_books = Book.objects.filter(publisher=OuterRef("pk"))
publisher_qs = Publisher.objects.annotate(
published_book=Exists(published_books),
count=Count("book"),
).values_list("name", flat=True)
self.assertCountEqual(
list(publisher_qs),
[
"Apress",
"Morgan Kaufmann",
"Jonno's House of Books",
"Prentice Hall",
"Sams",
],
)
def test_aggregation_subquery_annotation_values(self):
"""
Subquery annotations and external aliases are excluded from the GROUP
BY if they are not selected.
"""
books_qs = (
Book.objects.annotate(
first_author_the_same_age=Subquery(
Author.objects.filter(
age=OuterRef("contact__friends__age"),
)
.order_by("age")
.values("id")[:1],
)
)
.filter(
publisher=self.p1,
first_author_the_same_age__isnull=False,
)
.annotate(
min_age=Min("contact__friends__age"),
)
.values("name", "min_age")
.order_by("name")
)
self.assertEqual(
list(books_qs),
[
{"name": "Practical Django Projects", "min_age": 34},
{
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"min_age": 29,
},
],
)
def test_aggregation_subquery_annotation_values_collision(self):
books_rating_qs = Book.objects.filter(
publisher=OuterRef("pk"),
price=Decimal("29.69"),
).values("rating")
publisher_qs = (
Publisher.objects.filter(
book__contact__age__gt=20,
name=self.p1.name,
)
.annotate(
rating=Subquery(books_rating_qs),
contacts_count=Count("book__contact"),
)
.values("rating")
.annotate(total_count=Count("rating"))
)
self.assertEqual(
list(publisher_qs),
[
{"rating": 4.0, "total_count": 2},
],
)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_subquery_annotation_multivalued(self):
"""
Subquery annotations must be included in the GROUP BY if they use
potentially multivalued relations (contain the LOOKUP_SEP).
"""
subquery_qs = Author.objects.filter(
pk=OuterRef("pk"),
book__name=OuterRef("book__name"),
).values("pk")
author_qs = Author.objects.annotate(
subquery_id=Subquery(subquery_qs),
).annotate(count=Count("book"))
self.assertEqual(author_qs.count(), Author.objects.count())
def test_aggregation_order_by_not_selected_annotation_values(self):
result_asc = [
self.b4.pk,
self.b3.pk,
self.b1.pk,
self.b2.pk,
self.b5.pk,
self.b6.pk,
]
result_desc = result_asc[::-1]
tests = [
("min_related_age", result_asc),
("-min_related_age", result_desc),
(F("min_related_age"), result_asc),
(F("min_related_age").asc(), result_asc),
(F("min_related_age").desc(), result_desc),
]
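        # The ordering may be given as a plain name, a prefixed name, or an
        # F() expression; all must resolve to the unselected annotation.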
for ordering, expected_result in tests:
with self.subTest(ordering=ordering):
books_qs = (
Book.objects.annotate(
min_age=Min("authors__age"),
)
.annotate(
min_related_age=Coalesce("min_age", "contact__age"),
)
.order_by(ordering)
.values_list("pk", flat=True)
)
self.assertEqual(list(books_qs), expected_result)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_group_by_subquery_annotation(self):
"""
Subquery annotations are included in the GROUP BY if they are
grouped against.
"""
long_books_count_qs = (
Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=400,
)
.values("publisher")
.annotate(count=Count("pk"))
.values("count")
)
groups = [
Subquery(long_books_count_qs),
long_books_count_qs,
long_books_count_qs.query,
]
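        # Grouping by a Subquery, a QuerySet, or a raw Query should all
        # behave identically.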
for group in groups:
with self.subTest(group=group.__class__.__name__):
long_books_count_breakdown = Publisher.objects.values_list(
group,
).annotate(total=Count("*"))
self.assertEqual(dict(long_books_count_breakdown), {None: 1, 1: 4})
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_group_by_exists_annotation(self):
"""
Exists annotations are included in the GROUP BY if they are
grouped against.
"""
long_books_qs = Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=800,
)
has_long_books_breakdown = Publisher.objects.values_list(
Exists(long_books_qs),
).annotate(total=Count("*"))
self.assertEqual(dict(has_long_books_breakdown), {True: 2, False: 3})
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_subquery_annotation_related_field(self):
publisher = Publisher.objects.create(name=self.a9.name, num_awards=2)
book = Book.objects.create(
isbn="159059999",
name="Test book.",
pages=819,
rating=2.5,
price=Decimal("14.44"),
contact=self.a9,
publisher=publisher,
pubdate=datetime.date(2019, 12, 6),
)
book.authors.add(self.a5, self.a6, self.a7)
books_qs = (
Book.objects.annotate(
contact_publisher=Subquery(
Publisher.objects.filter(
pk=OuterRef("publisher"),
name=OuterRef("contact__name"),
).values("name")[:1],
)
)
.filter(
contact_publisher__isnull=False,
)
.annotate(count=Count("authors"))
)
self.assertSequenceEqual(books_qs, [book])
# FIXME: GROUP BY doesn't need to include a subquery with
# non-multivalued JOINs, see Col.possibly_multivalued (refs #31150):
# with self.assertNumQueries(1) as ctx:
# self.assertSequenceEqual(books_qs, [book])
# self.assertEqual(ctx[0]['sql'].count('SELECT'), 2)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_nested_subquery_outerref(self):
publisher_with_same_name = Publisher.objects.filter(
id__in=Subquery(
Publisher.objects.filter(
name=OuterRef(OuterRef("publisher__name")),
).values("id"),
),
).values(publisher_count=Count("id"))[:1]
books_breakdown = Book.objects.annotate(
publisher_count=Subquery(publisher_with_same_name),
authors_count=Count("authors"),
).values_list("publisher_count", flat=True)
self.assertSequenceEqual(books_breakdown, [1] * 6)
def test_filter_in_subquery_or_aggregation(self):
"""
Filtering against an aggregate requires the usage of the HAVING clause.
If such a filter is unionized to a non-aggregate one the latter will
also need to be moved to the HAVING clause and have its grouping
columns used in the GROUP BY.
When this is done with a subquery the specialized logic in charge of
using outer reference columns to group should be used instead of the
subquery itself as the latter might return multiple rows.
"""
authors = Author.objects.annotate(
Count("book"),
).filter(Q(book__count__gt=0) | Q(pk__in=Book.objects.values("authors")))
self.assertQuerysetEqual(authors, Author.objects.all(), ordered=False)
def test_aggregation_random_ordering(self):
"""Random() is not included in the GROUP BY when used for ordering."""
authors = Author.objects.annotate(contact_count=Count("book")).order_by("?")
self.assertQuerysetEqual(
authors,
[
("Adrian Holovaty", 1),
("Jacob Kaplan-Moss", 1),
("Brad Dayley", 1),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 1),
("Wesley J. Chun", 1),
("Stuart Russell", 1),
("Peter Norvig", 2),
],
lambda a: (a.name, a.contact_count),
ordered=False,
)
def test_empty_result_optimization(self):
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Sum("num_awards"),
books_count=Count("book"),
),
{
"sum_awards": None,
"books_count": 0,
},
)
# Expression without empty_result_set_value forces queries to be
# executed even if they would return an empty result set.
raw_books_count = Func("book", function="COUNT")
raw_books_count.contains_aggregate = True
with self.assertNumQueries(1):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Sum("num_awards"),
books_count=raw_books_count,
),
{
"sum_awards": None,
"books_count": 0,
},
)
def test_coalesced_empty_result_set(self):
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Sum("num_awards"), 0),
)["sum_awards"],
0,
)
# Multiple expressions.
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Sum("num_awards"), None, 0),
)["sum_awards"],
0,
)
# Nested coalesce.
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Coalesce(Sum("num_awards"), None), 0),
)["sum_awards"],
0,
)
# Expression coalesce.
with self.assertNumQueries(1):
self.assertIsInstance(
Store.objects.none().aggregate(
latest_opening=Coalesce(
Max("original_opening"),
RawSQL("CURRENT_TIMESTAMP", []),
),
)["latest_opening"],
datetime.datetime,
)
def test_aggregation_default_unsupported_by_count(self):
msg = "Count does not allow default."
with self.assertRaisesMessage(TypeError, msg):
Count("age", default=0)
def test_aggregation_default_unset(self):
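        # No author is older than 100, so each aggregate operates on an empty
        # set of rows and, without a default, returns None.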
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age"),
)
self.assertIsNone(result["value"])
def test_aggregation_default_zero(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=0),
)
self.assertEqual(result["value"], 0)
def test_aggregation_default_integer(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=21),
)
self.assertEqual(result["value"], 21)
def test_aggregation_default_expression(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=Value(5) * Value(7)),
)
self.assertEqual(result["value"], 35)
def test_aggregation_default_group_by(self):
qs = (
Publisher.objects.values("name")
.annotate(
books=Count("book"),
pages=Sum("book__pages", default=0),
)
.filter(books=0)
)
self.assertSequenceEqual(
qs,
[{"name": "Jonno's House of Books", "books": 0, "pages": 0}],
)
def test_aggregation_default_compound_expression(self):
# Scale rating to a percentage; default to 50% if no books published.
formula = Avg("book__rating", default=2.5) * 20.0
queryset = Publisher.objects.annotate(rating=formula).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "rating"),
[
{"name": "Apress", "rating": 85.0},
{"name": "Jonno's House of Books", "rating": 50.0},
{"name": "Morgan Kaufmann", "rating": 100.0},
{"name": "Prentice Hall", "rating": 80.0},
{"name": "Sams", "rating": 60.0},
],
)
def test_aggregation_default_using_time_from_python(self):
expr = Min(
"store__friday_night_closing",
filter=~Q(store__name="Amazon.com"),
default=datetime.time(17),
)
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL 8.0+ & MariaDB.
expr.default = Cast(expr.default, TimeField())
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{"isbn": "013235613", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "013790395",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "067232959", "oldest_store_opening": datetime.time(17)},
{"isbn": "155860191", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "159059725",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "159059996", "oldest_store_opening": datetime.time(21, 30)},
],
)
def test_aggregation_default_using_time_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min(
"store__friday_night_closing",
filter=~Q(store__name="Amazon.com"),
default=TruncHour(NowUTC(), output_field=TimeField()),
)
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{"isbn": "013235613", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "013790395",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "067232959", "oldest_store_opening": datetime.time(now.hour)},
{"isbn": "155860191", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "159059725",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "159059996", "oldest_store_opening": datetime.time(21, 30)},
],
)
def test_aggregation_default_using_date_from_python(self):
expr = Min("book__pubdate", default=datetime.date(1970, 1, 1))
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL 5.7+ & MariaDB.
expr.default = Cast(expr.default, DateField())
queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "earliest_pubdate"),
[
{"name": "Apress", "earliest_pubdate": datetime.date(2007, 12, 6)},
{
"name": "Jonno's House of Books",
"earliest_pubdate": datetime.date(1970, 1, 1),
},
{
"name": "Morgan Kaufmann",
"earliest_pubdate": datetime.date(1991, 10, 15),
},
{
"name": "Prentice Hall",
"earliest_pubdate": datetime.date(1995, 1, 15),
},
{"name": "Sams", "earliest_pubdate": datetime.date(2008, 3, 3)},
],
)
def test_aggregation_default_using_date_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min("book__pubdate", default=TruncDate(NowUTC()))
queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "earliest_pubdate"),
[
{"name": "Apress", "earliest_pubdate": datetime.date(2007, 12, 6)},
{"name": "Jonno's House of Books", "earliest_pubdate": now.date()},
{
"name": "Morgan Kaufmann",
"earliest_pubdate": datetime.date(1991, 10, 15),
},
{
"name": "Prentice Hall",
"earliest_pubdate": datetime.date(1995, 1, 15),
},
{"name": "Sams", "earliest_pubdate": datetime.date(2008, 3, 3)},
],
)
def test_aggregation_default_using_datetime_from_python(self):
expr = Min(
"store__original_opening",
filter=~Q(store__name="Amazon.com"),
default=datetime.datetime(1970, 1, 1),
)
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL 8.0+ & MariaDB.
expr.default = Cast(expr.default, DateTimeField())
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{
"isbn": "013235613",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "013790395",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "067232959",
"oldest_store_opening": datetime.datetime(1970, 1, 1),
},
{
"isbn": "155860191",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "159059725",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "159059996",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
],
)
def test_aggregation_default_using_datetime_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min(
"store__original_opening",
filter=~Q(store__name="Amazon.com"),
default=TruncHour(NowUTC(), output_field=DateTimeField()),
)
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{
"isbn": "013235613",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "013790395",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "067232959",
"oldest_store_opening": now.replace(
minute=0, second=0, microsecond=0, tzinfo=None
),
},
{
"isbn": "155860191",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "159059725",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "159059996",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
],
)
def test_aggregation_default_using_duration_from_python(self):
result = Publisher.objects.filter(num_awards__gt=3).aggregate(
value=Sum("duration", default=datetime.timedelta(0)),
)
self.assertEqual(result["value"], datetime.timedelta(0))
def test_aggregation_default_using_duration_from_database(self):
result = Publisher.objects.filter(num_awards__gt=3).aggregate(
value=Sum("duration", default=Now() - Now()),
)
self.assertEqual(result["value"], datetime.timedelta(0))
def test_aggregation_default_using_decimal_from_python(self):
result = Book.objects.filter(rating__lt=3.0).aggregate(
value=Sum("price", default=Decimal("0.00")),
)
self.assertEqual(result["value"], Decimal("0.00"))
def test_aggregation_default_using_decimal_from_database(self):
result = Book.objects.filter(rating__lt=3.0).aggregate(
value=Sum("price", default=Pi()),
)
self.assertAlmostEqual(result["value"], Decimal.from_float(math.pi), places=6)
def test_aggregation_default_passed_another_aggregate(self):
result = Book.objects.aggregate(
value=Sum("price", filter=Q(rating__lt=3.0), default=Avg("pages") / 10.0),
)
self.assertAlmostEqual(result["value"], Decimal("61.72"), places=2)
def test_aggregation_default_after_annotation(self):
result = Publisher.objects.annotate(
double_num_awards=F("num_awards") * 2,
).aggregate(value=Sum("double_num_awards", default=0))
self.assertEqual(result["value"], 40)
def test_aggregation_default_not_in_aggregate(self):
result = Publisher.objects.annotate(
avg_rating=Avg("book__rating", default=2.5),
).aggregate(Sum("num_awards"))
self.assertEqual(result["num_awards__sum"], 20)
def test_exists_none_with_aggregate(self):
qs = Book.objects.annotate(
count=Count("id"),
exists=Exists(Author.objects.none()),
)
self.assertEqual(len(qs), 6)
def test_alias_sql_injection(self):
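        # A crafted alias containing quoting, a semicolon, and an SQL comment
        # must be rejected before it can reach the generated SQL.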
crafted_alias = """injected_name" from "aggregation_author"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Author.objects.aggregate(**{crafted_alias: Avg("age")})
def test_exists_extra_where_with_aggregate(self):
qs = Book.objects.annotate(
count=Count("id"),
exists=Exists(Author.objects.extra(where=["1=0"])),
)
self.assertEqual(len(qs), 6)
|
b476a402ad125936fa4636e71197175dcb289faa70f5d737873ccae166b7d0f7 | from functools import wraps
from django.db import IntegrityError, connections, transaction
from django.test import TestCase, skipUnlessDBFeature
from django.test.testcases import DatabaseOperationForbidden, TestData
from .models import Car, Person, PossessedCar
class TestTestCase(TestCase):
@skipUnlessDBFeature("can_defer_constraint_checks")
@skipUnlessDBFeature("supports_foreign_keys")
def test_fixture_teardown_checks_constraints(self):
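        # Disable the rollback of test atomics so that _fixture_teardown()
        # performs the deferred constraint checks against the dangling
        # foreign keys created below.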
rollback_atomics = self._rollback_atomics
self._rollback_atomics = lambda connection: None # noop
try:
car = PossessedCar.objects.create(car_id=1, belongs_to_id=1)
with self.assertRaises(IntegrityError), transaction.atomic():
self._fixture_teardown()
car.delete()
finally:
self._rollback_atomics = rollback_atomics
def test_disallowed_database_connection(self):
message = (
"Database connections to 'other' are not allowed in this test. "
"Add 'other' to test_utils.test_testcase.TestTestCase.databases to "
"ensure proper test isolation and silence this failure."
)
with self.assertRaisesMessage(DatabaseOperationForbidden, message):
connections["other"].connect()
with self.assertRaisesMessage(DatabaseOperationForbidden, message):
connections["other"].temporary_connection()
def test_disallowed_database_queries(self):
message = (
"Database queries to 'other' are not allowed in this test. "
"Add 'other' to test_utils.test_testcase.TestTestCase.databases to "
"ensure proper test isolation and silence this failure."
)
with self.assertRaisesMessage(DatabaseOperationForbidden, message):
Car.objects.using("other").get()
@skipUnlessDBFeature("supports_transactions")
def test_reset_sequences(self):
old_reset_sequences = self.reset_sequences
self.reset_sequences = True
msg = "reset_sequences cannot be used on TestCase instances"
try:
with self.assertRaisesMessage(TypeError, msg):
self._fixture_setup()
finally:
self.reset_sequences = old_reset_sequences
def assert_no_queries(test):
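    """Decorator asserting that the wrapped test runs no database queries."""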
@wraps(test)
def inner(self):
with self.assertNumQueries(0):
test(self)
return inner
# On databases with no transaction support (for instance, MySQL with the MyISAM
# engine), setUpTestData() is called before each test, so there is no need to
# clone class level test data.
@skipUnlessDBFeature("supports_transactions")
class TestDataTests(TestCase):
    # setUpTestData re-assignments are also wrapped in TestData.
jim_douglas = None
@classmethod
def setUpTestData(cls):
cls.jim_douglas = Person.objects.create(name="Jim Douglas")
cls.car = Car.objects.create(name="1963 Volkswagen Beetle")
cls.herbie = cls.jim_douglas.possessed_cars.create(
car=cls.car,
belongs_to=cls.jim_douglas,
)
cls.person_binary = Person.objects.create(name="Person", data=b"binary data")
cls.person_binary_get = Person.objects.get(pk=cls.person_binary.pk)
@assert_no_queries
def test_class_attribute_equality(self):
"""Class level test data is equal to instance level test data."""
self.assertEqual(self.jim_douglas, self.__class__.jim_douglas)
self.assertEqual(self.person_binary, self.__class__.person_binary)
self.assertEqual(self.person_binary_get, self.__class__.person_binary_get)
@assert_no_queries
def test_class_attribute_identity(self):
"""
Class level test data is not identical to instance level test data.
"""
self.assertIsNot(self.jim_douglas, self.__class__.jim_douglas)
self.assertIsNot(self.person_binary, self.__class__.person_binary)
self.assertIsNot(self.person_binary_get, self.__class__.person_binary_get)
@assert_no_queries
def test_binaryfield_data_type(self):
self.assertEqual(bytes(self.person_binary.data), b"binary data")
self.assertEqual(bytes(self.person_binary_get.data), b"binary data")
self.assertEqual(
type(self.person_binary_get.data),
type(self.__class__.person_binary_get.data),
)
self.assertEqual(
type(self.person_binary.data),
type(self.__class__.person_binary.data),
)
@assert_no_queries
def test_identity_preservation(self):
"""Identity of test data is preserved between accesses."""
self.assertIs(self.jim_douglas, self.jim_douglas)
@assert_no_queries
def test_known_related_objects_identity_preservation(self):
"""Known related objects identity is preserved."""
self.assertIs(self.herbie.car, self.car)
self.assertIs(self.herbie.belongs_to, self.jim_douglas)
def test_repr(self):
self.assertEqual(
repr(TestData("attr", "value")),
"<TestData: name='attr', data='value'>",
)
class SetupTestDataIsolationTests(TestCase):
"""
In-memory data isolation is respected for model instances assigned to class
attributes during setUpTestData.
"""
@classmethod
def setUpTestData(cls):
cls.car = Car.objects.create(name="Volkswagen Beetle")
    def test_car_name_deutsch(self):
        self.assertEqual(self.car.name, "Volkswagen Beetle")
        self.car.name = "VW Käfer"
        self.car.save()

    def test_car_name_french(self):
        self.assertEqual(self.car.name, "Volkswagen Beetle")
        self.car.name = "Volkswagen Coccinelle"
        self.car.save()
|
9ae06c8c380b4415d0f10546abf9a6c3c5c7e5cede83958c6c0497c91866403a | import logging
import os
import unittest
import warnings
from io import StringIO
from unittest import mock
from django.conf import settings
from django.contrib.staticfiles.finders import get_finder, get_finders
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import default_storage
from django.db import (
IntegrityError,
connection,
connections,
models,
router,
transaction,
)
from django.forms import (
CharField,
EmailField,
Form,
IntegerField,
ValidationError,
formset_factory,
)
from django.http import HttpResponse
from django.template.loader import render_to_string
from django.test import (
SimpleTestCase,
TestCase,
TransactionTestCase,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.html import HTMLParseError, parse_html
from django.test.testcases import DatabaseOperationForbidden
from django.test.utils import (
CaptureQueriesContext,
TestContextDecorator,
ignore_warnings,
isolate_apps,
override_settings,
setup_test_environment,
)
from django.urls import NoReverseMatch, path, reverse, reverse_lazy
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.log import DEFAULT_LOGGING
from django.utils.version import PY311
from .models import Car, Person, PossessedCar
from .views import empty_response
class SkippingTestCase(SimpleTestCase):
def _assert_skipping(self, func, expected_exc, msg=None):
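        """
        Assert that func() raises expected_exc (optionally matching msg)
        rather than being skipped.
        """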
try:
if msg is not None:
with self.assertRaisesMessage(expected_exc, msg):
func()
else:
with self.assertRaises(expected_exc):
func()
except unittest.SkipTest:
self.fail("%s should not result in a skipped test." % func.__name__)
def test_skip_unless_db_feature(self):
"""
Testing the django.test.skipUnlessDBFeature decorator.
"""
        # Total hack, but it works; we just want an attribute that's always
        # true.
@skipUnlessDBFeature("__class__")
def test_func():
raise ValueError
@skipUnlessDBFeature("notprovided")
def test_func2():
raise ValueError
@skipUnlessDBFeature("__class__", "__class__")
def test_func3():
raise ValueError
@skipUnlessDBFeature("__class__", "notprovided")
def test_func4():
raise ValueError
self._assert_skipping(test_func, ValueError)
self._assert_skipping(test_func2, unittest.SkipTest)
self._assert_skipping(test_func3, ValueError)
self._assert_skipping(test_func4, unittest.SkipTest)
class SkipTestCase(SimpleTestCase):
@skipUnlessDBFeature("missing")
def test_foo(self):
pass
self._assert_skipping(
SkipTestCase("test_foo").test_foo,
ValueError,
"skipUnlessDBFeature cannot be used on test_foo (test_utils.tests."
"SkippingTestCase.test_skip_unless_db_feature.<locals>.SkipTestCase%s) "
"as SkippingTestCase.test_skip_unless_db_feature.<locals>.SkipTestCase "
"doesn't allow queries against the 'default' database."
# Python 3.11 uses fully qualified test name in the output.
% (".test_foo" if PY311 else ""),
)
def test_skip_if_db_feature(self):
"""
Testing the django.test.skipIfDBFeature decorator.
"""
@skipIfDBFeature("__class__")
def test_func():
raise ValueError
@skipIfDBFeature("notprovided")
def test_func2():
raise ValueError
@skipIfDBFeature("__class__", "__class__")
def test_func3():
raise ValueError
@skipIfDBFeature("__class__", "notprovided")
def test_func4():
raise ValueError
@skipIfDBFeature("notprovided", "notprovided")
def test_func5():
raise ValueError
self._assert_skipping(test_func, unittest.SkipTest)
self._assert_skipping(test_func2, ValueError)
self._assert_skipping(test_func3, unittest.SkipTest)
self._assert_skipping(test_func4, unittest.SkipTest)
self._assert_skipping(test_func5, ValueError)
class SkipTestCase(SimpleTestCase):
@skipIfDBFeature("missing")
def test_foo(self):
pass
self._assert_skipping(
SkipTestCase("test_foo").test_foo,
ValueError,
"skipIfDBFeature cannot be used on test_foo (test_utils.tests."
"SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase%s) "
"as SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase "
"doesn't allow queries against the 'default' database."
# Python 3.11 uses fully qualified test name in the output.
% (".test_foo" if PY311 else ""),
)
class SkippingClassTestCase(TestCase):
def test_skip_class_unless_db_feature(self):
@skipUnlessDBFeature("__class__")
class NotSkippedTests(TestCase):
def test_dummy(self):
return
@skipUnlessDBFeature("missing")
@skipIfDBFeature("__class__")
class SkippedTests(TestCase):
def test_will_be_skipped(self):
self.fail("We should never arrive here.")
@skipIfDBFeature("__dict__")
class SkippedTestsSubclass(SkippedTests):
pass
test_suite = unittest.TestSuite()
test_suite.addTest(NotSkippedTests("test_dummy"))
try:
test_suite.addTest(SkippedTests("test_will_be_skipped"))
test_suite.addTest(SkippedTestsSubclass("test_will_be_skipped"))
except unittest.SkipTest:
self.fail("SkipTest should not be raised here.")
result = unittest.TextTestRunner(stream=StringIO()).run(test_suite)
self.assertEqual(result.testsRun, 3)
self.assertEqual(len(result.skipped), 2)
self.assertEqual(result.skipped[0][1], "Database has feature(s) __class__")
self.assertEqual(result.skipped[1][1], "Database has feature(s) __class__")
def test_missing_default_databases(self):
@skipIfDBFeature("missing")
class MissingDatabases(SimpleTestCase):
def test_assertion_error(self):
pass
suite = unittest.TestSuite()
try:
suite.addTest(MissingDatabases("test_assertion_error"))
except unittest.SkipTest:
self.fail("SkipTest should not be raised at this stage")
runner = unittest.TextTestRunner(stream=StringIO())
msg = (
"skipIfDBFeature cannot be used on <class 'test_utils.tests."
"SkippingClassTestCase.test_missing_default_databases.<locals>."
"MissingDatabases'> as it doesn't allow queries against the "
"'default' database."
)
with self.assertRaisesMessage(ValueError, msg):
runner.run(suite)
@override_settings(ROOT_URLCONF="test_utils.urls")
class AssertNumQueriesTests(TestCase):
def test_assert_num_queries(self):
def test_func():
raise ValueError
with self.assertRaises(ValueError):
self.assertNumQueries(2, test_func)
def test_assert_num_queries_with_client(self):
person = Person.objects.create(name="test")
self.assertNumQueries(
1, self.client.get, "/test_utils/get_person/%s/" % person.pk
)
self.assertNumQueries(
1, self.client.get, "/test_utils/get_person/%s/" % person.pk
)
def test_func():
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.assertNumQueries(2, test_func)
@unittest.skipUnless(
connection.vendor != "sqlite" or not connection.is_in_memory_db(),
"For SQLite in-memory tests, closing the connection destroys the database.",
)
class AssertNumQueriesUponConnectionTests(TransactionTestCase):
available_apps = []
def test_ignores_connection_configuration_queries(self):
real_ensure_connection = connection.ensure_connection
connection.close()
def make_configuration_query():
is_opening_connection = connection.connection is None
real_ensure_connection()
if is_opening_connection:
# Avoid infinite recursion. Creating a cursor calls
# ensure_connection() which is currently mocked by this method.
with connection.cursor() as cursor:
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
ensure_connection = (
"django.db.backends.base.base.BaseDatabaseWrapper.ensure_connection"
)
with mock.patch(ensure_connection, side_effect=make_configuration_query):
with self.assertNumQueries(1):
list(Car.objects.all())
class AssertQuerysetEqualTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.p1 = Person.objects.create(name="p1")
cls.p2 = Person.objects.create(name="p2")
def test_empty(self):
self.assertQuerysetEqual(Person.objects.filter(name="p3"), [])
def test_ordered(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"),
[self.p1, self.p2],
)
def test_unordered(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"), [self.p2, self.p1], ordered=False
)
def test_queryset(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"),
Person.objects.order_by("name"),
)
def test_flat_values_list(self):
self.assertQuerysetEqual(
Person.objects.order_by("name").values_list("name", flat=True),
["p1", "p2"],
)
def test_transform(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"),
[self.p1.pk, self.p2.pk],
transform=lambda x: x.pk,
)
def test_repr_transform(self):
self.assertQuerysetEqual(
Person.objects.order_by("name"),
[repr(self.p1), repr(self.p2)],
transform=repr,
)
def test_undefined_order(self):
# Using an unordered queryset with more than one ordered value
# is an error.
msg = (
"Trying to compare non-ordered queryset against more than one "
"ordered value."
)
with self.assertRaisesMessage(ValueError, msg):
self.assertQuerysetEqual(
Person.objects.all(),
[self.p1, self.p2],
)
# No error for one value.
self.assertQuerysetEqual(Person.objects.filter(name="p1"), [self.p1])
def test_repeated_values(self):
"""
assertQuerysetEqual checks the number of appearance of each item
when used with option ordered=False.
"""
batmobile = Car.objects.create(name="Batmobile")
k2000 = Car.objects.create(name="K 2000")
PossessedCar.objects.bulk_create(
[
PossessedCar(car=batmobile, belongs_to=self.p1),
PossessedCar(car=batmobile, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
]
)
with self.assertRaises(AssertionError):
self.assertQuerysetEqual(
self.p1.cars.all(), [batmobile, k2000], ordered=False
)
self.assertQuerysetEqual(
self.p1.cars.all(), [batmobile] * 2 + [k2000] * 4, ordered=False
)
def test_maxdiff(self):
names = ["Joe Smith %s" % i for i in range(20)]
Person.objects.bulk_create([Person(name=name) for name in names])
names.append("Extra Person")
with self.assertRaises(AssertionError) as ctx:
self.assertQuerysetEqual(
Person.objects.filter(name__startswith="Joe"),
names,
ordered=False,
transform=lambda p: p.name,
)
self.assertIn("Set self.maxDiff to None to see it.", str(ctx.exception))
original = self.maxDiff
self.maxDiff = None
try:
with self.assertRaises(AssertionError) as ctx:
self.assertQuerysetEqual(
Person.objects.filter(name__startswith="Joe"),
names,
ordered=False,
transform=lambda p: p.name,
)
finally:
self.maxDiff = original
exception_msg = str(ctx.exception)
self.assertNotIn("Set self.maxDiff to None to see it.", exception_msg)
for name in names:
self.assertIn(name, exception_msg)
@override_settings(ROOT_URLCONF="test_utils.urls")
class CaptureQueriesContextManagerTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.person_pk = str(Person.objects.create(name="test").pk)
def test_simple(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.get(pk=self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
with CaptureQueriesContext(connection) as captured_queries:
pass
self.assertEqual(0, len(captured_queries))
def test_within(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.get(pk=self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
def test_nested(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.count()
with CaptureQueriesContext(connection) as nested_captured_queries:
Person.objects.count()
self.assertEqual(1, len(nested_captured_queries))
self.assertEqual(2, len(captured_queries))
def test_failure(self):
with self.assertRaises(TypeError):
with CaptureQueriesContext(connection):
raise TypeError
def test_with_client(self):
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 2)
self.assertIn(self.person_pk, captured_queries[0]["sql"])
self.assertIn(self.person_pk, captured_queries[1]["sql"])
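
    def test_captured_query_dicts(self):
        # Hedged sketch (added example): each captured entry mirrors
        # connection.queries, i.e. a dict with "sql" and "time" keys.
        with CaptureQueriesContext(connection) as captured_queries:
            Person.objects.get(pk=self.person_pk)
        for query in captured_queries:
            self.assertIn("sql", query)
            self.assertIn("time", query)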
@override_settings(ROOT_URLCONF="test_utils.urls")
class AssertNumQueriesContextManagerTests(TestCase):
def test_simple(self):
with self.assertNumQueries(0):
pass
with self.assertNumQueries(1):
Person.objects.count()
with self.assertNumQueries(2):
Person.objects.count()
Person.objects.count()
def test_failure(self):
msg = "1 != 2 : 1 queries executed, 2 expected\nCaptured queries were:\n1."
with self.assertRaisesMessage(AssertionError, msg):
with self.assertNumQueries(2):
Person.objects.count()
with self.assertRaises(TypeError):
with self.assertNumQueries(4000):
raise TypeError
def test_with_client(self):
person = Person.objects.create(name="test")
with self.assertNumQueries(1):
self.client.get("/test_utils/get_person/%s/" % person.pk)
with self.assertNumQueries(1):
self.client.get("/test_utils/get_person/%s/" % person.pk)
with self.assertNumQueries(2):
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.client.get("/test_utils/get_person/%s/" % person.pk)
@override_settings(ROOT_URLCONF="test_utils.urls")
class AssertTemplateUsedContextManagerTests(SimpleTestCase):
def test_usage(self):
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/base.html")
with self.assertTemplateUsed(template_name="template_used/base.html"):
render_to_string("template_used/base.html")
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/include.html")
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/extends.html")
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/base.html")
render_to_string("template_used/base.html")
def test_nested_usage(self):
with self.assertTemplateUsed("template_used/base.html"):
with self.assertTemplateUsed("template_used/include.html"):
render_to_string("template_used/include.html")
with self.assertTemplateUsed("template_used/extends.html"):
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/extends.html")
with self.assertTemplateUsed("template_used/base.html"):
with self.assertTemplateUsed("template_used/alternative.html"):
render_to_string("template_used/alternative.html")
render_to_string("template_used/base.html")
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/extends.html")
with self.assertTemplateNotUsed("template_used/base.html"):
render_to_string("template_used/alternative.html")
render_to_string("template_used/base.html")
def test_not_used(self):
with self.assertTemplateNotUsed("template_used/base.html"):
pass
with self.assertTemplateNotUsed("template_used/alternative.html"):
pass
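
    def test_not_used_failure(self):
        # Illustrative counterpart (added example): the context manager fails
        # when the named template does get rendered.
        with self.assertRaises(AssertionError):
            with self.assertTemplateNotUsed("template_used/base.html"):
                render_to_string("template_used/base.html")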
def test_error_message(self):
msg = "No templates used to render the response"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed("template_used/base.html"):
pass
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(template_name="template_used/base.html"):
pass
msg2 = (
"Template 'template_used/base.html' was not a template used to render "
"the response. Actual template(s) used: template_used/alternative.html"
)
with self.assertRaisesMessage(AssertionError, msg2):
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/alternative.html")
with self.assertRaisesMessage(
AssertionError, "No templates used to render the response"
):
response = self.client.get("/test_utils/no_template_used/")
self.assertTemplateUsed(response, "template_used/base.html")
def test_msg_prefix(self):
msg_prefix = "Prefix"
msg = f"{msg_prefix}: No templates used to render the response"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(
"template_used/base.html", msg_prefix=msg_prefix
):
pass
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(
template_name="template_used/base.html",
msg_prefix=msg_prefix,
):
pass
msg = (
f"{msg_prefix}: Template 'template_used/base.html' was not a "
f"template used to render the response. Actual template(s) used: "
f"template_used/alternative.html"
)
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(
"template_used/base.html", msg_prefix=msg_prefix
):
render_to_string("template_used/alternative.html")
def test_count(self):
with self.assertTemplateUsed("template_used/base.html", count=2):
render_to_string("template_used/base.html")
render_to_string("template_used/base.html")
msg = (
"Template 'template_used/base.html' was expected to be rendered "
"3 time(s) but was actually rendered 2 time(s)."
)
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed("template_used/base.html", count=3):
render_to_string("template_used/base.html")
render_to_string("template_used/base.html")
def test_failure(self):
msg = "response and/or template_name argument must be provided"
with self.assertRaisesMessage(TypeError, msg):
with self.assertTemplateUsed():
pass
msg = "No templates used to render the response"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(""):
pass
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(""):
render_to_string("template_used/base.html")
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(template_name=""):
pass
msg = (
"Template 'template_used/base.html' was not a template used to "
"render the response. Actual template(s) used: "
"template_used/alternative.html"
)
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed("template_used/base.html"):
render_to_string("template_used/alternative.html")
def test_assert_used_on_http_response(self):
response = HttpResponse()
msg = "%s() is only usable on responses fetched using the Django test Client."
with self.assertRaisesMessage(ValueError, msg % "assertTemplateUsed"):
self.assertTemplateUsed(response, "template.html")
with self.assertRaisesMessage(ValueError, msg % "assertTemplateNotUsed"):
self.assertTemplateNotUsed(response, "template.html")
class HTMLEqualTests(SimpleTestCase):
def test_html_parser(self):
element = parse_html("<div><p>Hello</p></div>")
self.assertEqual(len(element.children), 1)
self.assertEqual(element.children[0].name, "p")
self.assertEqual(element.children[0].children[0], "Hello")
parse_html("<p>")
parse_html("<p attr>")
dom = parse_html("<p>foo")
self.assertEqual(len(dom.children), 1)
self.assertEqual(dom.name, "p")
self.assertEqual(dom[0], "foo")
def test_parse_html_in_script(self):
parse_html('<script>var a = "<p" + ">";</script>')
parse_html(
"""
<script>
var js_sha_link='<p>***</p>';
</script>
"""
)
        # Script content will be parsed to text.
dom = parse_html(
"""
<script><p>foo</p> '</scr'+'ipt>' <span>bar</span></script>
"""
)
self.assertEqual(len(dom.children), 1)
self.assertEqual(dom.children[0], "<p>foo</p> '</scr'+'ipt>' <span>bar</span>")
def test_self_closing_tags(self):
self_closing_tags = [
"area",
"base",
"br",
"col",
"embed",
"hr",
"img",
"input",
"link",
"meta",
"param",
"source",
"track",
"wbr",
# Deprecated tags
"frame",
"spacer",
]
for tag in self_closing_tags:
with self.subTest(tag):
dom = parse_html("<p>Hello <%s> world</p>" % tag)
self.assertEqual(len(dom.children), 3)
self.assertEqual(dom[0], "Hello")
self.assertEqual(dom[1].name, tag)
self.assertEqual(dom[2], "world")
dom = parse_html("<p>Hello <%s /> world</p>" % tag)
self.assertEqual(len(dom.children), 3)
self.assertEqual(dom[0], "Hello")
self.assertEqual(dom[1].name, tag)
self.assertEqual(dom[2], "world")
def test_simple_equal_html(self):
self.assertHTMLEqual("", "")
self.assertHTMLEqual("<p></p>", "<p></p>")
self.assertHTMLEqual("<p></p>", " <p> </p> ")
self.assertHTMLEqual("<div><p>Hello</p></div>", "<div><p>Hello</p></div>")
self.assertHTMLEqual("<div><p>Hello</p></div>", "<div> <p>Hello</p> </div>")
self.assertHTMLEqual("<div>\n<p>Hello</p></div>", "<div><p>Hello</p></div>\n")
self.assertHTMLEqual(
"<div><p>Hello\nWorld !</p></div>", "<div><p>Hello World\n!</p></div>"
)
self.assertHTMLEqual(
"<div><p>Hello\nWorld !</p></div>", "<div><p>Hello World\n!</p></div>"
)
self.assertHTMLEqual("<p>Hello World !</p>", "<p>Hello World\n\n!</p>")
self.assertHTMLEqual("<p> </p>", "<p></p>")
self.assertHTMLEqual("<p/>", "<p></p>")
self.assertHTMLEqual("<p />", "<p></p>")
self.assertHTMLEqual("<input checked>", '<input checked="checked">')
self.assertHTMLEqual("<p>Hello", "<p> Hello")
self.assertHTMLEqual("<p>Hello</p>World", "<p>Hello</p> World")
def test_ignore_comments(self):
self.assertHTMLEqual(
"<div>Hello<!-- this is a comment --> World!</div>",
"<div>Hello World!</div>",
)
def test_unequal_html(self):
self.assertHTMLNotEqual("<p>Hello</p>", "<p>Hello!</p>")
self.assertHTMLNotEqual("<p>foobar</p>", "<p>foo bar</p>")
self.assertHTMLNotEqual("<p>foo bar</p>", "<p>foo bar</p>")
self.assertHTMLNotEqual("<p>foo nbsp</p>", "<p>foo </p>")
self.assertHTMLNotEqual("<p>foo #20</p>", "<p>foo </p>")
self.assertHTMLNotEqual(
"<p><span>Hello</span><span>World</span></p>",
"<p><span>Hello</span>World</p>",
)
self.assertHTMLNotEqual(
"<p><span>Hello</span>World</p>",
"<p><span>Hello</span><span>World</span></p>",
)
def test_attributes(self):
self.assertHTMLEqual(
'<input type="text" id="id_name" />', '<input id="id_name" type="text" />'
)
self.assertHTMLEqual(
"""<input type='text' id="id_name" />""",
'<input id="id_name" type="text" />',
)
self.assertHTMLNotEqual(
'<input type="text" id="id_name" />',
'<input type="password" id="id_name" />',
)
def test_class_attribute(self):
pairs = [
('<p class="foo bar"></p>', '<p class="bar foo"></p>'),
('<p class=" foo bar "></p>', '<p class="bar foo"></p>'),
('<p class=" foo bar "></p>', '<p class="bar foo"></p>'),
('<p class="foo\tbar"></p>', '<p class="bar foo"></p>'),
('<p class="\tfoo\tbar\t"></p>', '<p class="bar foo"></p>'),
('<p class="\t\t\tfoo\t\t\tbar\t\t\t"></p>', '<p class="bar foo"></p>'),
('<p class="\t \nfoo \t\nbar\n\t "></p>', '<p class="bar foo"></p>'),
]
for html1, html2 in pairs:
with self.subTest(html1):
self.assertHTMLEqual(html1, html2)
def test_boolean_attribute(self):
html1 = "<input checked>"
html2 = '<input checked="">'
html3 = '<input checked="checked">'
self.assertHTMLEqual(html1, html2)
self.assertHTMLEqual(html1, html3)
self.assertHTMLEqual(html2, html3)
self.assertHTMLNotEqual(html1, '<input checked="invalid">')
self.assertEqual(str(parse_html(html1)), "<input checked>")
self.assertEqual(str(parse_html(html2)), "<input checked>")
self.assertEqual(str(parse_html(html3)), "<input checked>")
    def test_non_boolean_attributes(self):
html1 = "<input value>"
html2 = '<input value="">'
html3 = '<input value="value">'
self.assertHTMLEqual(html1, html2)
self.assertHTMLNotEqual(html1, html3)
self.assertEqual(str(parse_html(html1)), '<input value="">')
self.assertEqual(str(parse_html(html2)), '<input value="">')
def test_normalize_refs(self):
pairs = [
("'", "'"),
("'", "'"),
("'", "'"),
("'", "'"),
("'", "'"),
("'", "'"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
("&", "&"),
]
for pair in pairs:
with self.subTest(repr(pair)):
self.assertHTMLEqual(*pair)
def test_complex_examples(self):
self.assertHTMLEqual(
"""<tr><th><label for="id_first_name">First name:</label></th>
<td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th>
<td><input type="text" id="id_last_name" name="last_name" value="Lennon" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th>
<td><input type="text" value="1940-10-9" name="birthday" id="id_birthday" /></td></tr>""", # NOQA
"""
<tr><th>
<label for="id_first_name">First name:</label></th><td>
<input type="text" name="first_name" value="John" id="id_first_name" />
</td></tr>
<tr><th>
<label for="id_last_name">Last name:</label></th><td>
<input type="text" name="last_name" value="Lennon" id="id_last_name" />
</td></tr>
<tr><th>
<label for="id_birthday">Birthday:</label></th><td>
<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />
</td></tr>
""",
)
self.assertHTMLEqual(
"""<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet">
<title>Document</title>
<meta attribute="value">
</head>
<body>
<p>
This is a valid paragraph
<div> this is a div AFTER the p</div>
</body>
</html>""",
"""
<html>
<head>
<link rel="stylesheet">
<title>Document</title>
<meta attribute="value">
</head>
<body>
<p> This is a valid paragraph
<!-- browsers would close the p tag here -->
<div> this is a div AFTER the p</div>
</p> <!-- this is invalid HTML parsing, but it should make no
difference in most cases -->
</body>
</html>""",
)
def test_html_contain(self):
# equal html contains each other
dom1 = parse_html("<p>foo")
dom2 = parse_html("<p>foo</p>")
self.assertIn(dom1, dom2)
self.assertIn(dom2, dom1)
dom2 = parse_html("<div><p>foo</p></div>")
self.assertIn(dom1, dom2)
self.assertNotIn(dom2, dom1)
self.assertNotIn("<p>foo</p>", dom2)
self.assertIn("foo", dom2)
# when a root element is used ...
dom1 = parse_html("<p>foo</p><p>bar</p>")
dom2 = parse_html("<p>foo</p><p>bar</p>")
self.assertIn(dom1, dom2)
dom1 = parse_html("<p>foo</p>")
self.assertIn(dom1, dom2)
dom1 = parse_html("<p>bar</p>")
self.assertIn(dom1, dom2)
dom1 = parse_html("<div><p>foo</p><p>bar</p></div>")
self.assertIn(dom2, dom1)
def test_count(self):
# equal html contains each other one time
dom1 = parse_html("<p>foo")
dom2 = parse_html("<p>foo</p>")
self.assertEqual(dom1.count(dom2), 1)
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<p>foo</p><p>bar</p>")
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<p>foo foo</p><p>foo</p>")
self.assertEqual(dom2.count("foo"), 3)
dom2 = parse_html('<p class="bar">foo</p>')
self.assertEqual(dom2.count("bar"), 0)
self.assertEqual(dom2.count("class"), 0)
self.assertEqual(dom2.count("p"), 0)
self.assertEqual(dom2.count("o"), 2)
dom2 = parse_html("<p>foo</p><p>foo</p>")
self.assertEqual(dom2.count(dom1), 2)
dom2 = parse_html('<div><p>foo<input type=""></p><p>foo</p></div>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<div><div><p>foo</p></div></div>")
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<p>foo<p>foo</p></p>")
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html("<p>foo<p>bar</p></p>")
self.assertEqual(dom2.count(dom1), 0)
# HTML with a root element contains the same HTML with no root element.
dom1 = parse_html("<p>foo</p><p>bar</p>")
dom2 = parse_html("<div><p>foo</p><p>bar</p></div>")
self.assertEqual(dom2.count(dom1), 1)
# Target of search is a sequence of child elements and appears more
# than once.
dom2 = parse_html("<div><p>foo</p><p>bar</p><p>foo</p><p>bar</p></div>")
self.assertEqual(dom2.count(dom1), 2)
# Searched HTML has additional children.
dom1 = parse_html("<a/><b/>")
dom2 = parse_html("<a/><b/><c/>")
self.assertEqual(dom2.count(dom1), 1)
# No match found in children.
dom1 = parse_html("<b/><a/>")
self.assertEqual(dom2.count(dom1), 0)
# Target of search found among children and grandchildren.
dom1 = parse_html("<b/><b/>")
dom2 = parse_html("<a><b/><b/></a><b/><b/>")
self.assertEqual(dom2.count(dom1), 2)
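
    def test_parsed_element_equality(self):
        # Hedged sketch (added example): parsed elements compare
        # structurally, which is what assertHTMLEqual() builds on; the
        # attribute quoting style is irrelevant.
        self.assertEqual(
            parse_html("<p class='a'>x</p>"), parse_html('<p class="a">x</p>')
        )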
def test_root_element_escaped_html(self):
html = "<br>"
parsed = parse_html(html)
self.assertEqual(str(parsed), html)
def test_parsing_errors(self):
with self.assertRaises(AssertionError):
self.assertHTMLEqual("<p>", "")
with self.assertRaises(AssertionError):
self.assertHTMLEqual("", "<p>")
error_msg = (
"First argument is not valid HTML:\n"
"('Unexpected end tag `div` (Line 1, Column 6)', (1, 6))"
)
with self.assertRaisesMessage(AssertionError, error_msg):
self.assertHTMLEqual("< div></ div>", "<div></div>")
with self.assertRaises(HTMLParseError):
parse_html("</p>")
def test_escaped_html_errors(self):
msg = "<p>\n<foo>\n</p> != <p>\n<foo>\n</p>\n"
with self.assertRaisesMessage(AssertionError, msg):
self.assertHTMLEqual("<p><foo></p>", "<p><foo></p>")
with self.assertRaisesMessage(AssertionError, msg):
self.assertHTMLEqual("<p><foo></p>", "<p><foo></p>")
def test_contains_html(self):
response = HttpResponse(
"""<body>
This is a form: <form method="get">
<input type="text" name="Hello" />
</form></body>"""
)
self.assertNotContains(response, "<input name='Hello' type='text'>")
self.assertContains(response, '<form method="get">')
self.assertContains(response, "<input name='Hello' type='text'>", html=True)
self.assertNotContains(response, '<form method="get">', html=True)
invalid_response = HttpResponse("""<body <bad>>""")
with self.assertRaises(AssertionError):
self.assertContains(invalid_response, "<p></p>")
with self.assertRaises(AssertionError):
self.assertContains(response, '<p "whats" that>')
def test_unicode_handling(self):
response = HttpResponse(
'<p class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</p>'
)
self.assertContains(
response,
'<p class="help">Some help text for the title (with Unicode ŠĐĆŽćžšđ)</p>',
html=True,
)
class JSONEqualTests(SimpleTestCase):
def test_simple_equal(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr1": "foo", "attr2":"baz"}'
self.assertJSONEqual(json1, json2)
def test_simple_equal_unordered(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr2":"baz", "attr1": "foo"}'
self.assertJSONEqual(json1, json2)
def test_simple_equal_raise(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONEqual(json1, json2)
def test_equal_parsing_errors(self):
invalid_json = '{"attr1": "foo, "attr2":"baz"}'
valid_json = '{"attr1": "foo", "attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONEqual(invalid_json, valid_json)
with self.assertRaises(AssertionError):
self.assertJSONEqual(valid_json, invalid_json)
def test_simple_not_equal(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr2":"baz"}'
self.assertJSONNotEqual(json1, json2)
def test_simple_not_equal_raise(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr1": "foo", "attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONNotEqual(json1, json2)
def test_not_equal_parsing_errors(self):
invalid_json = '{"attr1": "foo, "attr2":"baz"}'
valid_json = '{"attr1": "foo", "attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONNotEqual(invalid_json, valid_json)
with self.assertRaises(AssertionError):
self.assertJSONNotEqual(valid_json, invalid_json)
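
    def test_equal_expected_object(self):
        # Hedged aside (added example): the expected value may also be given
        # as a Python object instead of a JSON string.
        self.assertJSONEqual(
            '{"attr1": "foo", "attr2": "baz"}',
            {"attr1": "foo", "attr2": "baz"},
        )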
class XMLEqualTests(SimpleTestCase):
def test_simple_equal(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr1='a' attr2='b' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_unordered(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_raise(self):
xml1 = "<elem attr1='a' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_raises_message(self):
xml1 = "<elem attr1='a' />"
xml2 = "<elem attr2='b' attr1='a' />"
msg = """{xml1} != {xml2}
- <elem attr1='a' />
+ <elem attr2='b' attr1='a' />
?        ++++++++++
""".format(
xml1=repr(xml1), xml2=repr(xml2)
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertXMLEqual(xml1, xml2)
def test_simple_not_equal(self):
xml1 = "<elem attr1='a' attr2='c' />"
xml2 = "<elem attr1='a' attr2='b' />"
self.assertXMLNotEqual(xml1, xml2)
def test_simple_not_equal_raise(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLNotEqual(xml1, xml2)
def test_parsing_errors(self):
        xml_invalid = "<elem attr1='a attr2='b' />"
        xml2 = "<elem attr2='b' attr1='a' />"
        with self.assertRaises(AssertionError):
            self.assertXMLNotEqual(xml_invalid, xml2)
def test_comment_root(self):
xml1 = "<?xml version='1.0'?><!-- comment1 --><elem attr1='a' attr2='b' />"
xml2 = "<?xml version='1.0'?><!-- comment2 --><elem attr2='b' attr1='a' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_with_leading_or_trailing_whitespace(self):
xml1 = "<elem>foo</elem> \t\n"
xml2 = " \t\n<elem>foo</elem>"
self.assertXMLEqual(xml1, xml2)
def test_simple_not_equal_with_whitespace_in_the_middle(self):
xml1 = "<elem>foo</elem><elem>bar</elem>"
xml2 = "<elem>foo</elem> <elem>bar</elem>"
self.assertXMLNotEqual(xml1, xml2)
def test_doctype_root(self):
xml1 = '<?xml version="1.0"?><!DOCTYPE root SYSTEM "example1.dtd"><root />'
xml2 = '<?xml version="1.0"?><!DOCTYPE root SYSTEM "example2.dtd"><root />'
self.assertXMLEqual(xml1, xml2)
def test_processing_instruction(self):
xml1 = (
'<?xml version="1.0"?>'
'<?xml-model href="http://www.example1.com"?><root />'
)
xml2 = (
'<?xml version="1.0"?>'
'<?xml-model href="http://www.example2.com"?><root />'
)
self.assertXMLEqual(xml1, xml2)
self.assertXMLEqual(
'<?xml-stylesheet href="style1.xslt" type="text/xsl"?><root />',
'<?xml-stylesheet href="style2.xslt" type="text/xsl"?><root />',
)
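
    def test_child_element_order(self):
        # Hedged sketch (added example): unlike attribute order, the order
        # of child elements is significant.
        self.assertXMLNotEqual(
            "<root><a /><b /></root>", "<root><b /><a /></root>"
        )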
class SkippingExtraTests(TestCase):
fixtures = ["should_not_be_loaded.json"]
# HACK: This depends on internals of our TestCase subclasses
def __call__(self, result=None):
        # Detect fixture loading by counting SQL queries; there should be zero.
with self.assertNumQueries(0):
super().__call__(result)
@unittest.skip("Fixture loading should not be performed for skipped tests.")
def test_fixtures_are_skipped(self):
pass
class AssertRaisesMsgTest(SimpleTestCase):
def test_assert_raises_message(self):
msg = "'Expected message' not found in 'Unexpected message'"
# context manager form of assertRaisesMessage()
with self.assertRaisesMessage(AssertionError, msg):
with self.assertRaisesMessage(ValueError, "Expected message"):
raise ValueError("Unexpected message")
# callable form
def func():
raise ValueError("Unexpected message")
with self.assertRaisesMessage(AssertionError, msg):
self.assertRaisesMessage(ValueError, "Expected message", func)
def test_special_re_chars(self):
"""assertRaisesMessage shouldn't interpret RE special chars."""
def func1():
raise ValueError("[.*x+]y?")
with self.assertRaisesMessage(ValueError, "[.*x+]y?"):
func1()
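
    def test_partial_message(self):
        # Hedged aside (added example): the expected message only needs to
        # occur as a substring of the actual exception message.
        with self.assertRaisesMessage(ValueError, "middle part"):
            raise ValueError("the middle part of a longer message")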
class AssertWarnsMessageTests(SimpleTestCase):
def test_context_manager(self):
with self.assertWarnsMessage(UserWarning, "Expected message"):
warnings.warn("Expected message", UserWarning)
def test_context_manager_failure(self):
msg = "Expected message' not found in 'Unexpected message'"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertWarnsMessage(UserWarning, "Expected message"):
warnings.warn("Unexpected message", UserWarning)
def test_callable(self):
def func():
warnings.warn("Expected message", UserWarning)
self.assertWarnsMessage(UserWarning, "Expected message", func)
def test_special_re_chars(self):
def func1():
warnings.warn("[.*x+]y?", UserWarning)
with self.assertWarnsMessage(UserWarning, "[.*x+]y?"):
func1()
# TODO: Remove when dropping support for PY39.
class AssertNoLogsTest(SimpleTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
logging.config.dictConfig(DEFAULT_LOGGING)
cls.addClassCleanup(logging.config.dictConfig, settings.LOGGING)
def setUp(self):
self.logger = logging.getLogger("django")
@override_settings(DEBUG=True)
def test_fails_when_log_emitted(self):
msg = "Unexpected logs found: ['INFO:django:FAIL!']"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertNoLogs("django", "INFO"):
self.logger.info("FAIL!")
@override_settings(DEBUG=True)
def test_text_level(self):
with self.assertNoLogs("django", "INFO"):
self.logger.debug("DEBUG logs are ignored.")
@override_settings(DEBUG=True)
def test_int_level(self):
with self.assertNoLogs("django", logging.INFO):
self.logger.debug("DEBUG logs are ignored.")
@override_settings(DEBUG=True)
def test_default_level(self):
with self.assertNoLogs("django"):
self.logger.debug("DEBUG logs are ignored.")
@override_settings(DEBUG=True)
def test_does_not_hide_other_failures(self):
msg = "1 != 2"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertNoLogs("django"):
self.assertEqual(1, 2)
class AssertFieldOutputTests(SimpleTestCase):
def test_assert_field_output(self):
error_invalid = ["Enter a valid email address."]
self.assertFieldOutput(
EmailField, {"[email protected]": "[email protected]"}, {"aaa": error_invalid}
)
with self.assertRaises(AssertionError):
self.assertFieldOutput(
EmailField,
{"[email protected]": "[email protected]"},
{"aaa": error_invalid + ["Another error"]},
)
with self.assertRaises(AssertionError):
self.assertFieldOutput(
EmailField, {"[email protected]": "Wrong output"}, {"aaa": error_invalid}
)
with self.assertRaises(AssertionError):
self.assertFieldOutput(
EmailField,
{"[email protected]": "[email protected]"},
{"aaa": ["Come on, gimme some well formatted data, dude."]},
)
def test_custom_required_message(self):
class MyCustomField(IntegerField):
default_error_messages = {
"required": "This is really required.",
}
self.assertFieldOutput(MyCustomField, {}, {}, empty_value=None)
@override_settings(ROOT_URLCONF="test_utils.urls")
class AssertURLEqualTests(SimpleTestCase):
def test_equal(self):
valid_tests = (
("http://example.com/?", "http://example.com/"),
("http://example.com/?x=1&", "http://example.com/?x=1"),
("http://example.com/?x=1&y=2", "http://example.com/?y=2&x=1"),
("http://example.com/?x=1&y=2", "http://example.com/?y=2&x=1"),
(
"http://example.com/?x=1&y=2&a=1&a=2",
"http://example.com/?a=1&a=2&y=2&x=1",
),
("/path/to/?x=1&y=2&z=3", "/path/to/?z=3&y=2&x=1"),
("?x=1&y=2&z=3", "?z=3&y=2&x=1"),
("/test_utils/no_template_used/", reverse_lazy("no_template_used")),
)
for url1, url2 in valid_tests:
with self.subTest(url=url1):
self.assertURLEqual(url1, url2)
def test_not_equal(self):
invalid_tests = (
# Protocol must be the same.
("http://example.com/", "https://example.com/"),
("http://example.com/?x=1&x=2", "https://example.com/?x=2&x=1"),
("http://example.com/?x=1&y=bar&x=2", "https://example.com/?y=bar&x=2&x=1"),
# Parameters of the same name must be in the same order.
("/path/to?a=1&a=2", "/path/to/?a=2&a=1"),
)
for url1, url2 in invalid_tests:
with self.subTest(url=url1), self.assertRaises(AssertionError):
self.assertURLEqual(url1, url2)
def test_message(self):
msg = (
"Expected 'http://example.com/?x=1&x=2' to equal "
"'https://example.com/?x=2&x=1'"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertURLEqual(
"http://example.com/?x=1&x=2", "https://example.com/?x=2&x=1"
)
def test_msg_prefix(self):
msg = (
"Prefix: Expected 'http://example.com/?x=1&x=2' to equal "
"'https://example.com/?x=2&x=1'"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertURLEqual(
"http://example.com/?x=1&x=2",
"https://example.com/?x=2&x=1",
msg_prefix="Prefix: ",
)
class TestForm(Form):
field = CharField()
def clean_field(self):
value = self.cleaned_data.get("field", "")
if value == "invalid":
raise ValidationError("invalid value")
return value
def clean(self):
if self.cleaned_data.get("field") == "invalid_non_field":
raise ValidationError("non-field error")
return self.cleaned_data
@classmethod
def _get_cleaned_form(cls, field_value):
form = cls({"field": field_value})
form.full_clean()
return form
@classmethod
def valid(cls):
return cls._get_cleaned_form("valid")
@classmethod
def invalid(cls, nonfield=False):
return cls._get_cleaned_form("invalid_non_field" if nonfield else "invalid")
class TestFormset(formset_factory(TestForm)):
@classmethod
def _get_cleaned_formset(cls, field_value):
formset = cls(
{
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "0",
"form-0-field": field_value,
}
)
formset.full_clean()
return formset
@classmethod
def valid(cls):
return cls._get_cleaned_formset("valid")
@classmethod
def invalid(cls, nonfield=False, nonform=False):
if nonform:
formset = cls({}, error_messages={"missing_management_form": "error"})
formset.full_clean()
return formset
return cls._get_cleaned_formset("invalid_non_field" if nonfield else "invalid")
class AssertFormErrorTests(SimpleTestCase):
@ignore_warnings(category=RemovedInDjango50Warning)
def test_non_client_response(self):
msg = (
"assertFormError() is only usable on responses fetched using the "
"Django test Client."
)
response = HttpResponse()
with self.assertRaisesMessage(ValueError, msg):
self.assertFormError(response, "form", "field", "invalid value")
@ignore_warnings(category=RemovedInDjango50Warning)
def test_response_with_no_context(self):
msg = "Response did not use any contexts to render the response"
response = mock.Mock(context=[])
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(response, "form", "field", "invalid value")
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
response,
"form",
"field",
"invalid value",
msg_prefix=msg_prefix,
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_form_not_in_context(self):
msg = "The form 'form' was not used to render the response"
response = mock.Mock(context=[{}])
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(response, "form", "field", "invalid value")
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
response, "form", "field", "invalid value", msg_prefix=msg_prefix
)
def test_single_error(self):
self.assertFormError(TestForm.invalid(), "field", "invalid value")
def test_error_list(self):
self.assertFormError(TestForm.invalid(), "field", ["invalid value"])
def test_empty_errors_valid_form(self):
self.assertFormError(TestForm.valid(), "field", [])
def test_empty_errors_valid_form_non_field_errors(self):
self.assertFormError(TestForm.valid(), None, [])
def test_field_not_in_form(self):
msg = (
"The form <TestForm bound=True, valid=False, fields=(field)> does not "
"contain the field 'other_field'."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(TestForm.invalid(), "other_field", "invalid value")
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
TestForm.invalid(),
"other_field",
"invalid value",
msg_prefix=msg_prefix,
)
def test_field_with_no_errors(self):
msg = (
"The errors of field 'field' on form <TestForm bound=True, valid=True, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormError(TestForm.valid(), "field", "invalid value")
self.assertIn("[] != ['invalid value']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
TestForm.valid(), "field", "invalid value", msg_prefix=msg_prefix
)
def test_field_with_different_error(self):
msg = (
"The errors of field 'field' on form <TestForm bound=True, valid=False, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormError(TestForm.invalid(), "field", "other error")
self.assertIn("['invalid value'] != ['other error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
TestForm.invalid(), "field", "other error", msg_prefix=msg_prefix
)
def test_unbound_form(self):
msg = (
"The form <TestForm bound=False, valid=Unknown, fields=(field)> is not "
"bound, it will never have any errors."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(TestForm(), "field", [])
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(TestForm(), "field", [], msg_prefix=msg_prefix)
def test_empty_errors_invalid_form(self):
msg = (
"The errors of field 'field' on form <TestForm bound=True, valid=False, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormError(TestForm.invalid(), "field", [])
self.assertIn("['invalid value'] != []", str(ctx.exception))
def test_non_field_errors(self):
self.assertFormError(TestForm.invalid(nonfield=True), None, "non-field error")
def test_different_non_field_errors(self):
msg = (
"The non-field errors of form <TestForm bound=True, valid=False, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormError(
TestForm.invalid(nonfield=True), None, "other non-field error"
)
self.assertIn(
"['non-field error'] != ['other non-field error']", str(ctx.exception)
)
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormError(
TestForm.invalid(nonfield=True),
None,
"other non-field error",
msg_prefix=msg_prefix,
)
class AssertFormsetErrorTests(SimpleTestCase):
@ignore_warnings(category=RemovedInDjango50Warning)
def test_non_client_response(self):
msg = (
"assertFormsetError() is only usable on responses fetched using "
"the Django test Client."
)
response = HttpResponse()
with self.assertRaisesMessage(ValueError, msg):
self.assertFormsetError(response, "formset", 0, "field", "invalid value")
@ignore_warnings(category=RemovedInDjango50Warning)
def test_response_with_no_context(self):
msg = "Response did not use any contexts to render the response"
response = mock.Mock(context=[])
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(response, "formset", 0, "field", "invalid value")
@ignore_warnings(category=RemovedInDjango50Warning)
def test_formset_not_in_context(self):
msg = "The formset 'formset' was not used to render the response"
response = mock.Mock(context=[{}])
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(response, "formset", 0, "field", "invalid value")
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
response, "formset", 0, "field", "invalid value", msg_prefix=msg_prefix
)
def test_single_error(self):
self.assertFormsetError(TestFormset.invalid(), 0, "field", "invalid value")
def test_error_list(self):
self.assertFormsetError(TestFormset.invalid(), 0, "field", ["invalid value"])
def test_empty_errors_valid_formset(self):
self.assertFormsetError(TestFormset.valid(), 0, "field", [])
def test_multiple_forms(self):
formset = TestFormset(
{
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "0",
"form-0-field": "valid",
"form-1-field": "invalid",
}
)
formset.full_clean()
self.assertFormsetError(formset, 0, "field", [])
self.assertFormsetError(formset, 1, "field", ["invalid value"])
def test_field_not_in_form(self):
msg = (
"The form 0 of formset <TestFormset: bound=True valid=False total_forms=1> "
"does not contain the field 'other_field'."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(
TestFormset.invalid(), 0, "other_field", "invalid value"
)
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(),
0,
"other_field",
"invalid value",
msg_prefix=msg_prefix,
)
def test_field_with_no_errors(self):
msg = (
"The errors of field 'field' on form 0 of formset <TestFormset: bound=True "
"valid=True total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.valid(), 0, "field", "invalid value")
self.assertIn("[] != ['invalid value']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.valid(), 0, "field", "invalid value", msg_prefix=msg_prefix
)
def test_field_with_different_error(self):
msg = (
"The errors of field 'field' on form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.invalid(), 0, "field", "other error")
self.assertIn("['invalid value'] != ['other error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(), 0, "field", "other error", msg_prefix=msg_prefix
)
def test_unbound_formset(self):
msg = (
"The formset <TestFormset: bound=False valid=Unknown total_forms=1> is not "
"bound, it will never have any errors."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(TestFormset(), 0, "field", [])
def test_empty_errors_invalid_formset(self):
msg = (
"The errors of field 'field' on form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.invalid(), 0, "field", [])
self.assertIn("['invalid value'] != []", str(ctx.exception))
def test_non_field_errors(self):
self.assertFormsetError(
TestFormset.invalid(nonfield=True), 0, None, "non-field error"
)
def test_different_non_field_errors(self):
msg = (
"The non-field errors of form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(
TestFormset.invalid(nonfield=True), 0, None, "other non-field error"
)
self.assertIn(
"['non-field error'] != ['other non-field error']", str(ctx.exception)
)
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(nonfield=True),
0,
None,
"other non-field error",
msg_prefix=msg_prefix,
)
def test_no_non_field_errors(self):
msg = (
"The non-field errors of form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.invalid(), 0, None, "non-field error")
self.assertIn("[] != ['non-field error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(), 0, None, "non-field error", msg_prefix=msg_prefix
)
def test_non_form_errors(self):
self.assertFormsetError(TestFormset.invalid(nonform=True), None, None, "error")
def test_different_non_form_errors(self):
msg = (
"The non-form errors of formset <TestFormset: bound=True valid=False "
"total_forms=0> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(
TestFormset.invalid(nonform=True), None, None, "other error"
)
self.assertIn("['error'] != ['other error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(nonform=True),
None,
None,
"other error",
msg_prefix=msg_prefix,
)
def test_no_non_form_errors(self):
msg = (
"The non-form errors of formset <TestFormset: bound=True valid=False "
"total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg) as ctx:
self.assertFormsetError(TestFormset.invalid(), None, None, "error")
self.assertIn("[] != ['error']", str(ctx.exception))
msg_prefix = "Custom prefix"
with self.assertRaisesMessage(AssertionError, f"{msg_prefix}: {msg}"):
self.assertFormsetError(
TestFormset.invalid(),
None,
None,
"error",
msg_prefix=msg_prefix,
)
def test_non_form_errors_with_field(self):
msg = "You must use field=None with form_index=None."
with self.assertRaisesMessage(ValueError, msg):
self.assertFormsetError(
TestFormset.invalid(nonform=True), None, "field", "error"
)
def test_form_index_too_big(self):
msg = (
"The formset <TestFormset: bound=True valid=False total_forms=1> only has "
"1 form."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(TestFormset.invalid(), 2, "field", "error")
def test_form_index_too_big_plural(self):
formset = TestFormset(
{
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "0",
"form-0-field": "valid",
"form-1-field": "valid",
}
)
formset.full_clean()
msg = (
"The formset <TestFormset: bound=True valid=True total_forms=2> only has 2 "
"forms."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(formset, 2, "field", "error")
# RemovedInDjango50Warning
class AssertFormErrorDeprecationTests(SimpleTestCase):
"""
Exhaustively test all possible combinations of args/kwargs for the old
signature.
"""
@ignore_warnings(category=RemovedInDjango50Warning)
def test_assert_form_error_errors_none(self):
msg = (
"The errors of field 'field' on form <TestForm bound=True, valid=False, "
"fields=(field)> don't match."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(TestForm.invalid(), "field", None)
def test_assert_form_error_errors_none_warning(self):
msg = (
"Passing errors=None to assertFormError() is deprecated, use "
"errors=[] instead."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
self.assertFormError(TestForm.valid(), "field", None)
def _assert_form_error_old_api_cases(self, form, field, errors, msg_prefix):
response = mock.Mock(context=[{"form": TestForm.invalid()}])
return (
((response, form, field, errors), {}),
((response, form, field, errors, msg_prefix), {}),
((response, form, field, errors), {"msg_prefix": msg_prefix}),
((response, form, field), {"errors": errors}),
((response, form, field), {"errors": errors, "msg_prefix": msg_prefix}),
((response, form), {"field": field, "errors": errors}),
(
(response, form),
{"field": field, "errors": errors, "msg_prefix": msg_prefix},
),
((response,), {"form": form, "field": field, "errors": errors}),
(
(response,),
{
"form": form,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
(
(),
{"response": response, "form": form, "field": field, "errors": errors},
),
(
(),
{
"response": response,
"form": form,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
)
def test_assert_form_error_old_api(self):
deprecation_msg = (
"Passing response to assertFormError() is deprecated. Use the form object "
"directly: assertFormError(response.context['form'], 'field', ...)"
)
for args, kwargs in self._assert_form_error_old_api_cases(
form="form",
field="field",
errors=["invalid value"],
msg_prefix="Custom prefix",
):
with self.subTest(args=args, kwargs=kwargs):
with self.assertWarnsMessage(RemovedInDjango50Warning, deprecation_msg):
self.assertFormError(*args, **kwargs)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_assert_form_error_old_api_assertion_error(self):
for args, kwargs in self._assert_form_error_old_api_cases(
form="form",
field="field",
errors=["other error"],
msg_prefix="Custom prefix",
):
with self.subTest(args=args, kwargs=kwargs):
with self.assertRaises(AssertionError):
self.assertFormError(*args, **kwargs)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_assert_formset_error_errors_none(self):
msg = (
"The errors of field 'field' on form 0 of formset <TestFormset: bound=True "
"valid=False total_forms=1> don't match."
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(TestFormset.invalid(), 0, "field", None)
def test_assert_formset_error_errors_none_warning(self):
msg = (
"Passing errors=None to assertFormsetError() is deprecated, use "
"errors=[] instead."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
self.assertFormsetError(TestFormset.valid(), 0, "field", None)
def _assert_formset_error_old_api_cases(
self, formset, form_index, field, errors, msg_prefix
):
response = mock.Mock(context=[{"formset": TestFormset.invalid()}])
return (
((response, formset, form_index, field, errors), {}),
((response, formset, form_index, field, errors, msg_prefix), {}),
(
(response, formset, form_index, field, errors),
{"msg_prefix": msg_prefix},
),
((response, formset, form_index, field), {"errors": errors}),
(
(response, formset, form_index, field),
{"errors": errors, "msg_prefix": msg_prefix},
),
((response, formset, form_index), {"field": field, "errors": errors}),
(
(response, formset, form_index),
{"field": field, "errors": errors, "msg_prefix": msg_prefix},
),
(
(response, formset),
{"form_index": form_index, "field": field, "errors": errors},
),
(
(response, formset),
{
"form_index": form_index,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
(
(response,),
{
"formset": formset,
"form_index": form_index,
"field": field,
"errors": errors,
},
),
(
(response,),
{
"formset": formset,
"form_index": form_index,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
(
(),
{
"response": response,
"formset": formset,
"form_index": form_index,
"field": field,
"errors": errors,
},
),
(
(),
{
"response": response,
"formset": formset,
"form_index": form_index,
"field": field,
"errors": errors,
"msg_prefix": msg_prefix,
},
),
)
def test_assert_formset_error_old_api(self):
deprecation_msg = (
"Passing response to assertFormsetError() is deprecated. Use the formset "
"object directly: assertFormsetError(response.context['formset'], 0, ...)"
)
for args, kwargs in self._assert_formset_error_old_api_cases(
formset="formset",
form_index=0,
field="field",
errors=["invalid value"],
msg_prefix="Custom prefix",
):
with self.subTest(args=args, kwargs=kwargs):
with self.assertWarnsMessage(RemovedInDjango50Warning, deprecation_msg):
self.assertFormsetError(*args, **kwargs)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_assert_formset_error_old_api_assertion_error(self):
for args, kwargs in self._assert_formset_error_old_api_cases(
formset="formset",
form_index=0,
field="field",
errors=["other error"],
msg_prefix="Custom prefix",
):
with self.subTest(args=args, kwargs=kwargs):
with self.assertRaises(AssertionError):
self.assertFormsetError(*args, **kwargs)
class FirstUrls:
urlpatterns = [path("first/", empty_response, name="first")]
class SecondUrls:
urlpatterns = [path("second/", empty_response, name="second")]
class SetupTestEnvironmentTests(SimpleTestCase):
def test_setup_test_environment_calling_more_than_once(self):
with self.assertRaisesMessage(
RuntimeError, "setup_test_environment() was already called"
):
setup_test_environment()
def test_allowed_hosts(self):
for type_ in (list, tuple):
with self.subTest(type_=type_):
allowed_hosts = type_("*")
with mock.patch("django.test.utils._TestState") as x:
del x.saved_data
with self.settings(ALLOWED_HOSTS=allowed_hosts):
setup_test_environment()
self.assertEqual(settings.ALLOWED_HOSTS, ["*", "testserver"])
class OverrideSettingsTests(SimpleTestCase):
    # #21518 -- If neither override_settings nor a setting_changed receiver
    # clears the URL cache between tests, then one of test_urlconf_first or
    # test_urlconf_second will fail.
@override_settings(ROOT_URLCONF=FirstUrls)
def test_urlconf_first(self):
reverse("first")
@override_settings(ROOT_URLCONF=SecondUrls)
def test_urlconf_second(self):
reverse("second")
def test_urlconf_cache(self):
with self.assertRaises(NoReverseMatch):
reverse("first")
with self.assertRaises(NoReverseMatch):
reverse("second")
with override_settings(ROOT_URLCONF=FirstUrls):
self.client.get(reverse("first"))
with self.assertRaises(NoReverseMatch):
reverse("second")
with override_settings(ROOT_URLCONF=SecondUrls):
with self.assertRaises(NoReverseMatch):
reverse("first")
self.client.get(reverse("second"))
self.client.get(reverse("first"))
with self.assertRaises(NoReverseMatch):
reverse("second")
with self.assertRaises(NoReverseMatch):
reverse("first")
with self.assertRaises(NoReverseMatch):
reverse("second")
def test_override_media_root(self):
"""
Overriding the MEDIA_ROOT setting should be reflected in the
base_location attribute of django.core.files.storage.default_storage.
"""
self.assertEqual(default_storage.base_location, "")
with self.settings(MEDIA_ROOT="test_value"):
self.assertEqual(default_storage.base_location, "test_value")
def test_override_media_url(self):
"""
Overriding the MEDIA_URL setting should be reflected in the
base_url attribute of django.core.files.storage.default_storage.
"""
self.assertEqual(default_storage.base_location, "")
with self.settings(MEDIA_URL="/test_value/"):
self.assertEqual(default_storage.base_url, "/test_value/")
def test_override_file_upload_permissions(self):
"""
Overriding the FILE_UPLOAD_PERMISSIONS setting should be reflected in
the file_permissions_mode attribute of
django.core.files.storage.default_storage.
"""
self.assertEqual(default_storage.file_permissions_mode, 0o644)
with self.settings(FILE_UPLOAD_PERMISSIONS=0o777):
self.assertEqual(default_storage.file_permissions_mode, 0o777)
def test_override_file_upload_directory_permissions(self):
"""
Overriding the FILE_UPLOAD_DIRECTORY_PERMISSIONS setting should be
reflected in the directory_permissions_mode attribute of
django.core.files.storage.default_storage.
"""
self.assertIsNone(default_storage.directory_permissions_mode)
with self.settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777):
self.assertEqual(default_storage.directory_permissions_mode, 0o777)
def test_override_database_routers(self):
"""
Overriding DATABASE_ROUTERS should update the base router.
"""
test_routers = [object()]
with self.settings(DATABASE_ROUTERS=test_routers):
self.assertEqual(router.routers, test_routers)
def test_override_static_url(self):
"""
Overriding the STATIC_URL setting should be reflected in the
base_url attribute of
django.contrib.staticfiles.storage.staticfiles_storage.
"""
with self.settings(STATIC_URL="/test/"):
self.assertEqual(staticfiles_storage.base_url, "/test/")
def test_override_static_root(self):
"""
Overriding the STATIC_ROOT setting should be reflected in the
location attribute of
django.contrib.staticfiles.storage.staticfiles_storage.
"""
with self.settings(STATIC_ROOT="/tmp/test"):
self.assertEqual(staticfiles_storage.location, os.path.abspath("/tmp/test"))
def test_override_staticfiles_storage(self):
"""
Overriding the STATICFILES_STORAGE setting should be reflected in
the value of django.contrib.staticfiles.storage.staticfiles_storage.
"""
new_class = "ManifestStaticFilesStorage"
new_storage = "django.contrib.staticfiles.storage." + new_class
with self.settings(STATICFILES_STORAGE=new_storage):
self.assertEqual(staticfiles_storage.__class__.__name__, new_class)
def test_override_staticfiles_finders(self):
"""
Overriding the STATICFILES_FINDERS setting should be reflected in
the return value of django.contrib.staticfiles.finders.get_finders.
"""
current = get_finders()
self.assertGreater(len(list(current)), 1)
finders = ["django.contrib.staticfiles.finders.FileSystemFinder"]
with self.settings(STATICFILES_FINDERS=finders):
self.assertEqual(len(list(get_finders())), len(finders))
def test_override_staticfiles_dirs(self):
"""
Overriding the STATICFILES_DIRS setting should be reflected in
the locations attribute of the
django.contrib.staticfiles.finders.FileSystemFinder instance.
"""
finder = get_finder("django.contrib.staticfiles.finders.FileSystemFinder")
test_path = "/tmp/test"
expected_location = ("", test_path)
self.assertNotIn(expected_location, finder.locations)
with self.settings(STATICFILES_DIRS=[test_path]):
finder = get_finder("django.contrib.staticfiles.finders.FileSystemFinder")
self.assertIn(expected_location, finder.locations)
@skipUnlessDBFeature("supports_transactions")
class TestBadSetUpTestData(TestCase):
"""
An exception in setUpTestData() shouldn't leak a transaction which would
cascade across the rest of the test suite.
"""
class MyException(Exception):
pass
@classmethod
def setUpClass(cls):
try:
super().setUpClass()
except cls.MyException:
cls._in_atomic_block = connection.in_atomic_block
@classmethod
    def tearDownClass(cls):
        # Override to avoid a second cls._rollback_atomics() which would fail.
        # Normal setUpClass() methods won't have exception handling so this
        # method wouldn't typically be run.
pass
@classmethod
def setUpTestData(cls):
# Simulate a broken setUpTestData() method.
raise cls.MyException()
def test_failure_in_setUpTestData_should_rollback_transaction(self):
# setUpTestData() should call _rollback_atomics() so that the
# transaction doesn't leak.
self.assertFalse(self._in_atomic_block)
@skipUnlessDBFeature("supports_transactions")
class CaptureOnCommitCallbacksTests(TestCase):
databases = {"default", "other"}
callback_called = False
def enqueue_callback(self, using="default"):
def hook():
self.callback_called = True
transaction.on_commit(hook, using=using)
def test_no_arguments(self):
with self.captureOnCommitCallbacks() as callbacks:
self.enqueue_callback()
self.assertEqual(len(callbacks), 1)
self.assertIs(self.callback_called, False)
callbacks[0]()
self.assertIs(self.callback_called, True)
def test_using(self):
with self.captureOnCommitCallbacks(using="other") as callbacks:
self.enqueue_callback(using="other")
self.assertEqual(len(callbacks), 1)
self.assertIs(self.callback_called, False)
callbacks[0]()
self.assertIs(self.callback_called, True)
def test_different_using(self):
with self.captureOnCommitCallbacks(using="default") as callbacks:
self.enqueue_callback(using="other")
self.assertEqual(callbacks, [])
def test_execute(self):
with self.captureOnCommitCallbacks(execute=True) as callbacks:
self.enqueue_callback()
self.assertEqual(len(callbacks), 1)
self.assertIs(self.callback_called, True)
def test_pre_callback(self):
def pre_hook():
pass
transaction.on_commit(pre_hook, using="default")
with self.captureOnCommitCallbacks() as callbacks:
self.enqueue_callback()
self.assertEqual(len(callbacks), 1)
self.assertNotEqual(callbacks[0], pre_hook)
def test_with_rolled_back_savepoint(self):
with self.captureOnCommitCallbacks() as callbacks:
try:
with transaction.atomic():
self.enqueue_callback()
raise IntegrityError
except IntegrityError:
# Inner transaction.atomic() has been rolled back.
pass
self.assertEqual(callbacks, [])
def test_execute_recursive(self):
with self.captureOnCommitCallbacks(execute=True) as callbacks:
transaction.on_commit(self.enqueue_callback)
self.assertEqual(len(callbacks), 2)
self.assertIs(self.callback_called, True)
def test_execute_tree(self):
"""
A visualisation of the callback tree tested. Each node is expected to
be visited only once:
        └─branch_1
          ├─branch_2
          │ ├─leaf_1
          │ └─leaf_2
          └─leaf_3
"""
branch_1_call_counter = 0
branch_2_call_counter = 0
leaf_1_call_counter = 0
leaf_2_call_counter = 0
leaf_3_call_counter = 0
def leaf_1():
nonlocal leaf_1_call_counter
leaf_1_call_counter += 1
def leaf_2():
nonlocal leaf_2_call_counter
leaf_2_call_counter += 1
def leaf_3():
nonlocal leaf_3_call_counter
leaf_3_call_counter += 1
def branch_1():
nonlocal branch_1_call_counter
branch_1_call_counter += 1
transaction.on_commit(branch_2)
transaction.on_commit(leaf_3)
def branch_2():
nonlocal branch_2_call_counter
branch_2_call_counter += 1
transaction.on_commit(leaf_1)
transaction.on_commit(leaf_2)
with self.captureOnCommitCallbacks(execute=True) as callbacks:
transaction.on_commit(branch_1)
self.assertEqual(branch_1_call_counter, 1)
self.assertEqual(branch_2_call_counter, 1)
self.assertEqual(leaf_1_call_counter, 1)
self.assertEqual(leaf_2_call_counter, 1)
self.assertEqual(leaf_3_call_counter, 1)
self.assertEqual(callbacks, [branch_1, branch_2, leaf_3, leaf_1, leaf_2])
class DisallowedDatabaseQueriesTests(SimpleTestCase):
def test_disallowed_database_connections(self):
expected_message = (
"Database connections to 'default' are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to "
"ensure proper test isolation or add 'default' to "
"test_utils.tests.DisallowedDatabaseQueriesTests.databases to "
"silence this failure."
)
with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message):
connection.connect()
with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message):
connection.temporary_connection()
def test_disallowed_database_queries(self):
expected_message = (
"Database queries to 'default' are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to "
"ensure proper test isolation or add 'default' to "
"test_utils.tests.DisallowedDatabaseQueriesTests.databases to "
"silence this failure."
)
with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message):
Car.objects.first()
def test_disallowed_database_chunked_cursor_queries(self):
expected_message = (
"Database queries to 'default' are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to "
"ensure proper test isolation or add 'default' to "
"test_utils.tests.DisallowedDatabaseQueriesTests.databases to "
"silence this failure."
)
with self.assertRaisesMessage(DatabaseOperationForbidden, expected_message):
next(Car.objects.iterator())
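# Declaring the alias in the `databases` class attribute, as the next test
# case does, lifts the restriction SimpleTestCase places on database access.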
class AllowedDatabaseQueriesTests(SimpleTestCase):
databases = {"default"}
def test_allowed_database_queries(self):
Car.objects.first()
def test_allowed_database_chunked_cursor_queries(self):
next(Car.objects.iterator(), None)
class DatabaseAliasTests(SimpleTestCase):
def setUp(self):
self.addCleanup(setattr, self.__class__, "databases", self.databases)
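        # The tests below overwrite `databases` on the class itself, so
        # restore the original value after each test.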
def test_no_close_match(self):
self.__class__.databases = {"void"}
message = (
"test_utils.tests.DatabaseAliasTests.databases refers to 'void' which is "
"not defined in settings.DATABASES."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
self._validate_databases()
def test_close_match(self):
self.__class__.databases = {"defualt"}
message = (
"test_utils.tests.DatabaseAliasTests.databases refers to 'defualt' which "
"is not defined in settings.DATABASES. Did you mean 'default'?"
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
self._validate_databases()
def test_match(self):
self.__class__.databases = {"default", "other"}
self.assertEqual(self._validate_databases(), frozenset({"default", "other"}))
def test_all(self):
self.__class__.databases = "__all__"
self.assertEqual(self._validate_databases(), frozenset(connections))
@isolate_apps("test_utils", attr_name="class_apps")
class IsolatedAppsTests(SimpleTestCase):
def test_installed_apps(self):
self.assertEqual(
[app_config.label for app_config in self.class_apps.get_app_configs()],
["test_utils"],
)
def test_class_decoration(self):
class ClassDecoration(models.Model):
pass
self.assertEqual(ClassDecoration._meta.apps, self.class_apps)
@isolate_apps("test_utils", kwarg_name="method_apps")
def test_method_decoration(self, method_apps):
class MethodDecoration(models.Model):
pass
self.assertEqual(MethodDecoration._meta.apps, method_apps)
def test_context_manager(self):
with isolate_apps("test_utils") as context_apps:
class ContextManager(models.Model):
pass
self.assertEqual(ContextManager._meta.apps, context_apps)
@isolate_apps("test_utils", kwarg_name="method_apps")
def test_nested(self, method_apps):
class MethodDecoration(models.Model):
pass
with isolate_apps("test_utils") as context_apps:
class ContextManager(models.Model):
pass
with isolate_apps("test_utils") as nested_context_apps:
class NestedContextManager(models.Model):
pass
self.assertEqual(MethodDecoration._meta.apps, method_apps)
self.assertEqual(ContextManager._meta.apps, context_apps)
self.assertEqual(NestedContextManager._meta.apps, nested_context_apps)
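# isolate_apps points models defined within its scope at a temporary app
# registry instead of the global one, so the throwaway model classes above
# never leak into other test cases.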
class DoNothingDecorator(TestContextDecorator):
def enable(self):
pass
def disable(self):
pass
class TestContextDecoratorTests(SimpleTestCase):
@mock.patch.object(DoNothingDecorator, "disable")
def test_exception_in_setup(self, mock_disable):
"""An exception is setUp() is reraised after disable() is called."""
class ExceptionInSetUp(unittest.TestCase):
def setUp(self):
raise NotImplementedError("reraised")
decorator = DoNothingDecorator()
decorated_test_class = decorator.__call__(ExceptionInSetUp)()
self.assertFalse(mock_disable.called)
with self.assertRaisesMessage(NotImplementedError, "reraised"):
decorated_test_class.setUp()
decorated_test_class.doCleanups()
self.assertTrue(mock_disable.called)
def test_cleanups_run_after_tearDown(self):
calls = []
class SaveCallsDecorator(TestContextDecorator):
def enable(self):
calls.append("enable")
def disable(self):
calls.append("disable")
class AddCleanupInSetUp(unittest.TestCase):
def setUp(self):
calls.append("setUp")
self.addCleanup(lambda: calls.append("cleanup"))
decorator = SaveCallsDecorator()
decorated_test_class = decorator.__call__(AddCleanupInSetUp)()
decorated_test_class.setUp()
decorated_test_class.tearDown()
decorated_test_class.doCleanups()
self.assertEqual(calls, ["enable", "setUp", "cleanup", "disable"])
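# A usage sketch (illustrative only): a TestContextDecorator subclass such as
# DoNothingDecorator above can wrap a whole test class, decorate a single
# test method, or serve as a context manager around a block of code:
#
#     @DoNothingDecorator()
#     class DecoratedTests(SimpleTestCase):
#         def test_something(self):
#             with DoNothingDecorator():
#                 ...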
|
f5f26e5a95be6dd022af40015d4eb9bc96208cdba5a8900e308081e355e8062d | import datetime
import os
import re
import unittest
from unittest import mock
from urllib.parse import parse_qsl, urljoin, urlparse
try:
import zoneinfo
except ImportError:
from backports import zoneinfo
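# zoneinfo is in the standard library from Python 3.9; on older interpreters
# the backports.zoneinfo package provides the same API.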
try:
import pytz
except ImportError:
pytz = None
from django.contrib import admin
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.db import connection
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import (
TestCase,
modify_settings,
override_settings,
skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, translation
from django.utils.cache import get_max_age
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor,
AdminOrderedAdminMethod,
AdminOrderedCallable,
AdminOrderedField,
AdminOrderedModelMethod,
Album,
Answer,
Answer2,
Article,
BarAccount,
Book,
Bookmark,
Box,
Category,
Chapter,
ChapterXtra1,
ChapterXtra2,
Character,
Child,
Choice,
City,
Collector,
Color,
ComplexSortedPerson,
CoverLetter,
CustomArticle,
CyclicOne,
CyclicTwo,
DooHickey,
Employee,
EmptyModel,
Fabric,
FancyDoodad,
FieldOverridePost,
FilteredManager,
FooAccount,
FoodDelivery,
FunkyTag,
Gallery,
Grommet,
Inquisition,
Language,
Link,
MainPrepopulated,
Media,
ModelWithStringPrimaryKey,
OtherStory,
Paper,
Parent,
ParentWithDependentChildren,
ParentWithUUIDPK,
Person,
Persona,
Picture,
Pizza,
Plot,
PlotDetails,
PluggableSearchPerson,
Podcast,
Post,
PrePopulatedPost,
Promo,
Question,
ReadablePizza,
ReadOnlyPizza,
ReadOnlyRelatedField,
Recommendation,
Recommender,
RelatedPrepopulated,
RelatedWithUUIDPKModel,
Report,
Restaurant,
RowLevelChangePermissionModel,
SecretHideout,
Section,
ShortMessage,
Simple,
Song,
State,
Story,
SuperSecretHideout,
SuperVillain,
Telegram,
TitleTranslation,
Topping,
Traveler,
UnchangeableObject,
UndeletableObject,
UnorderedObject,
UserProxy,
Villain,
Vodcast,
Whatsit,
Widget,
Worker,
WorkHour,
)
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
MULTIPART_ENCTYPE = 'enctype="multipart/form-data"'
def make_aware_datetimes(dt, iana_key):
"""Makes one aware datetime for each supported time zone provider."""
yield dt.replace(tzinfo=zoneinfo.ZoneInfo(iana_key))
if pytz is not None:
yield pytz.timezone(iana_key).localize(dt, is_dst=None)
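# For example, make_aware_datetimes(datetime.datetime(2020, 1, 1), "UTC")
# yields one zoneinfo-aware datetime, plus a pytz-aware one when pytz is
# installed.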
class AdminFieldExtractionMixin:
"""
Helper methods for extracting data from AdminForm.
"""
def get_admin_form_fields(self, response):
"""
Return a list of AdminFields for the AdminForm in the response.
"""
fields = []
for fieldset in response.context["adminform"]:
for field_line in fieldset:
fields.extend(field_line)
return fields
def get_admin_readonly_fields(self, response):
"""
Return the readonly fields for the response's AdminForm.
"""
return [f for f in self.get_admin_form_fields(response) if f.is_readonly]
def get_admin_readonly_field(self, response, field_name):
"""
Return the readonly field for the given field_name.
"""
admin_readonly_fields = self.get_admin_readonly_fields(response)
for field in admin_readonly_fields:
if field.field["name"] == field_name:
return field
@override_settings(ROOT_URLCONF="admin_views.urls", USE_I18N=True, LANGUAGE_CODE="en")
class AdminViewBasicTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 1",
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 2",
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.color1 = Color.objects.create(value="Red", warm=True)
cls.color2 = Color.objects.create(value="Orange", warm=True)
cls.color3 = Color.objects.create(value="Blue", warm=False)
cls.color4 = Color.objects.create(value="Green", warm=False)
cls.fab1 = Fabric.objects.create(surface="x")
cls.fab2 = Fabric.objects.create(surface="y")
cls.fab3 = Fabric.objects.create(surface="plain")
cls.b1 = Book.objects.create(name="Book 1")
cls.b2 = Book.objects.create(name="Book 2")
cls.pro1 = Promo.objects.create(name="Promo 1", book=cls.b1)
cls.pro1 = Promo.objects.create(name="Promo 2", book=cls.b2)
cls.chap1 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b1
)
cls.chap2 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b1
)
cls.chap3 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b2
)
cls.chap4 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b2
)
cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra="ChapterXtra1 1")
cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra="ChapterXtra1 2")
Actor.objects.create(name="Palin", age=27)
# Post data for edit inline
cls.inline_post_data = {
"name": "Test section",
# inline data
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": cls.a1.pk,
            # There is no title in the database; give one here or the formset
            # will fail.
"article_set-0-title": "Norske bostaver æøå skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": cls.s1.pk,
"article_set-1-id": cls.a2.pk,
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": cls.a3.pk,
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
def setUp(self):
self.client.force_login(self.superuser)
def assertContentBefore(self, response, text1, text2, failing_msg=None):
"""
Testing utility asserting that text1 appears before text2 in response
content.
"""
self.assertEqual(response.status_code, 200)
self.assertLess(
response.content.index(text1.encode()),
response.content.index(text2.encode()),
(failing_msg or "")
+ "\nResponse:\n"
+ response.content.decode(response.charset),
)
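    # Usage sketch: assertContentBefore(response, "Alpha", "Beta") passes only
    # if "Alpha" occurs earlier in response.content than "Beta"; on failure
    # the decoded response body is appended to failing_msg.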
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
        If you leave off the trailing slash, the app should redirect and add it.
"""
add_url = reverse("admin:admin_views_article_add")
response = self.client.get(add_url[:-1])
self.assertRedirects(response, add_url, status_code=301)
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get(
reverse("admin:admin_views_section_add"), {"name": "My Section"}
)
self.assertContains(
response,
'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response",
)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET_string_PK(self):
"""
GET on the change_view (when passing a string as the PK argument for a
model with an integer PK field) redirects to the index page with a
message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(quote("abc/<b>"),)),
follow=True,
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["section with ID “abc/<b>” doesn’t exist. Perhaps it was deleted?"],
)
def test_basic_edit_GET_old_url_redirect(self):
"""
The change URL changed in Django 1.9, but the old one still redirects.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)).replace(
"change/", ""
)
)
self.assertRedirects(
response, reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
def test_basic_inheritance_GET_string_PK(self):
"""
GET on the change_view (for inherited models) redirects to the index
page with a message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_supervillain_change", args=("abc",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["super villain with ID “abc” doesn’t exist. Perhaps it was deleted?"],
)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_section_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""HTTP response from a popup is properly escaped."""
post_data = {
IS_POPUP_VAR: "1",
"title": "title with a new\nline",
"content": "some content",
"date_0": "2010-09-10",
"date_1": "14:55:39",
}
response = self.client.post(reverse("admin:admin_views_article_add"), post_data)
self.assertContains(response, "title with a new\\nline")
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
url = reverse("admin:admin_views_section_change", args=(self.s1.pk,))
response = self.client.post(url, self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as_delete_inline(self):
"""
Should be able to "Save as new" while also deleting an inline.
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-2-DELETE": "1",
"article_set-3-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302)
# started with 3 articles, one was deleted.
self.assertEqual(Section.objects.latest("id").article_set.count(), 2)
def test_change_list_column_field_classes(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
        # Callables display the callable name.
self.assertContains(response, "column-callable_year")
self.assertContains(response, "field-callable_year")
        # Lambdas display as "lambda" + the index at which they appear in
        # list_display.
self.assertContains(response, "column-lambda8")
self.assertContains(response, "field-lambda8")
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 2}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_property(self):
"""
Sort on a list_display field that is a property (column 10 is
a property in Article model).
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 10}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on property are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on property are out of order.",
)
def test_change_list_sorting_callable_query_expression(self):
"""Query expressions may be used for admin_order_field."""
tests = [
("order_by_expression", 9),
("order_by_f_expression", 12),
("order_by_orderby_expression", 13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_callable_query_expression_reverse(self):
tests = [
("order_by_expression", -9),
("order_by_f_expression", -12),
("order_by_orderby_expression", -13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-3"}
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on Model method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on Model method are out of order.",
)
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "4"}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
td = '<td class="field-model_property_year">%s</td>'
td_2000, td_2008, td_2009 = td % 2000, td % 2008, td % 2009
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "6"}
)
self.assertContentBefore(
response,
td_2009,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2000,
"Results of sorting on ModelAdmin method are out of order.",
)
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-6"}
)
self.assertContentBefore(
response,
td_2000,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2009,
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
# Sort by name, gender
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "1.2"}
)
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "-2.1"}
)
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
        changed, even if it is defined in `ModelAdmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
response = self.client.get(reverse("admin:admin_views_person_changelist"), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso="ur", name="Urdu")
l2 = Language.objects.create(iso="ar", name="Arabic")
link1 = reverse("admin:admin_views_language_change", args=(quote(l1.pk),))
link2 = reverse("admin:admin_views_language_change", args=(quote(l2.pk),))
response = self.client.get(reverse("admin:admin_views_language_changelist"), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(
reverse("admin:admin_views_language_changelist"), {"o": "-1"}
)
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_podcast_change", args=(p2.pk,))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# The changelist displays the correct columns if two columns correspond
# to the same ordering field.
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(quote(p1.pk),))
link2 = reverse("admin:admin_views_podcast_change", args=(quote(p2.pk),))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse("admin:admin_views_complexsortedperson_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_complexsortedperson_change", args=(p2.pk,))
response = self.client.get(
reverse("admin:admin_views_complexsortedperson_changelist"), {}
)
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, "Name")
self.assertContains(response, "Colored name")
# Check order
self.assertContentBefore(response, "Name", "Colored name")
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
The admin shows default sort indicators for all kinds of 'ordering'
fields: field names, method on the model admin and model itself, and
other callables. See #17252.
"""
models = [
(AdminOrderedField, "adminorderedfield"),
(AdminOrderedModelMethod, "adminorderedmodelmethod"),
(AdminOrderedAdminMethod, "adminorderedadminmethod"),
(AdminOrderedCallable, "adminorderedcallable"),
]
for model, url in models:
model.objects.create(stuff="The Last Item", order=3)
model.objects.create(stuff="The First Item", order=1)
model.objects.create(stuff="The Middle Item", order=2)
response = self.client.get(
reverse("admin:admin_views_%s_changelist" % url), {}
)
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(
response.context["cl"].get_ordering_field_columns(), {2: "asc"}
)
# Check order of records.
self.assertContentBefore(response, "The First Item", "The Middle Item")
self.assertContentBefore(response, "The Middle Item", "The Last Item")
def test_has_related_field_in_list_display_fk(self):
"""Joins shouldn't be performed for <FK>_id fields in list display."""
state = State.objects.create(name="Karnataka")
City.objects.create(state=state, name="Bangalore")
response = self.client.get(reverse("admin:admin_views_city_changelist"), {})
response.context["cl"].list_display = ["id", "name", "state"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["id", "name", "state_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_has_related_field_in_list_display_o2o(self):
"""Joins shouldn't be performed for <O2O>_id fields in list display."""
media = Media.objects.create(name="Foo")
Vodcast.objects.create(media=media)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"), {})
response.context["cl"].list_display = ["media"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["media_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_limited_filter(self):
"""Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
This also tests relation-spanning filters (e.g. 'color__value').
"""
response = self.client.get(reverse("admin:admin_views_thing_changelist"))
self.assertContains(
response,
'<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view",
)
self.assertNotContains(
response,
'<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to",
)
def test_relation_spanning_filters(self):
changelist_url = reverse("admin:admin_views_chapterxtra1_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
"chap__id__exact": {
"values": [c.id for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.id == value,
},
"chap__title": {
"values": [c.title for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.title == value,
},
"chap__book__id__exact": {
"values": [b.id for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.id == value,
},
"chap__book__name": {
"values": [b.name for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.name == value,
},
"chap__book__promo__id__exact": {
"values": [p.id for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
id=value
).exists(),
},
"chap__book__promo__name": {
"values": [p.name for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
name=value
).exists(),
},
# A forward relation (book) after a reverse relation (promo).
"guest_author__promo__book__id__exact": {
"values": [p.id for p in Book.objects.all()],
"test": lambda obj, value: obj.guest_author.promo_set.filter(
book=value
).exists(),
},
}
for filter_path, params in filters.items():
for value in params["values"]:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s"' % query_string)
# ensure link works
filtered_response = self.client.get(
"%s?%s" % (changelist_url, query_string)
)
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context["cl"].queryset.all():
self.assertTrue(params["test"](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse("admin:admin_views_thing_changelist")
response = self.client.get(changelist_url, {"notarealfield": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {"notarealfield__whatever": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
response = self.client.get(
changelist_url, {"color__id__exact": "StringNotInteger!"}
)
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {"pub_date__gte": "foo"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(
title="I Could Go Anywhere",
content="Versatile",
date=datetime.datetime.now(),
)
changelist_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, "4 articles")
response = self.client.get(changelist_url, {"section__isnull": "false"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "0"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "true"})
self.assertContains(response, "1 article")
response = self.client.get(changelist_url, {"section__isnull": "1"})
self.assertContains(response, "1 article")
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(
response,
'<form id="logout-form" method="post" action="%s">'
% reverse("admin:logout"),
)
self.assertContains(
response, '<a href="%s">' % reverse("admin:password_change")
)
def test_named_group_field_choices_change_list(self):
"""
Ensures the admin changelist shows correct values in the relevant column
for rows corresponding to instances of a model in which a named group
has been used in the choices option of a field.
"""
link1 = reverse("admin:admin_views_fabric_change", args=(self.fab1.pk,))
link2 = reverse("admin:admin_views_fabric_change", args=(self.fab2.pk,))
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist table isn't showing the right human-readable values "
"set by a model field 'choices' option named group."
)
self.assertContains(
response,
'<a href="%s">Horizontal</a>' % link1,
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="%s">Vertical</a>' % link2,
msg_prefix=fail_msg,
html=True,
)
def test_named_group_field_choices_filter(self):
"""
        Ensures the filter UI displays correctly when at least one named group has
been used in the choices option of a model field.
"""
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist filter isn't showing options contained inside a model "
"field 'choices' option named group."
)
self.assertContains(response, '<div id="changelist-filter">')
self.assertContains(
response,
'<a href="?surface__exact=x">Horizontal</a>',
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="?surface__exact=y">Vertical</a>',
msg_prefix=fail_msg,
html=True,
)
def test_change_list_null_boolean_display(self):
Post.objects.create(public=None)
response = self.client.get(reverse("admin:admin_views_post_changelist"))
self.assertContains(response, "icon-unknown.svg")
def test_display_decorator_with_boolean_and_empty_value(self):
msg = (
"The boolean and empty_value arguments to the @display decorator "
"are mutually exclusive."
)
with self.assertRaisesMessage(ValueError, msg):
class BookAdmin(admin.ModelAdmin):
@admin.display(boolean=True, empty_value="(Missing)")
def is_published(self, obj):
return obj.publish_date is not None
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English but the selected language
is English. See #13388 and #3594 for more details.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("en-us"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "Choisir une heure")
def test_i18n_language_non_english_fallback(self):
"""
        The fallback language works properly in cases where the selected
        language cannot be found.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertContains(response, "Choisir une heure")
def test_jsi18n_with_context(self):
response = self.client.get(reverse("admin-extra-context:jsi18n"))
self.assertEqual(response.status_code, 200)
def test_jsi18n_format_fallback(self):
"""
The JavaScript i18n view doesn't return localized date/time formats
when the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="ru"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "%d.%m.%Y %H:%M:%S")
self.assertContains(response, "%Y-%m-%d %H:%M:%S")
def test_disallowed_filtering(self):
with self.assertLogs("django.security.DisallowedModelAdminLookup", "ERROR"):
response = self.client.get(
"%s?owner__email__startswith=fuzzy"
% reverse("admin:admin_views_album_changelist")
)
self.assertEqual(response.status_code, 400)
# Filters are allowed if explicitly included in list_filter
response = self.client.get(
"%s?color__value__startswith=red"
% reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
response = self.client.get(
"%s?color__value=red" % reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
# Filters should be allowed if they involve a local field without the
# need to allow them in list_filter or date_hierarchy.
response = self.client.get(
"%s?age__gt=30" % reverse("admin:admin_views_person_changelist")
)
self.assertEqual(response.status_code, 200)
e1 = Employee.objects.create(
name="Anonymous", gender=1, age=22, alive=True, code="123"
)
e2 = Employee.objects.create(
name="Visitor", gender=2, age=19, alive=True, code="124"
)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
response = self.client.get(reverse("admin:admin_views_workhour_changelist"))
self.assertContains(response, "employee__person_ptr__exact")
response = self.client.get(
"%s?employee__person_ptr__exact=%d"
% (reverse("admin:admin_views_workhour_changelist"), e1.pk)
)
self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
url = reverse("admin:admin_views_section_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "missing_field"})
self.assertEqual(response.status_code, 400)
        # Specifying a field that is not referred to by any other model registered
# to this admin site should raise an exception.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(
reverse("admin:admin_views_section_changelist"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
# Primary key should always be allowed, even if the referenced model
# isn't registered.
response = self.client.get(
reverse("admin:admin_views_notreferenced_changelist"), {TO_FIELD_VAR: "id"}
)
self.assertEqual(response.status_code, 200)
        # Specifying a field referenced by another model through an m2m should
        # be allowed.
response = self.client.get(
reverse("admin:admin_views_recipe_changelist"), {TO_FIELD_VAR: "rname"}
)
self.assertEqual(response.status_code, 200)
# Specifying a field referenced through a reverse m2m relationship
# should be allowed.
response = self.client.get(
reverse("admin:admin_views_ingredient_changelist"), {TO_FIELD_VAR: "iname"}
)
self.assertEqual(response.status_code, 200)
        # Specifying a field that is not referred to by any other model
        # directly registered to this admin site but registered through
        # inheritance should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyparent_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
        # Specifying a field that is only referred to by an inline of a
        # registered model should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyinline_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
        # #25622 - Specifying a field of a model only referred to by a generic
# relation should raise DisallowedModelAdminToField.
url = reverse("admin:admin_views_referencedbygenrel_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "object_id"})
self.assertEqual(response.status_code, 400)
# We also want to prevent the add, change, and delete views from
# leaking a disallowed field value.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(
reverse("admin:admin_views_section_add"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
section = Section.objects.create()
url = reverse("admin:admin_views_section_change", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
url = reverse("admin:admin_views_section_delete", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
def test_allowed_filtering_15103(self):
"""
        Regression test for ticket 15103 - filtering on fields defined in a
ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
can break.
"""
# Filters should be allowed if they are defined on a ForeignKey
# pointing to this model.
url = "%s?leader__name=Palin&leader__age=27" % reverse(
"admin:admin_views_inquisition_changelist"
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get(
"%s?%s" % (reverse("admin:admin_views_actor_changelist"), IS_POPUP_VAR)
)
self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
def test_hide_change_password(self):
"""
Tests if the "change password" link in the admin is hidden if the User
does not have a usable password set.
(against 9bea85795705d015cdadc82c68b99196a8554f5c)
"""
user = User.objects.get(username="super")
user.set_unusable_password()
user.save()
self.client.force_login(user)
response = self.client.get(reverse("admin:index"))
self.assertNotContains(
response,
reverse("admin:password_change"),
msg_prefix=(
'The "change password" link should not be displayed if a user does not '
"have a usable password."
),
)
def test_change_view_with_show_delete_extra_context(self):
"""
The 'show_delete' context variable in the admin's change view controls
the display of the delete button.
"""
instance = UndeletableObject.objects.create(name="foo")
response = self.client.get(
reverse("admin:admin_views_undeletableobject_change", args=(instance.pk,))
)
self.assertNotContains(response, "deletelink")
def test_change_view_logs_m2m_field_changes(self):
"""Changes to ManyToManyFields are included in the object's history."""
pizza = ReadablePizza.objects.create(name="Cheese")
cheese = Topping.objects.create(name="cheese")
post_data = {"name": pizza.name, "toppings": [cheese.pk]}
response = self.client.post(
reverse("admin:admin_views_readablepizza_change", args=(pizza.pk,)),
post_data,
)
self.assertRedirects(
response, reverse("admin:admin_views_readablepizza_changelist")
)
pizza_ctype = ContentType.objects.get_for_model(
ReadablePizza, for_concrete_model=False
)
log = LogEntry.objects.filter(
content_type=pizza_ctype, object_id=pizza.pk
).first()
self.assertEqual(log.get_change_message(), "Changed Toppings.")
def test_allows_attributeerror_to_bubble_up(self):
"""
AttributeErrors are allowed to bubble when raised inside a change list
view. Requires a model to be created so there's something to display.
Refs: #16655, #18593, and #18747
"""
Simple.objects.create()
with self.assertRaises(AttributeError):
self.client.get(reverse("admin:admin_views_simple_changelist"))
def test_changelist_with_no_change_url(self):
"""
ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
for change_view is removed from get_urls (#20934).
"""
o = UnchangeableObject.objects.create()
response = self.client.get(
reverse("admin:admin_views_unchangeableobject_changelist")
)
# Check the format of the shown object -- shouldn't contain a change link
self.assertContains(
response, '<th class="field-__str__">%s</th>' % o, html=True
)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = "/test_admin/admin/admin_views/"
confirm_good_url = reverse(
"admin:app_list", kwargs={"app_label": "admin_views"}
)
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", kwargs={"app_label": "this_should_fail"})
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", args=("admin_views2",))
def test_resolve_admin_views(self):
index_match = resolve("/test_admin/admin4/")
list_match = resolve("/test_admin/admin4/auth/user/")
self.assertIs(index_match.func.admin_site, customadmin.simple_site)
self.assertIsInstance(
list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin
)
def test_adminsite_display_site_url(self):
"""
#13749 - Admin should display link to front-end site 'View site'
"""
url = reverse("admin:index")
response = self.client.get(url)
self.assertEqual(response.context["site_url"], "/my-site-url/")
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
def test_date_hierarchy_empty_queryset(self):
self.assertIs(Question.objects.exists(), False)
response = self.client.get(reverse("admin:admin_views_answer2_changelist"))
self.assertEqual(response.status_code, 200)
@override_settings(TIME_ZONE="America/Sao_Paulo", USE_TZ=True)
def test_date_hierarchy_timezone_dst(self):
# This datetime doesn't exist in this timezone due to DST.
for date in make_aware_datetimes(
datetime.datetime(2016, 10, 16, 15), "America/Sao_Paulo"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=16")
self.assertContains(response, "question__expires__month=10")
self.assertContains(response, "question__expires__year=2016")
@override_settings(TIME_ZONE="America/Los_Angeles", USE_TZ=True)
def test_date_hierarchy_local_date_differ_from_utc(self):
# This datetime is 2017-01-01 in UTC.
for date in make_aware_datetimes(
datetime.datetime(2016, 12, 31, 16), "America/Los_Angeles"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=31")
self.assertContains(response, "question__expires__month=12")
self.assertContains(response, "question__expires__year=2016")
def test_sortable_by_columns_subset(self):
expected_sortable_fields = ("date", "callable_year")
expected_not_sortable_fields = (
"content",
"model_year",
"modeladmin_year",
"model_year_reversed",
"section",
)
response = self.client.get(reverse("admin6:admin_views_article_changelist"))
for field_name in expected_sortable_fields:
self.assertContains(
response, '<th scope="col" class="sortable column-%s">' % field_name
)
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
def test_get_sortable_by_columns_subset(self):
response = self.client.get(reverse("admin6:admin_views_actor_changelist"))
self.assertContains(response, '<th scope="col" class="sortable column-age">')
self.assertContains(response, '<th scope="col" class="column-name">')
def test_sortable_by_no_column(self):
expected_not_sortable_fields = ("title", "book")
response = self.client.get(reverse("admin6:admin_views_chapter_changelist"))
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_get_sortable_by_no_column(self):
response = self.client.get(reverse("admin6:admin_views_color_changelist"))
self.assertContains(response, '<th scope="col" class="column-value">')
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_app_index_context(self):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(
response,
"<title>Admin_Views administration | Django site admin</title>",
)
self.assertEqual(response.context["title"], "Admin_Views administration")
self.assertEqual(response.context["app_label"], "admin_views")
# Models are sorted alphabetically by default.
models = [model["name"] for model in response.context["app_list"][0]["models"]]
self.assertSequenceEqual(models, sorted(models))
def test_app_index_context_reordered(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin2:app_list", args=("admin_views",)))
self.assertContains(
response,
"<title>Admin_Views administration | Django site admin</title>",
)
# Models are in reverse order.
models = [model["name"] for model in response.context["app_list"][0]["models"]]
self.assertSequenceEqual(models, sorted(models, reverse=True))
def test_change_view_subtitle_per_object(self):
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_view_subtitle_per_object(self):
viewuser = User.objects.create_user(
username="viewuser",
password="secret",
is_staff=True,
)
viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", Article._meta)),
)
self.client.force_login(viewuser)
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_formset_kwargs_can_be_overridden(self):
response = self.client.get(reverse("admin:admin_views_city_add"))
self.assertContains(response, "overridden_name")
def test_render_views_no_subtitle(self):
tests = [
reverse("admin:index"),
reverse("admin:password_change"),
reverse("admin:app_list", args=("admin_views",)),
reverse("admin:admin_views_article_delete", args=(self.a1.pk,)),
reverse("admin:admin_views_article_history", args=(self.a1.pk,)),
]
for url in tests:
with self.subTest(url=url):
with self.assertNoLogs("django.template", "DEBUG"):
self.client.get(url)
# Login must be after logout.
with self.assertNoLogs("django.template", "DEBUG"):
self.client.post(reverse("admin:logout"))
self.client.get(reverse("admin:login"))
def test_render_delete_selected_confirmation_no_subtitle(self):
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": self.a1.pk,
}
with self.assertNoLogs("django.template", "DEBUG"):
self.client.post(reverse("admin:admin_views_article_changelist"), post_data)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation."
"NumericPasswordValidator"
)
},
]
)
def test_password_change_helptext(self):
response = self.client.get(reverse("admin:password_change"))
self.assertContains(
response, '<div class="help" id="id_new_password1_helptext">'
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation." "NumericPasswordValidator"
)
},
],
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
            # Put this app's and the shared tests' templates dirs in DIRS to
# take precedence over the admin's templates dir.
"DIRS": [
os.path.join(os.path.dirname(__file__), "templates"),
os.path.join(os.path.dirname(os.path.dirname(__file__)), "templates"),
],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminCustomTemplateTests(AdminViewBasicTestCase):
def test_custom_model_admin_templates(self):
# Test custom change list template with custom extra context
response = self.client.get(
reverse("admin:admin_views_customarticle_changelist")
)
self.assertContains(response, "var hello = 'Hello!';")
self.assertTemplateUsed(response, "custom_admin/change_list.html")
# Test custom add form template
response = self.client.get(reverse("admin:admin_views_customarticle_add"))
self.assertTemplateUsed(response, "custom_admin/add_form.html")
# Add an article so we can test delete, change, and history views
post = self.client.post(
reverse("admin:admin_views_customarticle_add"),
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
},
)
self.assertRedirects(
post, reverse("admin:admin_views_customarticle_changelist")
)
self.assertEqual(CustomArticle.objects.count(), 1)
article_pk = CustomArticle.objects.all()[0].pk
# Test custom delete, change, and object history templates
# Test custom change form template
response = self.client.get(
reverse("admin:admin_views_customarticle_change", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/change_form.html")
response = self.client.get(
reverse("admin:admin_views_customarticle_delete", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/delete_confirmation.html")
response = self.client.post(
reverse("admin:admin_views_customarticle_changelist"),
data={
"index": 0,
"action": ["delete_selected"],
"_selected_action": ["1"],
},
)
self.assertTemplateUsed(
response, "custom_admin/delete_selected_confirmation.html"
)
response = self.client.get(
reverse("admin:admin_views_customarticle_history", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/object_history.html")
# A custom popup response template may be specified by
# ModelAdmin.popup_response_template.
response = self.client.post(
reverse("admin:admin_views_customarticle_add") + "?%s=1" % IS_POPUP_VAR,
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
IS_POPUP_VAR: "1",
},
)
self.assertEqual(response.template_name, "custom_admin/popup_response.html")
def test_extended_bodyclass_template_change_form(self):
"""
The admin/change_form.html template uses block.super in the
bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_change_password_template(self):
user = User.objects.get(username="super")
response = self.client.get(
reverse("admin:auth_user_password_change", args=(user.id,))
)
# The auth/user/change_password.html template uses super in the
# bodyclass block.
self.assertContains(response, "bodyclass_consistency_check ")
# When a site has multiple passwords in the browser's password manager,
        # a browser pop-up asks which user the new password is for. To prevent
# this, the username is added to the change password form.
self.assertContains(
response, '<input type="text" name="username" value="super" class="hidden">'
)
# help text for passwords has an id.
self.assertContains(
response,
'<div class="help" id="id_password1_helptext"><ul><li>'
"Your password can’t be too similar to your other personal information."
"</li><li>Your password can’t be entirely numeric.</li></ul></div>",
)
self.assertContains(
response,
'<div class="help" id="id_password2_helptext">'
"Enter the same password as before, for verification.</div>",
)
def test_extended_bodyclass_template_index(self):
"""
The admin/index.html template uses block.super in the bodyclass block.
"""
response = self.client.get(reverse("admin:index"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_change_list(self):
"""
        The admin/change_list.html template uses block.super
in the bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_login(self):
"""
The admin/login.html template uses block.super in the
bodyclass block.
"""
self.client.logout()
response = self.client.get(reverse("admin:login"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_confirmation(self):
"""
The admin/delete_confirmation.html template uses
block.super in the bodyclass block.
"""
group = Group.objects.create(name="foogroup")
response = self.client.get(reverse("admin:auth_group_delete", args=(group.id,)))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_selected_confirmation(self):
"""
The admin/delete_selected_confirmation.html template uses
block.super in bodyclass block.
"""
group = Group.objects.create(name="foogroup")
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": group.id,
}
response = self.client.post(reverse("admin:auth_group_changelist"), post_data)
self.assertEqual(response.context["site_header"], "Django administration")
self.assertContains(response, "bodyclass_consistency_check ")
def test_filter_with_custom_template(self):
"""
A custom template can be used to render an admin filter.
"""
response = self.client.get(reverse("admin:admin_views_color2_changelist"))
self.assertTemplateUsed(response, "custom_filter_template.html")
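# ListFilter.template is the documented hook behind the test above; a minimal
# sketch of a filter that could produce this behavior (a hypothetical class,
# not the suite's actual one):
#
#     class CustomTemplateFilter(admin.SimpleListFilter):
#         title = "color"
#         parameter_name = "color"
#         template = "custom_filter_template.html"
#
#         def lookups(self, request, model_admin):
#             return [("red", "Red")]
#
#         def queryset(self, request, queryset):
#             return queryset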
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewFormUrlTest(TestCase):
current_app = "admin3"
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_change_form_URL_has_correct_value(self):
"""
        change_view() includes form_url in response.context.
"""
response = self.client.get(
reverse(
"admin:admin_views_section_change",
args=(self.s1.pk,),
current_app=self.current_app,
)
)
self.assertIn(
"form_url", response.context, msg="form_url not present in response.context"
)
self.assertEqual(response.context["form_url"], "pony")
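    # form_url is a documented argument to ModelAdmin.change_view(); a
    # minimal sketch of one way the "admin3" site could inject the value
    # asserted above (the suite's actual wiring lives elsewhere in the
    # admin_views app):
    #
    #     class SectionAdmin(admin.ModelAdmin):
    #         def change_view(self, request, object_id, form_url="", extra_context=None):
    #             return super().change_view(request, object_id, form_url="pony")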
def test_initial_data_can_be_overridden(self):
"""
The behavior for setting initial form data can be overridden in the
ModelAdmin class. Usually, the initial value is set via the GET params.
"""
response = self.client.get(
reverse("admin:admin_views_restaurant_add", current_app=self.current_app),
{"name": "test_value"},
)
        # This would be the usual behavior.
self.assertNotContains(response, 'value="test_value"')
        # This is the overridden behavior.
self.assertContains(response, 'value="overridden_value"')
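# ModelAdmin.get_changeform_initial_data() is the documented hook for the
# override tested above; a minimal sketch of what the Restaurant admin
# presumably does (its actual definition lives in the admin_views app):
#
#     class RestaurantAdmin(admin.ModelAdmin):
#         def get_changeform_initial_data(self, request):
#             return {"name": "overridden_value"}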
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminJavaScriptTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_js_minified_only_if_debug_is_false(self):
"""
The minified versions of the JS files are only used when DEBUG is False.
"""
with override_settings(DEBUG=False):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertNotContains(response, "vendor/jquery/jquery.js")
self.assertContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
with override_settings(DEBUG=True):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "vendor/jquery/jquery.js")
self.assertNotContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
@override_settings(ROOT_URLCONF="admin_views.urls")
class SaveAsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_as_duplication(self):
"""'save as' creates a new person"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)), post_data
)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
new_person = Person.objects.latest("id")
self.assertRedirects(
response, reverse("admin:admin_views_person_change", args=(new_person.pk,))
)
def test_save_as_continue_false(self):
"""
Saving a new object using "Save as new" redirects to the changelist
instead of the change view when ModelAdmin.save_as_continue=False.
"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
url = reverse(
"admin:admin_views_person_change",
args=(self.per1.pk,),
current_app=site2.name,
)
response = self.client.post(url, post_data)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
self.assertRedirects(
response,
reverse("admin:admin_views_person_changelist", current_app=site2.name),
)
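    # Both behaviors above are driven by two documented ModelAdmin options;
    # a minimal sketch of the presumed configuration (the actual admins live
    # elsewhere in the admin_views app):
    #
    #     class PersonAdmin(admin.ModelAdmin):
    #         save_as = True            # adds the "Save as new" button
    #         save_as_continue = False  # redirect to the changelist instead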
def test_save_as_new_with_validation_errors(self):
"""
        When you click "Save as new" and have a validation error, only the
        "Save as new" button is visible; the other save buttons are hidden.
"""
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)),
{
"_saveasnew": "",
"gender": "invalid",
"_addanother": "fail",
},
)
self.assertContains(response, "Please correct the errors below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_validation_errors_with_inlines(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "Child",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "_invalid",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_inlines_with_validation_errors(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "_invalid",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "Father",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
@override_settings(ROOT_URLCONF="admin_views.urls")
class CustomModelAdminTest(AdminViewBasicTestCase):
def test_custom_admin_site_login_form(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
login = self.client.post(
reverse("admin2:login"),
{
REDIRECT_FIELD_NAME: reverse("admin2:index"),
"username": "customform",
"password": "secret",
},
follow=True,
)
self.assertIsInstance(login, TemplateResponse)
self.assertContains(login, "custom form error")
self.assertContains(login, "path/to/media.css")
def test_custom_admin_site_login_template(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/login.html")
self.assertContains(response, "Hello from a custom login template")
def test_custom_admin_site_logout_template(self):
response = self.client.post(reverse("admin2:logout"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/logout.html")
self.assertContains(response, "Hello from a custom logout template")
def test_custom_admin_site_index_view_and_template(self):
response = self.client.get(reverse("admin2:index"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/index.html")
self.assertContains(response, "Hello from a custom index template *bar*")
def test_custom_admin_site_app_index_view_and_template(self):
response = self.client.get(reverse("admin2:app_list", args=("admin_views",)))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/app_index.html")
self.assertContains(response, "Hello from a custom app_index template")
def test_custom_admin_site_password_change_template(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(
response, "Hello from a custom password change form template"
)
def test_custom_admin_site_password_change_with_extra_context(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(response, "eggs")
def test_custom_admin_site_password_change_done_template(self):
response = self.client.get(reverse("admin2:password_change_done"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_done.html")
self.assertContains(
response, "Hello from a custom password change done template"
)
def test_custom_admin_site_view(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin2:my_view"))
self.assertEqual(response.content, b"Django is a magical pony!")
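    # AdminSite.get_urls() is the documented way to expose a view such as
    # my_view; a minimal sketch of the presumed wiring (the suite's actual
    # custom site is defined elsewhere in the admin_views app):
    #
    #     class CustomAdminSite(admin.AdminSite):
    #         def get_urls(self):
    #             return [
    #                 path("my_view/", self.admin_view(self.my_view), name="my_view"),
    #             ] + super().get_urls()
    #
    #         def my_view(self, request):
    #             return HttpResponse("Django is a magical pony!")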
def test_pwd_change_custom_template(self):
self.client.force_login(self.superuser)
su = User.objects.get(username="super")
response = self.client.get(
reverse("admin4:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 200)
def get_perm(Model, codename):
"""Return the permission object, for the Model"""
ct = ContentType.objects.get_for_model(Model, for_concrete_model=False)
return Permission.objects.get(content_type=ct, codename=codename)
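# For example, get_perm(Article, get_permission_codename("change",
# Article._meta)) fetches the auto-created "change_article" permission;
# get_permission_codename() simply joins the action and the model name.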
@override_settings(
ROOT_URLCONF="admin_views.urls",
# Test with the admin's documented list of required context processors.
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.nostaffuser = User.objects.create_user(
username="nostaff", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
another_section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
        # Set up permissions for the users who can view, add, change, and
        # delete.
opts = Article._meta
# User who can view Articles
cls.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", opts))
)
# User who can add Articles
cls.adduser.user_permissions.add(
get_perm(Article, get_permission_codename("add", opts))
)
# User who can change Articles
cls.changeuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
cls.nostaffuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
# User who can delete Articles
cls.deleteuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(Section, get_permission_codename("delete", Section._meta))
)
# login POST dicts
cls.index_url = reverse("admin:index")
cls.super_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "super",
"password": "secret",
}
cls.super_email_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "secret",
}
cls.super_email_bad_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "notsecret",
}
cls.adduser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "adduser",
"password": "secret",
}
cls.changeuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "changeuser",
"password": "secret",
}
cls.deleteuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "deleteuser",
"password": "secret",
}
cls.nostaff_login = {
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "nostaff",
"password": "secret",
}
cls.joepublic_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "joepublic",
"password": "secret",
}
cls.viewuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "viewuser",
"password": "secret",
}
cls.no_username_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"password": "secret",
}
def test_login(self):
"""
Make sure only staff members can log in.
        Successful posts to the login page will redirect to the original URL.
Unsuccessful attempts will continue to render the login page with
a 200 status code.
"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
# Super User
response = self.client.get(self.index_url)
self.assertRedirects(response, login_url)
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
        # Test what happens if a user enters an email address as the username.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
        # Only correct passwords get a username hint.
login = self.client.post(login_url, self.super_email_bad_login)
self.assertContains(login, ERROR_MESSAGE)
new_user = User(username="jondoe", password="secret", email="[email protected]")
new_user.save()
        # If there are multiple users with the same email address, logging in
        # with it shouldn't cause a 500.
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# View User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.viewuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Add User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.adduser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Change User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.changeuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Delete User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.deleteuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
        # A regular user should not be able to log in.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Requests without username should not return 500 errors.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.no_username_login)
self.assertEqual(login.status_code, 200)
self.assertFormError(
login.context["form"], "username", ["This field is required."]
)
def test_login_redirect_for_direct_get(self):
"""
Login redirect should be to the admin index page when going directly to
/admin/login/.
"""
response = self.client.get(reverse("admin:login"))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse("admin:index"))
def test_login_has_permission(self):
        # A regular user should not be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.joepublic_login
)
self.assertContains(login, "permission denied")
        # A user with permissions should be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.nostaff_login
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.post(reverse("has_permission_admin:logout"))
        # Staff users should be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"),
{
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "deleteuser",
"password": "secret",
},
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.post(reverse("has_permission_admin:logout"))
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = "the-answer=42"
redirect_url = "%s?%s" % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
"%s?%s" % (reverse("admin:login"), urlencode(new_next)), post_data
)
self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
"""Regression test for #19327"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with non-admin user fails
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with admin user while already logged in
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
def test_login_page_notice_for_non_staff_users(self):
"""
A logged-in non-staff user trying to access the admin index should be
presented with the login page and a hint indicating that the current
user doesn't have access to it.
"""
hint_template = "You are authenticated as {}"
# Anonymous user should not be shown the hint
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertNotContains(response, hint_template.format(""), status_code=200)
# Non-staff user should be shown the hint
self.client.force_login(self.nostaffuser)
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertContains(
response, hint_template.format(self.nostaffuser.username), status_code=200
)
def test_add_view(self):
"""Test add view restricts access and actually adds items."""
add_dict = {
"title": "Døm ikke",
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
        # The change user should not have access to add articles.
self.client.force_login(self.changeuser)
        # Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.post(reverse("admin:logout"))
        # The view user should not have access to add articles.
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
# Now give the user permission to add but not change.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.context["title"], "Add article")
self.assertContains(response, "<title>Add article | Django site admin</title>")
self.assertContains(
response, '<input type="submit" value="Save and view" name="_continue">'
)
post = self.client.post(
reverse("admin:admin_views_article_add"), add_dict, follow=False
)
self.assertEqual(post.status_code, 302)
self.assertEqual(Article.objects.count(), 4)
article = Article.objects.latest("pk")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(article.pk,))
)
self.assertContains(
response,
'<li class="success">The article “Døm ikke” was added successfully.</li>',
)
article.delete()
self.client.post(reverse("admin:logout"))
        # The add user may log in and POST to the add view, then is
        # redirected to the admin root.
self.client.force_login(self.adduser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
change_list_link = '› <a href="%s">Articles</a>' % reverse(
"admin:admin_views_article_changelist"
)
self.assertNotContains(
addpage,
change_list_link,
msg_prefix=(
"User restricted to add permission is given link to change list view "
"in breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 4)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, "Greetings from a created object")
self.client.post(reverse("admin:logout"))
# The addition was logged correctly
addition_log = LogEntry.objects.all()[0]
new_article = Article.objects.last()
article_ct = ContentType.objects.get_for_model(Article)
self.assertEqual(addition_log.user_id, self.adduser.pk)
self.assertEqual(addition_log.content_type_id, article_ct.pk)
self.assertEqual(addition_log.object_id, str(new_article.pk))
self.assertEqual(addition_log.object_repr, "Døm ikke")
self.assertEqual(addition_log.action_flag, ADDITION)
self.assertEqual(addition_log.get_change_message(), "Added.")
        # The superuser can add too, but is redirected to the change list view.
self.client.force_login(self.superuser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
self.assertContains(
addpage,
change_list_link,
msg_prefix=(
"Unrestricted user is not given link to change list view in "
"breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, reverse("admin:admin_views_article_changelist"))
self.assertEqual(Article.objects.count(), 5)
self.client.post(reverse("admin:logout"))
        # Refs #8509 - if a normal user is already logged in, it's possible
        # to switch to the superuser without error.
self.client.force_login(self.joepublicuser)
        # Make sure that if the user's session expires, data still persists.
self.client.force_login(self.superuser)
        # Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
@mock.patch("django.contrib.admin.options.InlineModelAdmin.has_change_permission")
def test_add_view_with_view_only_inlines(self, has_change_permission):
"""User with add permission to a section but view-only for inlines."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("add", Section._meta))
)
self.client.force_login(self.viewuser)
# Valid POST creates a new section.
data = {
"name": "New obj",
"article_set-TOTAL_FORMS": 0,
"article_set-INITIAL_FORMS": 0,
}
response = self.client.post(reverse("admin:admin_views_section_add"), data)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(Section.objects.latest("id").name, data["name"])
# InlineModelAdmin.has_change_permission()'s obj argument is always
# None during object add.
self.assertEqual(
[obj for (request, obj), _ in has_change_permission.call_args_list],
[None, None],
)
def test_change_view(self):
"""Change view should restrict access and allow users to edit items."""
change_dict = {
"title": "Ikke fordømt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
article_changelist_url = reverse("admin:admin_views_article_changelist")
        # The add user should not be able to view the list of articles or
        # change any of them.
self.client.force_login(self.adduser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.status_code, 403)
response = self.client.get(article_change_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.client.post(reverse("admin:logout"))
        # The view user can view articles but not make changes.
self.client.force_login(self.viewuser)
response = self.client.get(article_changelist_url)
self.assertContains(
response,
"<title>Select article to view | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to view</h1>")
self.assertEqual(response.context["title"], "Select article to view")
response = self.client.get(article_change_url)
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<label>Extra form field:</label>")
self.assertContains(
response,
'<a href="/test_admin/admin/admin_views/article/" class="closelink">Close'
"</a>",
)
self.assertEqual(response.context["title"], "View article")
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>Middle content</p>"
)
self.client.post(reverse("admin:logout"))
        # The change user can view all items and edit them.
self.client.force_login(self.changeuser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.context["title"], "Select article to change")
self.assertContains(
response,
"<title>Select article to change | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to change</h1>")
response = self.client.get(article_change_url)
self.assertEqual(response.context["title"], "Change article")
self.assertContains(
response,
"<title>Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
post = self.client.post(article_change_url, change_dict)
self.assertRedirects(post, article_changelist_url)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>edited article</p>"
)
        # One error in the form should produce the singular error message;
        # multiple errors, the plural one.
change_dict["title"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the error below.",
msg_prefix=(
"Singular error message not found in response to post with one error"
),
)
change_dict["content"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the errors below.",
msg_prefix=(
"Plural error message not found in response to post with multiple "
"errors"
),
)
self.client.post(reverse("admin:logout"))
# Test redirection when using row-level change permissions. Refs #11513.
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
r3 = RowLevelChangePermissionModel.objects.create(id=3, name="odd id mult 3")
r6 = RowLevelChangePermissionModel.objects.create(id=6, name="even id mult 3")
change_url_1 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r1.pk,)
)
change_url_2 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r2.pk,)
)
change_url_3 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r3.pk,)
)
change_url_6 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r6.pk,)
)
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1)
self.assertEqual(response.status_code, 403)
response = self.client.post(change_url_1, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertEqual(response.status_code, 403)
response = self.client.get(change_url_2)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_2, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertRedirects(response, self.index_url)
response = self.client.get(change_url_3)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_3, {"name": "changed"})
self.assertEqual(response.status_code, 403)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=3).name,
"odd id mult 3",
)
response = self.client.get(change_url_6)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_6, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=6).name, "changed"
)
self.assertRedirects(response, self.index_url)
self.client.post(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_1, {"name": "changed"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertContains(response, "login-form")
response = self.client.get(change_url_2, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_2, {"name": "changed again"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertContains(response, "login-form")
self.client.post(reverse("admin:logout"))
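    # The odd/even pattern above implies per-object permission checks; a
    # minimal sketch of the presumed ModelAdmin (the actual definition lives
    # elsewhere in the admin_views app), using the documented
    # has_change_permission() and has_view_permission() hooks:
    #
    #     class RowLevelChangePermissionModelAdmin(admin.ModelAdmin):
    #         def has_change_permission(self, request, obj=None):
    #             # Only objects with an even id may be changed.
    #             return obj is not None and obj.id % 2 == 0
    #
    #         def has_view_permission(self, request, obj=None):
    #             # Objects whose id is a multiple of 3 may be viewed.
    #             return obj is not None and obj.id % 3 == 0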
def test_change_view_without_object_change_permission(self):
"""
The object should be read-only if the user has permission to view it
and change objects of that type but not to change the current object.
"""
change_url = reverse("admin9:admin_views_article_change", args=(self.a1.pk,))
self.client.force_login(self.viewuser)
response = self.client.get(change_url)
self.assertEqual(response.context["title"], "View article")
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(
response,
'<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close'
"</a>",
)
def test_change_view_save_as_new(self):
"""
'Save as new' should raise PermissionDenied for users without the 'add'
permission.
"""
change_dict_save_as_new = {
"_saveasnew": "Save as new",
"title": "Ikke fordømt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
# Add user can perform "Save as new".
article_count = Article.objects.count()
self.client.force_login(self.adduser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), article_count + 1)
self.client.logout()
# Change user cannot perform "Save as new" (no 'add' permission).
article_count = Article.objects.count()
self.client.force_login(self.changeuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), article_count)
# User with both add and change permissions should be redirected to the
# change page for the newly created object.
article_count = Article.objects.count()
self.client.force_login(self.superuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(Article.objects.count(), article_count + 1)
new_article = Article.objects.latest("id")
self.assertRedirects(
post, reverse("admin:admin_views_article_change", args=(new_article.pk,))
)
def test_change_view_with_view_only_inlines(self):
"""
        A user with change permission on a section but view-only inlines.
"""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
# Valid POST changes the name.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 3,
"article_set-INITIAL_FORMS": 3,
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
def test_change_view_with_view_and_add_inlines(self):
"""User has view and add permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 6)
# Valid POST creates a new article.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-3-id": [""],
"article_set-3-title": ["A title"],
"article_set-3-content": ["Added content"],
"article_set-3-date_0": ["2008-3-18"],
"article_set-3-date_1": ["11:54:58"],
"article_set-3-section": [str(self.s1.pk)],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
self.assertEqual(Article.objects.count(), 4)
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 6)
def test_change_view_with_view_and_delete_inlines(self):
"""User has view and delete permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
# Inline POST details are ignored without delete permission.
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 3)
# Deletion successful when delete permission is added.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", Article._meta))
)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 2)
def test_delete_view(self):
"""Delete view should restrict access and actually delete items."""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_article_delete", args=(self.a1.pk,))
        # The add user should not be able to delete articles.
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
        # The view user should not be able to delete articles.
self.client.force_login(self.viewuser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
        # The delete user can delete.
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 3</li>")
        # The response contains a link to the related Article.
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 1</li>")
post = self.client.post(delete_url, delete_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Greetings from a deleted object")
article_ct = ContentType.objects.get_for_model(Article)
logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
self.assertEqual(logged.object_id, str(self.a1.pk))
def test_delete_view_with_no_default_permissions(self):
"""
The delete view allows users to delete collected objects without a
'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty).
"""
pizza = ReadOnlyPizza.objects.create(name="Double Cheese")
delete_url = reverse("admin:admin_views_readonlypizza_delete", args=(pizza.pk,))
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/readonlypizza/%s/" % pizza.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Read only pizzas: 1</li>")
post = self.client.post(delete_url, {"post": "yes"})
self.assertRedirects(
post, reverse("admin:admin_views_readonlypizza_changelist")
)
self.assertEqual(ReadOnlyPizza.objects.count(), 0)
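    # Meta.default_permissions controls which permissions are auto-created
    # for a model; a minimal sketch of the presumed model (the actual one
    # lives in the admin_views app):
    #
    #     class ReadOnlyPizza(models.Model):
    #         name = models.CharField(max_length=32)
    #
    #         class Meta:
    #             default_permissions = ()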
def test_delete_view_nonexistent_obj(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_article_delete", args=("nonexistent",))
response = self.client.get(url, follow=True)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID “nonexistent” doesn’t exist. Perhaps it was deleted?"],
)
def test_history_view(self):
"""History view should restrict access."""
        # The add user should not be able to view the list of articles or
        # change any of them.
self.client.force_login(self.adduser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 403)
self.client.post(reverse("admin:logout"))
        # The view user can view all items.
self.client.force_login(self.viewuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
self.client.post(reverse("admin:logout"))
        # The change user can view all items and edit them.
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
# Test redirection when using row-level change permissions. Refs #11513.
rl1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
rl2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.client.post(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
self.client.post(reverse("admin:logout"))
def test_history_view_bad_url(self):
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=("foo",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID “foo” doesn’t exist. Perhaps it was deleted?"],
)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
self.client.force_login(self.adduser)
# The user can't add sections yet, so they shouldn't see the "add section" link.
url = reverse("admin:admin_views_article_add")
add_link_text = "add_id_section"
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("add", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_conditionally_show_change_section_link(self):
"""
The foreign key widget should only show the "change related" button if
the user has permission to change that related item.
"""
def get_change_related(response):
return (
response.context["adminform"]
.form.fields["section"]
.widget.can_change_related
)
self.client.force_login(self.adduser)
# The user can't change sections yet, so they shouldn't see the
# "change section" link.
url = reverse("admin:admin_views_article_add")
change_link_text = "change_id_section"
response = self.client.get(url)
self.assertFalse(get_change_related(response))
self.assertNotContains(response, change_link_text)
# Allow the user to change sections too. Now they can see the
# "change section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("change", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_change_related(response))
self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
"""
The foreign key widget should only show the "delete related" button if
the user has permission to delete that related item.
"""
def get_delete_related(response):
return (
response.context["adminform"]
.form.fields["sub_section"]
.widget.can_delete_related
)
self.client.force_login(self.adduser)
# The user can't delete sections yet, so they shouldn't see the
# "delete section" link.
url = reverse("admin:admin_views_article_add")
delete_link_text = "delete_id_sub_section"
response = self.client.get(url)
self.assertFalse(get_delete_related(response))
self.assertNotContains(response, delete_link_text)
# Allow the user to delete sections too. Now they can see the
# "delete section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("delete", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_delete_related(response))
self.assertContains(response, delete_link_text)
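    # The three tests above exercise RelatedFieldWidgetWrapper, which the
    # admin wraps around related-field widgets; roughly:
    #
    #     widget = response.context["adminform"].form.fields["section"].widget
    #     widget.can_add_related     # user has add permission on Section
    #     widget.can_change_related  # user has change permission on Section
    #     widget.can_delete_related  # user has delete permission on Section
    #
    # and each flag decides whether the matching icon renders next to the
    # widget.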
def test_disabled_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_active = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_staff = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_app_list_permissions(self):
"""
        If a user has no module permissions, the app list returns a 404.
"""
opts = Article._meta
change_user = User.objects.get(username="changeuser")
permission = get_perm(Article, get_permission_codename("change", opts))
self.client.force_login(self.changeuser)
        # The user has no module permissions.
change_user.user_permissions.remove(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 404)
        # The user now has module permissions.
change_user.user_permissions.add(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk="foo")
shortcut_url = reverse("admin:view_on_site", args=(model_ctype.pk, obj.pk))
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
# Logged in? Redirect.
self.client.force_login(self.superuser)
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
        # The domain may depend on whether the contrib.sites tests have run.
self.assertRegex(response.url, "http://(testserver|example.com)/dummy/foo/")
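    # The shortcut view resolves the object through its ContentType and then
    # redirects to obj.get_absolute_url(); given the URL asserted above, the
    # test model presumably defines something like:
    #
    #     class ModelWithStringPrimaryKey(models.Model):
    #         string_pk = models.CharField(max_length=255, primary_key=True)
    #
    #         def get_absolute_url(self):
    #             return "/dummy/%s/" % self.string_pk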
def test_has_module_permission(self):
"""
has_module_permission() returns True for all users who
have any permission for that module (add, change, or delete), so that
the module is displayed on the admin index page.
"""
self.client.force_login(self.superuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
def test_overriding_has_module_permission(self):
"""
If has_module_permission() always returns False, the module shouldn't
be displayed on the admin index page for any users.
"""
articles = Article._meta.verbose_name_plural.title()
sections = Section._meta.verbose_name_plural.title()
index_url = reverse("admin7:index")
self.client.force_login(self.superuser)
response = self.client.get(index_url)
self.assertContains(response, sections)
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(index_url)
self.assertNotContains(response, articles)
        # The app list displays Sections but not Articles, as the latter's
        # ModelAdmin.has_module_permission() returns False.
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin7:app_list", args=("admin_views",)))
self.assertContains(response, sections)
self.assertNotContains(response, articles)
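    # has_module_permission() is the documented hook behind the tests above;
    # a minimal sketch of the presumed "admin7" registration (the actual one
    # lives elsewhere in the admin_views app):
    #
    #     class Article7Admin(admin.ModelAdmin):
    #         def has_module_permission(self, request):
    #             # Hide Articles from the index and app list.
    #             return False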
def test_post_save_message_no_forbidden_links_visible(self):
"""
Post-save message shouldn't contain a link to the change form if the
user doesn't have the change permission.
"""
self.client.force_login(self.adduser)
# Emulate Article creation for user with add-only permission.
post_data = {
"title": "Fun & games",
"content": "Some content",
"date_0": "2015-10-31",
"date_1": "16:35:00",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_article_add"), post_data, follow=True
)
self.assertContains(
response,
'<li class="success">The article “Fun & games” was added successfully.'
"</li>",
html=True,
)
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewProxyModelPermissionsTests(TestCase):
"""Tests for proxy models permissions in the admin."""
@classmethod
def setUpTestData(cls):
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
        # Set up permissions.
opts = UserProxy._meta
cls.viewuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("view", opts))
)
cls.adduser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("add", opts))
)
cls.changeuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("change", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("delete", opts))
)
# UserProxy instances.
cls.user_proxy = UserProxy.objects.create(
username="user_proxy", password="secret"
)
def test_add(self):
self.client.force_login(self.adduser)
url = reverse("admin:admin_views_userproxy_add")
data = {
"username": "can_add",
"password": "secret",
"date_joined_0": "2019-01-15",
"date_joined_1": "16:59:10",
}
response = self.client.post(url, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(UserProxy.objects.filter(username="can_add").exists())
def test_view(self):
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_userproxy_changelist"))
self.assertContains(response, "<h1>Select user proxy to view</h1>")
response = self.client.get(
reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
)
self.assertContains(response, "<h1>View user proxy</h1>")
self.assertContains(response, '<div class="readonly">user_proxy</div>')
def test_change(self):
self.client.force_login(self.changeuser)
data = {
"password": self.user_proxy.password,
"username": self.user_proxy.username,
"date_joined_0": self.user_proxy.date_joined.strftime("%Y-%m-%d"),
"date_joined_1": self.user_proxy.date_joined.strftime("%H:%M:%S"),
"first_name": "first_name",
}
url = reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
response = self.client.post(url, data)
self.assertRedirects(
response, reverse("admin:admin_views_userproxy_changelist")
)
self.assertEqual(
UserProxy.objects.get(pk=self.user_proxy.pk).first_name, "first_name"
)
def test_delete(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_userproxy_delete", args=(self.user_proxy.pk,))
response = self.client.post(url, {"post": "yes"}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists())
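# Proxy models have their own ContentType and therefore their own permission
# rows (e.g. "add_userproxy", "change_userproxy"), separate from the concrete
# User model's permissions; this is why get_perm() above passes
# for_concrete_model=False, so the proxy's content type, not User's, is used
# for the lookup.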
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewsNoUrlTest(TestCase):
"""Regression test for #17333"""
@classmethod
def setUpTestData(cls):
# User who can change Reports
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.changeuser.user_permissions.add(
get_perm(Report, get_permission_codename("change", Report._meta))
)
def test_no_standard_modeladmin_urls(self):
"""Admin index views don't break when user's ModelAdmin removes standard urls"""
self.client.force_login(self.changeuser)
r = self.client.get(reverse("admin:index"))
# We shouldn't get a 500 error caused by a NoReverseMatch.
self.assertEqual(r.status_code, 200)
self.client.post(reverse("admin:logout"))
@skipUnlessDBFeature("can_defer_constraint_checks")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewDeletedObjectsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.v2 = Villain.objects.create(name="Sue")
cls.sv1 = SuperVillain.objects.create(name="Bob")
cls.pl1 = Plot.objects.create(
name="World Domination", team_leader=cls.v1, contact=cls.v2
)
cls.pl2 = Plot.objects.create(
name="World Peace", team_leader=cls.v2, contact=cls.v2
)
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
cls.pd1 = PlotDetails.objects.create(details="almost finished", plot=cls.pl1)
cls.sh1 = SecretHideout.objects.create(
location="underground bunker", villain=cls.v1
)
cls.sh2 = SecretHideout.objects.create(
location="floating castle", villain=cls.sv1
)
cls.ssh1 = SuperSecretHideout.objects.create(
location="super floating castle!", supervillain=cls.sv1
)
cls.cy1 = CyclicOne.objects.create(name="I am recursive", two_id=1)
cls.cy2 = CyclicTwo.objects.create(name="I am recursive too", one_id=1)
def setUp(self):
self.client.force_login(self.superuser)
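# The delete-confirmation page builds its nested list via
# ModelAdmin.get_deleted_objects(), which uses
# django.contrib.admin.utils.NestedObjects (a Collector subclass) to follow
# every relation from the object being deleted.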
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(
r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
r'<li>Plot details: <a href="%s">almost finished</a>'
% (
reverse("admin:admin_views_plot_change", args=(self.pl1.pk,)),
reverse("admin:admin_views_plotdetails_change", args=(self.pd1.pk,)),
)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertRegex(response.content.decode(), pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
reverse("admin:admin_views_cyclicone_change", args=(self.cy1.pk,)),
)
two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
reverse("admin:admin_views_cyclictwo_change", args=(self.cy2.pk,)),
)
response = self.client.get(
reverse("admin:admin_views_cyclicone_delete", args=(self.cy1.pk,))
)
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
self.client.logout()
delete_user = User.objects.get(username="deleteuser")
delete_user.user_permissions.add(
get_perm(Plot, get_permission_codename("delete", Plot._meta))
)
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(self.pl1.pk,))
)
self.assertContains(
response,
"your account doesn't have permission to delete the following types of "
"objects",
)
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(
reverse("admin:admin_views_question_delete", args=(q.pk,))
)
self.assertContains(
response, "would require deleting the following protected related objects"
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a1.pk,)),
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a2.pk,)),
)
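# Answer.question presumably uses on_delete=models.PROTECT, so the collector
# reports the answers as protected instead of cascading, and the
# confirmation page lists them without offering deletion.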
def test_post_delete_protected(self):
"""
A POST request to delete protected objects should display the page
which says the deletion is prohibited.
"""
q = Question.objects.create(question="Why?")
Answer.objects.create(question=q, answer="Because.")
response = self.client.post(
reverse("admin:admin_views_question_delete", args=(q.pk,)), {"post": "yes"}
)
self.assertEqual(Question.objects.count(), 1)
self.assertContains(
response, "would require deleting the following protected related objects"
)
def test_restricted(self):
album = Album.objects.create(title="Amaryllis")
song = Song.objects.create(album=album, name="Unity")
response = self.client.get(
reverse("admin:admin_views_album_delete", args=(album.pk,))
)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
self.assertContains(
response,
'<li>Song: <a href="%s">Unity</a></li>'
% reverse("admin:admin_views_song_change", args=(song.pk,)),
)
def test_post_delete_restricted(self):
album = Album.objects.create(title="Amaryllis")
Song.objects.create(album=album, name="Unity")
response = self.client.post(
reverse("admin:admin_views_album_delete", args=(album.pk,)),
{"post": "yes"},
)
self.assertEqual(Album.objects.count(), 1)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
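# Song.album presumably uses on_delete=models.RESTRICT (added in Django
# 3.1), which blocks the delete like PROTECT unless another relation in the
# same collection would delete the restricted objects anyway.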
def test_not_registered(self):
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
"admin:admin_views_plot_change", args=(self.pl1.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
"admin:admin_views_plot_change", args=(self.pl2.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_villain_change", args=(self.sv1.pk,)),
'<li>Super villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_supervillain_change", args=(self.sv1.pk,)),
"<li>Secret hideout: floating castle",
"<li>Super secret hideout: super floating castle!",
]
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(
reverse("admin:admin_views_supervillain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = self.pl3
tag = FunkyTag.objects.create(content_object=plot, name="hott")
should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
"admin:admin_views_funkytag_change", args=(tag.id,)
)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(plot.pk,))
)
self.assertContains(response, should_contain)
def test_generic_relations_with_related_query_name(self):
"""
If a deleted object has GenericForeignKey with
GenericRelation(related_query_name='...') pointing to it, those objects
should be listed for deletion.
"""
bookmark = Bookmark.objects.create(name="djangoproject")
tag = FunkyTag.objects.create(content_object=bookmark, name="django")
tag_url = reverse("admin:admin_views_funkytag_change", args=(tag.id,))
should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
response = self.client.get(
reverse("admin:admin_views_bookmark_delete", args=(bookmark.pk,))
)
self.assertContains(response, should_contain)
def test_delete_view_uses_get_deleted_objects(self):
"""The delete view uses ModelAdmin.get_deleted_objects()."""
book = Book.objects.create(name="Test Book")
response = self.client.get(
reverse("admin2:admin_views_book_delete", args=(book.pk,))
)
# BookAdmin.get_deleted_objects() returns custom text.
self.assertContains(response, "a deletable object")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestGenericRelations(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
def setUp(self):
self.client.force_login(self.superuser)
def test_generic_content_object_in_list_display(self):
FunkyTag.objects.create(content_object=self.pl3, name="hott")
response = self.client.get(reverse("admin:admin_views_funkytag_changelist"))
self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewStringPrimaryKeyTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.pk = (
"abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
)
cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
content_type_pk = ContentType.objects.get_for_model(
ModelWithStringPrimaryKey
).pk
user_pk = cls.superuser.pk
LogEntry.objects.log_action(
user_pk,
content_type_pk,
cls.pk,
cls.pk,
2,  # django.contrib.admin.models.CHANGE
change_message="Changed something",
)
def setUp(self):
self.client.force_login(self.superuser)
def test_get_history_view(self):
"""
Retrieving the history for an object using the urlencoded form of the
primary key should work.
Refs #12349, #18550.
"""
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_history", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
self.assertContains(response, "Changed something")
def test_get_change_view(self):
"Retrieving the object using urlencoded form of primary key should work"
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
def test_changelist_to_changeform_link(self):
"""
The link to the changeform of the object in the changelist should use
reverse() and be quoted.
"""
response = self.client.get(
reverse("admin:admin_views_modelwithstringprimarykey_changelist")
)
# This URL comes through reverse(), so it is URL-quoted and iri_to_uri()-encoded.
pk_final_url = escape(iri_to_uri(quote(self.pk)))
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", pk_final_url)
should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (
change_url,
escape(self.pk),
)
self.assertContains(response, should_contain)
def test_recentactions_link(self):
"""
The link from the recent actions list referring to the changeform of
the object should be quoted.
"""
response = self.client.get(reverse("admin:index"))
link = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(quote(self.pk),)
)
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
def test_deleteconfirmation_link(self):
""" "
The link from the delete confirmation page referring back to the
changeform of the object should be quoted.
"""
url = reverse(
"admin:admin_views_modelwithstringprimarykey_delete", args=(quote(self.pk),)
)
response = self.client.get(url)
# This URL comes through reverse(), so it is URL-quoted and iri_to_uri()-encoded.
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", escape(iri_to_uri(quote(self.pk))))
should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_url_conflicts_with_add(self):
"A model with a primary key that ends with add or is `add` should be visible"
add_model = ModelWithStringPrimaryKey.objects.create(
pk="i have something to add"
)
add_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
add_url = reverse("admin:admin_views_modelwithstringprimarykey_add")
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model2.pk),),
)
self.assertNotEqual(add_url, change_url)
def test_url_conflicts_with_delete(self):
"A model with a primary key that ends with delete should be visible"
delete_model = ModelWithStringPrimaryKey(pk="delete")
delete_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(delete_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_history(self):
"A model with a primary key that ends with history should be visible"
history_model = ModelWithStringPrimaryKey(pk="history")
history_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(history_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_shortcut_view_with_escaping(self):
"'View on site should' work properly with char fields"
model = ModelWithStringPrimaryKey(pk="abc_123")
model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(model.pk),),
)
)
should_contain = '/%s/" class="viewsitelink">' % model.pk
self.assertContains(response, should_contain)
def test_change_view_history_link(self):
"""Object history button link should work and contain the pk value quoted."""
url = reverse(
"admin:%s_modelwithstringprimarykey_change"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected_link = reverse(
"admin:%s_modelwithstringprimarykey_history"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
self.assertContains(
response, '<a href="%s" class="historylink"' % escape(expected_link)
)
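# django.contrib.admin.utils.quote() escapes characters that would otherwise
# break admin URL patterns by replacing them with "_XX" hex escapes, e.g.
# quote("123/history") == "123_2Fhistory" since "/" is 0x2F.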
def test_redirect_on_add_view_continue_button(self):
"""As soon as an object is added using "Save and continue editing"
button, the user should be redirected to the object's change_view.
In case primary key is a string containing some special characters
like slash or underscore, these characters must be escaped (see #22266)
"""
response = self.client.post(
reverse("admin:admin_views_modelwithstringprimarykey_add"),
{
"string_pk": "123/history",
"_continue": "1", # Save and continue editing
},
)
self.assertEqual(response.status_code, 302) # temporary redirect
self.assertIn("/123_2Fhistory/", response.headers["location"]) # PK is quoted
@override_settings(ROOT_URLCONF="admin_views.urls")
class SecureViewTests(TestCase):
"""
Test behavior of a view protected by the staff_member_required decorator.
"""
def test_secure_view_shows_login_if_not_logged_in(self):
secure_url = reverse("secure_view")
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), secure_url)
)
response = self.client.get(secure_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)
def test_staff_member_required_decorator_works_with_argument(self):
"""
The staff_member_required decorator works with an argument
(redirect_field_name).
"""
secure_url = "/test_admin/admin/secure-view2/"
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?myfield=%s" % (reverse("admin:login"), secure_url)
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewUnicodeTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.b1 = Book.objects.create(name="Lærdommer")
cls.p1 = Promo.objects.create(name="<Promo for Lærdommer>", book=cls.b1)
cls.chap1 = Chapter.objects.create(
title="Norske bostaver æøå skaper problemer",
content="<p>Svært frustrerende med UnicodeDecodeErro</p>",
book=cls.b1,
)
cls.chap2 = Chapter.objects.create(
title="Kjærlighet",
content="<p>La kjærligheten til de lidende seire.</p>",
book=cls.b1,
)
cls.chap3 = Chapter.objects.create(
title="Kjærlighet", content="<p>Noe innhold</p>", book=cls.b1
)
cls.chap4 = ChapterXtra1.objects.create(
chap=cls.chap1, xtra="<Xtra(1) Norske bostaver æøå skaper problemer>"
)
cls.chap5 = ChapterXtra1.objects.create(
chap=cls.chap2, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap6 = ChapterXtra1.objects.create(
chap=cls.chap3, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap7 = ChapterXtra2.objects.create(
chap=cls.chap1, xtra="<Xtra(2) Norske bostaver æøå skaper problemer>"
)
cls.chap8 = ChapterXtra2.objects.create(
chap=cls.chap2, xtra="<Xtra(2) Kjærlighet>"
)
cls.chap9 = ChapterXtra2.objects.create(
chap=cls.chap3, xtra="<Xtra(2) Kjærlighet>"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_unicode_edit(self):
"""
POST on edit_view handles non-ASCII characters.
"""
post_data = {
"name": "Test lærdommer",
# inline data
"chapter_set-TOTAL_FORMS": "6",
"chapter_set-INITIAL_FORMS": "3",
"chapter_set-MAX_NUM_FORMS": "0",
"chapter_set-0-id": self.chap1.pk,
"chapter_set-0-title": "Norske bostaver æøå skaper problemer",
"chapter_set-0-content": (
"<p>Svært frustrerende med UnicodeDecodeError</p>"
),
"chapter_set-1-id": self.chap2.id,
"chapter_set-1-title": "Kjærlighet.",
"chapter_set-1-content": (
"<p>La kjærligheten til de lidende seire.</p>"
),
"chapter_set-2-id": self.chap3.id,
"chapter_set-2-title": "Need a title.",
"chapter_set-2-content": "<p>Newest content</p>",
"chapter_set-3-id": "",
"chapter_set-3-title": "",
"chapter_set-3-content": "",
"chapter_set-4-id": "",
"chapter_set-4-title": "",
"chapter_set-4-content": "",
"chapter_set-5-id": "",
"chapter_set-5-title": "",
"chapter_set-5-content": "",
}
response = self.client.post(
reverse("admin:admin_views_book_change", args=(self.b1.pk,)), post_data
)
self.assertEqual(response.status_code, 302)  # Redirects on successful save.
def test_unicode_delete(self):
"""
The delete_view handles non-ASCII characters
"""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_book_delete", args=(self.b1.pk,))
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 200)
response = self.client.post(delete_url, delete_dict)
self.assertRedirects(response, reverse("admin:admin_views_book_changelist"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewListEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(
name="This Week in Django", release_date=datetime.date.today()
)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso="en", name="English", english_name="English")
response = self.client.get(reverse("admin:admin_views_language_changelist"))
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get(reverse("admin:admin_views_person_changelist"))
# 2 inputs per object (the field and the hidden id field) = 6
# 4 management hidden fields = 4
# 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
# main form submit button = 1
# search field and search submit button = 2
# CSRF fields = 2
# field to track 'select all' across paginated views = 1
# nav sidebar quick filter field = 1
# 6 + 4 + 4 + 1 + 2 + 2 + 1 + 1 = 21 inputs
self.assertContains(response, "<input", count=21)
# 1 select per object = 3, plus 1 for the action dropdown = 4 selects
self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data, follow=True
)
self.assertEqual(len(response.context["messages"]), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": str(self.per3.pk),
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?gender__exact=1", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?q=john", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
"""
Non-field errors are displayed for each of the forms in the
changelist's formset.
"""
fd1 = FoodDelivery.objects.create(
reference="123", driver="bill", restaurant="thai"
)
fd2 = FoodDelivery.objects.create(
reference="456", driver="bill", restaurant="india"
)
fd3 = FoodDelivery.objects.create(
reference="789", driver="bill", restaurant="pizza"
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
1,
html=True,
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
2,
html=True,
)
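# Formset-level (non-form) errors come from the changelist formset's clean()
# raising ValidationError; the custom Person admin presumably raises
# "Grace is not a Zombie" there, which is what the next two tests check.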
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
non_form_errors = response.context["cl"].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(
str(non_form_errors),
str(ErrorList(["Grace is not a Zombie"], error_class="nonform")),
)
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_category_changelist"), data
)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Pagination works for list_editable items.
"""
UnorderedObject.objects.create(id=1, name="Unordered object #1")
UnorderedObject.objects.create(id=2, name="Unordered object #2")
UnorderedObject.objects.create(id=3, name="Unordered object #3")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist")
)
self.assertContains(response, "Unordered object #3")
self.assertContains(response, "Unordered object #2")
self.assertNotContains(response, "Unordered object #1")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist") + "?p=2"
)
self.assertNotContains(response, "Unordered object #3")
self.assertNotContains(response, "Unordered object #2")
self.assertContains(response, "Unordered object #1")
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ["3"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
"_selected_action": ["1"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
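# IS_POPUP_VAR is the "_popup" GET parameter that the admin appends when a
# changelist is opened in a related-object popup window.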
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse("admin:admin_views_person_changelist"))
self.assertNotEqual(response.context["cl"].list_editable, ())
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?%s" % IS_POPUP_VAR
)
self.assertEqual(response.context["cl"].list_editable, ())
def test_pk_hidden_fields(self):
"""
hidden pk fields aren't displayed in the table body and their
corresponding human-readable value is displayed instead. The hidden pk
fields are displayed but separately (not in the table) and only once.
"""
story1 = Story.objects.create(
title="The adventures of Guido", content="Once upon a time in Djangoland..."
)
story2 = Story.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
response = self.client.get(reverse("admin:admin_views_story_changelist"))
# Only one hidden field, in a separate place from the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
"""Similarly as test_pk_hidden_fields, but when the hidden pk fields are
referenced in list_display_links.
Refs #12475.
"""
story1 = OtherStory.objects.create(
title="The adventures of Guido",
content="Once upon a time in Djangoland...",
)
story2 = OtherStory.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
link1 = reverse("admin:admin_views_otherstory_change", args=(story1.pk,))
link2 = reverse("admin:admin_views_otherstory_change", args=(story2.pk,))
response = self.client.get(reverse("admin:admin_views_otherstory_changelist"))
# Only one hidden field, in a separate place from the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id),
1,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id),
1,
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSearchTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="John Doe", gender=1)
Person.objects.create(name='John O"Hara', gender=1)
Person.objects.create(name="John O'Hara", gender=1)
cls.t1 = Recommender.objects.create()
cls.t2 = Recommendation.objects.create(the_recommender=cls.t1)
cls.t3 = Recommender.objects.create()
cls.t4 = Recommendation.objects.create(the_recommender=cls.t3)
cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text="Bar")
cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text="Foo")
cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text="Few")
cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text="Bas")
def setUp(self):
self.client.force_login(self.superuser)
def test_search_on_sibling_models(self):
"A search that mentions sibling models"
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned 1 object
self.assertContains(response, "\n1 recommendation\n")
def test_with_fk_to_field(self):
"""
The to_field GET parameter is preserved when a search is performed.
Refs #10918.
"""
response = self.client.get(
reverse("admin:auth_user_changelist") + "?q=joe&%s=id" % TO_FIELD_VAR
)
self.assertContains(response, "\n1 user\n")
self.assertContains(
response,
'<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR,
html=True,
)
def test_exact_matches(self):
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned one object
self.assertContains(response, "\n1 recommendation\n")
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=ba"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 recommendations\n")
def test_beginning_matches(self):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
# confirm the search returned one object
self.assertContains(response, "\n1 person\n")
self.assertContains(response, "Guido")
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=uido"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 persons\n")
self.assertNotContains(response, "Guido")
def test_pluggable_search(self):
PluggableSearchPerson.objects.create(name="Bob", age=10)
PluggableSearchPerson.objects.create(name="Amy", age=20)
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=Bob"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Bob")
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=20"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Amy")
def test_reset_link(self):
"""
Test presence of reset link in search bar ("1 result (_x total_)").
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
# + 1 for total count
with self.assertNumQueries(5):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">6 total</a>)</span>""",
html=True,
)
def test_no_total_count(self):
"""
#8408 -- "Show all" should be displayed instead of the total count if
ModelAdmin.show_full_result_count is False.
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
with self.assertNumQueries(4):
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
html=True,
)
self.assertTrue(response.context["cl"].show_admin_actions)
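# The admin splits the search query with django.utils.text.smart_split() and
# unescapes quoted phrases, so a double- or single-quoted phrase matches as a
# single term while unquoted words must each match independently, as the
# cases below exercise.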
def test_search_with_spaces(self):
url = reverse("admin:admin_views_person_changelist") + "?q=%s"
tests = [
('"John Doe"', 1),
("'John Doe'", 1),
("John Doe", 0),
('"John Doe" John', 1),
("'John Doe' John", 1),
("John Doe John", 0),
('"John Do"', 1),
("'John Do'", 1),
("'John O'Hara'", 0),
("'John O\\'Hara'", 1),
('"John O"Hara"', 0),
('"John O\\"Hara"', 1),
]
for search, hits in tests:
with self.subTest(search=search):
response = self.client.get(url % search)
self.assertContains(response, "\n%s person" % hits)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInheritedInlinesTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_inline(self):
"""
Inline models which inherit from a common parent are correctly handled.
"""
foo_user = "foo username"
bar_user = "bar username"
name_re = re.compile(b'name="(.*?)"')
# test the add case
response = self.client.get(reverse("admin:admin_views_persona_add"))
names = name_re.findall(response.content)
names.remove(b"csrfmiddlewaretoken")
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
# Post the add form.
post_data = {
"name": "Test Name",
# inline data
"accounts-TOTAL_FORMS": "1",
"accounts-INITIAL_FORMS": "0",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": foo_user,
"accounts-2-TOTAL_FORMS": "1",
"accounts-2-INITIAL_FORMS": "0",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": bar_user,
}
response = self.client.post(reverse("admin:admin_views_persona_add"), post_data)
self.assertEqual(response.status_code, 302)  # Redirects on successful save.
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
persona_id = Persona.objects.all()[0].id
foo_id = FooAccount.objects.all()[0].id
bar_id = BarAccount.objects.all()[0].id
# test the edit case
response = self.client.get(
reverse("admin:admin_views_persona_change", args=(persona_id,))
)
names = name_re.findall(response.content)
names.remove(b"csrfmiddlewaretoken")
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
post_data = {
"name": "Test Name",
"accounts-TOTAL_FORMS": "2",
"accounts-INITIAL_FORMS": "1",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": "%s-1" % foo_user,
"accounts-0-account_ptr": str(foo_id),
"accounts-0-persona": str(persona_id),
"accounts-2-TOTAL_FORMS": "2",
"accounts-2-INITIAL_FORMS": "1",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": "%s-1" % bar_user,
"accounts-2-0-account_ptr": str(bar_id),
"accounts-2-0-persona": str(persona_id),
}
response = self.client.post(
reverse("admin:admin_views_persona_change", args=(persona_id,)), post_data
)
self.assertEqual(response.status_code, 302)
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestCustomChangeList(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_custom_changelist(self):
"""
Validate that a custom ChangeList class can be used (#9749)
"""
# Insert some data
post_data = {"name": "First Gadget"}
response = self.client.post(reverse("admin:admin_views_gadget_add"), post_data)
self.assertEqual(response.status_code, 302)  # Redirects on successful save.
# Hit the page once to drain the queued messages.
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
# Data is still not visible on the page
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
self.assertNotContains(response, "First Gadget")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestInlineNotEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_GET_parent_add(self):
"""
The add view of a parent model renders when its inline isn't editable.
"""
response = self.client.get(reverse("admin:admin_views_parent_add"))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.pks = [EmptyModel.objects.create().id for i in range(3)]
def setUp(self):
self.client.force_login(self.superuser)
self.super_login = {
REDIRECT_FIELD_NAME: reverse("admin:index"),
"username": "super",
"password": "secret",
}
def test_changelist_view(self):
response = self.client.get(reverse("admin:admin_views_emptymodel_changelist"))
for i in self.pks:
if i > 1:
self.assertContains(response, "Primary key = %s" % i)
else:
self.assertNotContains(response, "Primary key = %s" % i)
def test_changelist_view_count_queries(self):
# create 2 Person objects
Person.objects.create(name="person1", gender=1)
Person.objects.create(name="person2", gender=2)
changelist_url = reverse("admin:admin_views_person_changelist")
# 5 queries are expected: 1 for the session, 1 for the user,
# 2 for the counts and 1 for the objects on the page
with self.assertNumQueries(5):
resp = self.client.get(changelist_url)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"q": "not_in_name"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 0 selected")
self.assertEqual(resp.context["selection_note_all"], "All 0 selected")
with self.assertNumQueries(5):
extra = {"q": "person"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"gender__exact": "1"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 1 selected")
self.assertEqual(resp.context["selection_note_all"], "1 selected")
def test_change_view(self):
for i in self.pks:
url = reverse("admin:admin_views_emptymodel_change", args=(i,))
response = self.client.get(url, follow=True)
if i > 1:
self.assertEqual(response.status_code, 200)
else:
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["empty model with ID “1” doesn’t exist. Perhaps it was deleted?"],
)
def test_add_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(CoverLetter.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"author": "Candidate, Best",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_coverletter_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = CoverLetter.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
"Candidate, Best</a>” was added successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(ShortMessage.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"content": "What's this SMS thing?",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_shortmessage_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name
sm = ShortMessage.objects.all()[0]
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
"%s</a>” was added successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_add_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(Telegram.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "Urgent telegram",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = Telegram.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
"Urgent telegram</a>” was added successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(Paper.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name
p = Paper.objects.all()[0]
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
"%s</a>” was added successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_edit_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
cl = CoverLetter.objects.create(author="John Doe")
self.assertEqual(CoverLetter.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"author": "John Doe II",
"_save": "Save",
}
url = reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name. Instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
"John Doe II</a>” was changed successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(cl.pk,)),
html=True,
)
# model has no __str__ method
sm = ShortMessage.objects.create(content="This is expensive")
self.assertEqual(ShortMessage.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"content": "Too expensive",
"_save": "Save",
}
url = reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
"%s</a>” was changed successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_edit_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
t = Telegram.objects.create(title="First Telegram")
self.assertEqual(Telegram.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_telegram_change", args=(t.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "Telegram without typo",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_change", args=(t.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name. The instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
"Telegram without typo</a>” was changed successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(t.pk,)),
html=True,
)
# model has no __str__ method
p = Paper.objects.create(title="My Paper Title")
self.assertEqual(Paper.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_paper_change", args=(p.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_change", args=(p.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
"%s</a>” was changed successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_history_view_custom_qs(self):
"""
Custom querysets are considered for the admin history view.
"""
self.client.post(reverse("admin:login"), self.super_login)
FilteredManager.objects.create(pk=1)
FilteredManager.objects.create(pk=2)
response = self.client.get(
reverse("admin:admin_views_filteredmanager_changelist")
)
self.assertContains(response, "PK=1")
self.assertContains(response, "PK=2")
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(1,))
).status_code,
200,
)
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(2,))
).status_code,
200,
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineFileUploadTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
file1 = tempfile.NamedTemporaryFile(suffix=".file1")
file1.write(b"a" * (2**21))
filename = file1.name
# Closing deletes the temporary file; only its path is stored on the model.
file1.close()
cls.gallery = Gallery.objects.create(name="Test Gallery")
cls.picture = Picture.objects.create(
name="Test Picture",
image=filename,
gallery=cls.gallery,
)
def setUp(self):
self.client.force_login(self.superuser)
def test_form_has_multipart_enctype(self):
response = self.client.get(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,))
)
self.assertIs(response.context["has_file_field"], True)
self.assertContains(response, MULTIPART_ENCTYPE)
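# MULTIPART_ENCTYPE is presumably this module's constant for
# 'enctype="multipart/form-data"', which the change form must declare
# whenever any of its (inline) forms contains a FileField or ImageField.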
def test_inline_file_upload_edit_validation_error_post(self):
"""
Inline file uploads correctly display prior data (#10002).
"""
post_data = {
"name": "Test Gallery",
"pictures-TOTAL_FORMS": "2",
"pictures-INITIAL_FORMS": "1",
"pictures-MAX_NUM_FORMS": "0",
"pictures-0-id": str(self.picture.id),
"pictures-0-gallery": str(self.gallery.id),
"pictures-0-name": "Test Picture",
"pictures-0-image": "",
"pictures-1-id": "",
"pictures-1-gallery": str(self.gallery.id),
"pictures-1-name": "Test Picture 2",
"pictures-1-image": "",
}
response = self.client.post(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,)),
post_data,
)
self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.collector = Collector.objects.create(pk=1, name="John Fowles")
def setUp(self):
self.post_data = {
"name": "Test Name",
"widget_set-TOTAL_FORMS": "3",
"widget_set-INITIAL_FORMS": "0",
"widget_set-MAX_NUM_FORMS": "0",
"widget_set-0-id": "",
"widget_set-0-owner": "1",
"widget_set-0-name": "",
"widget_set-1-id": "",
"widget_set-1-owner": "1",
"widget_set-1-name": "",
"widget_set-2-id": "",
"widget_set-2-owner": "1",
"widget_set-2-name": "",
"doohickey_set-TOTAL_FORMS": "3",
"doohickey_set-INITIAL_FORMS": "0",
"doohickey_set-MAX_NUM_FORMS": "0",
"doohickey_set-0-owner": "1",
"doohickey_set-0-code": "",
"doohickey_set-0-name": "",
"doohickey_set-1-owner": "1",
"doohickey_set-1-code": "",
"doohickey_set-1-name": "",
"doohickey_set-2-owner": "1",
"doohickey_set-2-code": "",
"doohickey_set-2-name": "",
"grommet_set-TOTAL_FORMS": "3",
"grommet_set-INITIAL_FORMS": "0",
"grommet_set-MAX_NUM_FORMS": "0",
"grommet_set-0-code": "",
"grommet_set-0-owner": "1",
"grommet_set-0-name": "",
"grommet_set-1-code": "",
"grommet_set-1-owner": "1",
"grommet_set-1-name": "",
"grommet_set-2-code": "",
"grommet_set-2-owner": "1",
"grommet_set-2-name": "",
"whatsit_set-TOTAL_FORMS": "3",
"whatsit_set-INITIAL_FORMS": "0",
"whatsit_set-MAX_NUM_FORMS": "0",
"whatsit_set-0-owner": "1",
"whatsit_set-0-index": "",
"whatsit_set-0-name": "",
"whatsit_set-1-owner": "1",
"whatsit_set-1-index": "",
"whatsit_set-1-name": "",
"whatsit_set-2-owner": "1",
"whatsit_set-2-index": "",
"whatsit_set-2-name": "",
"fancydoodad_set-TOTAL_FORMS": "3",
"fancydoodad_set-INITIAL_FORMS": "0",
"fancydoodad_set-MAX_NUM_FORMS": "0",
"fancydoodad_set-0-doodad_ptr": "",
"fancydoodad_set-0-owner": "1",
"fancydoodad_set-0-name": "",
"fancydoodad_set-0-expensive": "on",
"fancydoodad_set-1-doodad_ptr": "",
"fancydoodad_set-1-owner": "1",
"fancydoodad_set-1-name": "",
"fancydoodad_set-1-expensive": "on",
"fancydoodad_set-2-doodad_ptr": "",
"fancydoodad_set-2-owner": "1",
"fancydoodad_set-2-name": "",
"fancydoodad_set-2-expensive": "on",
"category_set-TOTAL_FORMS": "3",
"category_set-INITIAL_FORMS": "0",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "",
"category_set-0-id": "",
"category_set-0-collector": "1",
"category_set-1-order": "",
"category_set-1-id": "",
"category_set-1-collector": "1",
"category_set-2-order": "",
"category_set-2-id": "",
"category_set-2-collector": "1",
}
self.client.force_login(self.superuser)
def test_simple_inline(self):
"A simple model can be saved as inlines"
# First add a new inline
self.post_data["widget_set-0-name"] = "Widget 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
widget_id = Widget.objects.all()[0].id
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="widget_set-0-id"')
# No file or image fields, no enctype on the forms
self.assertIs(response.context["has_file_field"], False)
self.assertNotContains(response, MULTIPART_ENCTYPE)
# Now resave that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
# Now modify that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")
def test_explicit_autofield_inline(self):
"""
A model with an explicit autofield primary key can be saved as inlines.
"""
# First add a new inline
self.post_data["grommet_set-0-name"] = "Grommet 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="grommet_set-0-code"')
# Now resave that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Now modify that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")
def test_char_pk_inline(self):
"A model with a character PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="doohickey_set-0-code"')
# Now resave that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Now modify that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")
def test_integer_pk_inline(self):
"A model with an integer PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="whatsit_set-0-index"')
# Now resave that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Now modify that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")
def test_inherited_inline(self):
"An inherited model can be saved as inlines. Regression for #11042"
# First add a new inline
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
doodad_pk = FancyDoodad.objects.all()[0].pk
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
# Now resave that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
# Now modify that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")
def test_ordered_inline(self):
"""
        An inline with an editable ordering field is updated correctly.
"""
# Create some objects with an initial ordering
Category.objects.create(id=1, order=1, collector=self.collector)
Category.objects.create(id=2, order=2, collector=self.collector)
Category.objects.create(id=3, order=0, collector=self.collector)
Category.objects.create(id=4, order=0, collector=self.collector)
# NB: The order values must be changed so that the items are reordered.
self.post_data.update(
{
"name": "Frederick Clegg",
"category_set-TOTAL_FORMS": "7",
"category_set-INITIAL_FORMS": "4",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "14",
"category_set-0-id": "1",
"category_set-0-collector": "1",
"category_set-1-order": "13",
"category_set-1-id": "2",
"category_set-1-collector": "1",
"category_set-2-order": "1",
"category_set-2-id": "3",
"category_set-2-collector": "1",
"category_set-3-order": "0",
"category_set-3-id": "4",
"category_set-3-collector": "1",
"category_set-4-order": "",
"category_set-4-id": "",
"category_set-4-collector": "1",
"category_set-5-order": "",
"category_set-5-id": "",
"category_set-5-collector": "1",
"category_set-6-order": "",
"category_set-6-id": "",
"category_set-6-collector": "1",
}
)
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(self.collector.category_set.count(), 4)
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(ROOT_URLCONF="admin_views.urls")
class NeverCacheTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
def setUp(self):
self.client.force_login(self.superuser)
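    # get_max_age() (django.utils.cache) returns the integer max-age from the
    # Cache-Control header, or None when it's unset; admin views wrapped in
    # never_cache emit max-age=0.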
def test_admin_index(self):
"Check the never-cache status of the main index"
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_app_index(self):
"Check the never-cache status of an application index"
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(get_max_age(response), 0)
def test_model_index(self):
"Check the never-cache status of a model index"
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
self.assertEqual(get_max_age(response), 0)
def test_model_add(self):
"Check the never-cache status of a model add page"
response = self.client.get(reverse("admin:admin_views_fabric_add"))
self.assertEqual(get_max_age(response), 0)
def test_model_view(self):
"Check the never-cache status of a model edit page"
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_history(self):
"Check the never-cache status of a model history page"
response = self.client.get(
reverse("admin:admin_views_section_history", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_delete(self):
"Check the never-cache status of a model delete page"
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_login(self):
"Check the never-cache status of login views"
self.client.logout()
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_logout(self):
"Check the never-cache status of logout view"
response = self.client.post(reverse("admin:logout"))
self.assertEqual(get_max_age(response), 0)
def test_password_change(self):
"Check the never-cache status of the password change view"
self.client.logout()
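        # AdminSite registers the password change views with cacheable=True,
        # so never_cache isn't applied and no max-age is set.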
response = self.client.get(reverse("admin:password_change"))
self.assertIsNone(get_max_age(response))
def test_password_change_done(self):
"Check the never-cache status of the password change done view"
response = self.client.get(reverse("admin:password_change_done"))
self.assertIsNone(get_max_age(response))
def test_JS_i18n(self):
"Check the never-cache status of the JavaScript i18n view"
response = self.client.get(reverse("admin:jsi18n"))
self.assertIsNone(get_max_age(response))
@override_settings(ROOT_URLCONF="admin_views.urls")
class PrePopulatedTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_prepopulated_on(self):
response = self.client.get(reverse("admin:admin_views_prepopulatedpost_add"))
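        # The prepopulated-fields configuration is JSON rendered inside an
        # HTML attribute, so its quotes appear as &quot; entities in the
        # response.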
self.assertContains(response, ""id": "#id_slug"")
self.assertContains(
response, ""dependency_ids": ["#id_title"]"
)
self.assertContains(
response,
""id": "#id_prepopulatedsubpost_set-0-subslug"",
)
def test_prepopulated_off(self):
response = self.client.get(
reverse("admin:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, "A Long Title")
self.assertNotContains(response, ""id": "#id_slug"")
self.assertNotContains(
response, ""dependency_ids": ["#id_title"]"
)
self.assertNotContains(
response,
""id": "#id_prepopulatedsubpost_set-0-subslug"",
)
@override_settings(USE_THOUSAND_SEPARATOR=True)
def test_prepopulated_maxlength_localized(self):
"""
Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
that maxLength (in the JavaScript) is rendered without separators.
"""
response = self.client.get(
reverse("admin:admin_views_prepopulatedpostlargeslug_add")
)
self.assertContains(response, ""maxLength": 1000") # instead of 1,000
def test_view_only_add_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'
which is present in the add view, even if the
ModelAdmin.has_change_permission() returns False.
"""
response = self.client.get(reverse("admin7:admin_views_prepopulatedpost_add"))
self.assertContains(response, "data-prepopulated-fields=")
self.assertContains(response, ""id": "#id_slug"")
def test_view_only_change_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That
doesn't break a view-only change view.
"""
response = self.client.get(
reverse("admin7:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, 'data-prepopulated-fields="[]"')
self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug)
@override_settings(ROOT_URLCONF="admin_views.urls")
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ["admin_views"] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
self.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def test_login_button_centered(self):
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
button = self.selenium.find_element(By.CSS_SELECTOR, ".submit-row input")
offset_left = button.get_property("offsetLeft")
offset_right = button.get_property("offsetParent").get_property(
"offsetWidth"
) - (offset_left + button.get_property("offsetWidth"))
# Use assertAlmostEqual to avoid pixel rounding errors.
self.assertAlmostEqual(offset_left, offset_right, delta=3)
def test_prepopulated_fields(self):
"""
The JavaScript-automated prepopulated fields work with the main form
and with stacked and tabular inlines.
Refs #13068, #9264, #9983, #9784.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_mainprepopulated_add")
)
self.wait_for(".select2")
# Main form ----------------------------------------------------------
self.selenium.find_element(By.ID, "id_pubdate").send_keys("2012-02-18")
self.select_option("#id_status", "option two")
self.selenium.find_element(By.ID, "id_name").send_keys(
" the mAin nÀMë and it's awεšomeıııİ"
)
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
slug3 = self.selenium.find_element(By.ID, "id_slug3").get_attribute("value")
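        # The slug values are built client-side by the admin's prepopulate.js,
        # which URLifies and joins the configured source fields.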
self.assertEqual(slug1, "the-main-name-and-its-awesomeiiii-2012-02-18")
self.assertEqual(slug2, "option-two-the-main-name-and-its-awesomeiiii")
self.assertEqual(
slug3, "the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i"
)
# Stacked inlines with fieldsets -------------------------------------
# Initial inline
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-pubdate"
).send_keys("2011-12-17")
self.select_option("#id_relatedprepopulated_set-0-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-name"
).send_keys(" here is a sŤāÇkeð inline ! ")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "here-is-a-stacked-inline-2011-12-17")
self.assertEqual(slug2, "option-one-here-is-a-stacked-inline")
initial_select2_inputs = self.selenium.find_elements(
By.CLASS_NAME, "select2-selection"
)
# Inline formsets have empty/invisible forms.
# Only the 4 visible select2 inputs are initialized.
num_initial_select2_inputs = len(initial_select2_inputs)
self.assertEqual(num_initial_select2_inputs, 4)
# Add an inline
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
0
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 2,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-pubdate"
).send_keys("1999-01-25")
self.select_option("#id_relatedprepopulated_set-1-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-name"
).send_keys(
" now you haVe anöther sŤāÇkeð inline with a very ... "
"loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog "
"text... "
)
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug2"
).get_attribute("value")
# 50 characters maximum for slug1 field
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
# 60 characters maximum for slug2 field
self.assertEqual(
slug2, "option-two-now-you-have-another-stacked-inline-with-a-very-l"
)
# Tabular inlines ----------------------------------------------------
# Initial inline
element = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-status"
)
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-pubdate"
).send_keys("1234-12-07")
self.select_option("#id_relatedprepopulated_set-2-0-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-name"
).send_keys("And now, with a tÃbűlaŘ inline !!!")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "and-now-with-a-tabular-inline-1234-12-07")
self.assertEqual(slug2, "option-two-and-now-with-a-tabular-inline")
# Add an inline
# Button may be outside the browser frame.
element = self.selenium.find_elements(
By.LINK_TEXT, "Add another Related prepopulated"
)[1]
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
element.click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 4,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-pubdate"
).send_keys("1981-08-22")
self.select_option("#id_relatedprepopulated_set-2-1-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-name"
).send_keys(r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters')
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug2"
).get_attribute("value")
self.assertEqual(slug1, "tabular-inline-with-ignored-characters-1981-08-22")
self.assertEqual(slug2, "option-one-tabular-inline-with-ignored-characters")
# Add an inline without an initial inline.
# The button is outside of the browser frame.
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);")
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
2
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 6,
)
# Stacked Inlines without fieldsets ----------------------------------
# Initial inline.
row_id = "id_relatedprepopulated_set-4-0-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("2011-12-12")
self.select_option(f"#{row_id}status", "option one")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" sŤāÇkeð inline ! "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "stacked-inline-2011-12-12")
self.assertEqual(slug2, "option-one")
# Add inline.
self.selenium.find_elements(
By.LINK_TEXT,
"Add another Related prepopulated",
)[3].click()
row_id = "id_relatedprepopulated_set-4-1-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("1999-01-20")
self.select_option(f"#{row_id}status", "option two")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" now you haVe anöther sŤāÇkeð inline with a very loooong "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
self.assertEqual(slug2, "option-two")
# Save and check that everything is properly stored in the database
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.assertEqual(MainPrepopulated.objects.count(), 1)
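        # Each get() below doubles as an assertion: it raises DoesNotExist if
        # no row matches the expected field values.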
MainPrepopulated.objects.get(
name=" the mAin nÀMë and it's awεšomeıııİ",
pubdate="2012-02-18",
status="option two",
slug1="the-main-name-and-its-awesomeiiii-2012-02-18",
slug2="option-two-the-main-name-and-its-awesomeiiii",
slug3="the-main-nàmë-and-its-awεšomeıııi",
)
self.assertEqual(RelatedPrepopulated.objects.count(), 6)
RelatedPrepopulated.objects.get(
name=" here is a sŤāÇkeð inline ! ",
pubdate="2011-12-17",
status="option one",
slug1="here-is-a-stacked-inline-2011-12-17",
slug2="option-one-here-is-a-stacked-inline",
)
RelatedPrepopulated.objects.get(
# 75 characters in name field
name=(
" now you haVe anöther sŤāÇkeð inline with a very ... "
"loooooooooooooooooo"
),
pubdate="1999-01-25",
status="option two",
slug1="now-you-have-another-stacked-inline-with-a-very-lo",
slug2="option-two-now-you-have-another-stacked-inline-with-a-very-l",
)
RelatedPrepopulated.objects.get(
name="And now, with a tÃbűlaŘ inline !!!",
pubdate="1234-12-07",
status="option two",
slug1="and-now-with-a-tabular-inline-1234-12-07",
slug2="option-two-and-now-with-a-tabular-inline",
)
RelatedPrepopulated.objects.get(
name=r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
pubdate="1981-08-22",
status="option one",
slug1="tabular-inline-with-ignored-characters-1981-08-22",
slug2="option-one-tabular-inline-with-ignored-characters",
)
def test_populate_existing_object(self):
"""
The prepopulation works for existing objects too, as long as
the original field is empty (#19082).
"""
from selenium.webdriver.common.by import By
# Slugs are empty to start with.
item = MainPrepopulated.objects.create(
name=" this is the mAin nÀMë",
pubdate="2012-02-18",
status="option two",
slug1="",
slug2="",
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
object_url = self.live_server_url + reverse(
"admin:admin_views_mainprepopulated_change", args=(item.id,)
)
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" the best")
# The slugs got prepopulated since they were originally empty
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
# Save the object
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" hello")
        # The slugs didn't change since they were originally not empty.
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
def test_collapsible_fieldset(self):
"""
        The 'collapse' class in a fieldset definition allows showing and
        hiding the corresponding field section.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_add")
)
self.assertFalse(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
self.assertTrue(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.assertEqual(
self.selenium.find_element(By.ID, "fieldsetcollapser0").text, "Hide"
)
def test_selectbox_height_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_pizza_add")
self.selenium.get(url)
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
filter_box = self.selenium.find_element(By.ID, "id_toppings_filter")
from_box = self.selenium.find_element(By.ID, "id_toppings_from")
to_box = self.selenium.find_element(By.ID, "id_toppings_to")
self.assertEqual(
to_box.get_property("offsetHeight"),
(
filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_selectbox_height_not_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_question_add")
self.selenium.get(url)
filter_box = self.selenium.find_element(By.ID, "id_related_questions_filter")
from_box = self.selenium.find_element(By.ID, "id_related_questions_from")
to_box = self.selenium.find_element(By.ID, "id_related_questions_to")
self.assertEqual(
to_box.get_property("offsetHeight"),
(
filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_first_field_focus(self):
"""JavaScript-assisted auto-focus on first usable form field."""
from selenium.webdriver.common.by import By
# First form field has a single widget
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_picture_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_name"),
)
# First form field has a MultiWidget
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_reservation_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_start_date_0"),
)
def test_cancel_delete_confirmation(self):
"Cancelling the deletion of an object takes the user back one page."
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
"""
Cancelling the deletion of an object with relations takes the user back
one page.
"""
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
topping1 = Topping.objects.create(name="Cheddar")
topping2 = Topping.objects.create(name="Mozzarella")
pizza.toppings.add(topping1, topping2)
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
self.assertEqual(Topping.objects.count(), 2)
def test_list_editable_popups(self):
"""
list_editable foreign keys have add/change popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
s1 = Section.objects.create(name="Test section")
Article.objects.create(
title="foo",
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=s1,
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_changelist")
)
# Change popup
self.selenium.find_element(By.ID, "change_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Change section")
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.clear()
name_input.send_keys("<i>edited section</i>")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Hide sidebar.
toggle_button = self.selenium.find_element(
By.CSS_SELECTOR, "#toggle-nav-sidebar"
)
toggle_button.click()
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "<i>edited section</i>")
# Rendered select2 input.
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, "×\n<i>edited section</i>")
# Add popup
self.selenium.find_element(By.ID, "add_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Add section")
self.selenium.find_element(By.ID, "id_name").send_keys("new section")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "new section")
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, "×\nnew section")
def test_inline_uuid_pk_edit_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "change_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(select.first_selected_option.text, str(parent.id))
self.assertEqual(
select.first_selected_option.get_attribute("value"), str(parent.id)
)
def test_inline_uuid_pk_add_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url
+ reverse("admin:admin_views_relatedwithuuidpkmodel_add")
)
self.selenium.find_element(By.ID, "add_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
uuid_id = str(ParentWithUUIDPK.objects.first().id)
self.assertEqual(select.first_selected_option.text, uuid_id)
self.assertEqual(select.first_selected_option.get_attribute("value"), uuid_id)
def test_inline_uuid_pk_delete_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Yes, I’m sure"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
self.assertEqual(select.first_selected_option.text, "---------")
self.assertEqual(select.first_selected_option.get_attribute("value"), "")
def test_inline_with_popup_cancel_delete(self):
"""Clicking ""No, take me back" on a delete popup closes the window."""
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//a[text()="No, take me back"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertEqual(len(self.selenium.window_handles), 1)
def test_list_editable_raw_id_fields(self):
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
parent2 = ParentWithUUIDPK.objects.create(title="test2")
RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_changelist",
current_app=site2.name,
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "lookup_id_form-0-parent").click()
self.wait_for_and_switch_to_popup()
# Select "parent2" in the popup.
self.selenium.find_element(By.LINK_TEXT, str(parent2.pk)).click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# The newly selected pk should appear in the raw id input.
value = self.selenium.find_element(By.ID, "id_form-0-parent").get_attribute(
"value"
)
self.assertEqual(value, str(parent2.pk))
def test_input_element_font(self):
"""
Browsers' default stylesheets override the font of inputs. The admin
adds additional CSS to handle this.
"""
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
element = self.selenium.find_element(By.ID, "id_username")
        # Some browsers quote the fonts, some don't.
fonts = [
font.strip().strip('"')
for font in element.value_of_css_property("font-family").split(",")
]
self.assertEqual(
fonts,
["Roboto", "Lucida Grande", "Verdana", "Arial", "sans-serif"],
)
def test_search_input_filtered_page(self):
from selenium.webdriver.common.by import By
Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="Grace Hopper", gender=1, alive=False)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
person_url = reverse("admin:admin_views_person_changelist") + "?q=Gui"
self.selenium.get(self.live_server_url + person_url)
self.assertGreater(
self.selenium.find_element(By.ID, "searchbar").rect["width"],
50,
)
def test_related_popup_index(self):
"""
Create a chain of 'self' related objects via popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
base_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
popup_window_test = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
popup_window_test2 = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.selenium.find_element(By.ID, "id_title").send_keys("test3")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test2)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test3").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test2").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(base_window)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
def test_related_popup_incorrect_close(self):
"""
Cleanup child popups when closing a parent popup.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
test_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
test2_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.assertEqual(len(self.selenium.window_handles), 4)
self.selenium.switch_to.window(test2_window)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.wait_until(lambda d: len(d.window_handles) == 2, 1)
self.assertEqual(len(self.selenium.window_handles), 2)
# Close final popup to clean up test.
self.selenium.switch_to.window(test_window)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.wait_until(lambda d: len(d.window_handles) == 1, 1)
self.selenium.switch_to.window(self.selenium.window_handles[-1])
def test_hidden_fields_small_window(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin:index"),
)
self.selenium.get(self.live_server_url + reverse("admin:admin_views_story_add"))
field_title = self.selenium.find_element(By.CLASS_NAME, "field-title")
current_size = self.selenium.get_window_size()
try:
self.selenium.set_window_size(1024, 768)
self.assertIs(field_title.is_displayed(), False)
self.selenium.set_window_size(767, 575)
self.assertIs(field_title.is_displayed(), False)
finally:
self.selenium.set_window_size(current_size["width"], current_size["height"])
def test_updating_related_objects_updates_fk_selects(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
born_country_select_id = "id_born_country"
living_country_select_id = "id_living_country"
favorite_country_to_vacation_select_id = "id_favorite_country_to_vacation"
continent_select_id = "id_continent"
def _get_HTML_inside_element_by_id(id_):
return self.selenium.find_element(By.ID, id_).get_attribute("innerHTML")
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_traveler_add")
self.selenium.get(self.live_server_url + add_url)
# Add new Country from the born_country select.
self.selenium.find_element(By.ID, f"add_{born_country_select_id}").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_name").send_keys("Argentina")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("South America")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
""",
)
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(living_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1">Argentina</option>
""",
)
        # Argentina won't appear because the favorite_country_to_vacation field
        # has limit_choices_to.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Add new Country from the living_country select.
self.selenium.find_element(By.ID, f"add_{living_country_select_id}").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_name").send_keys("Spain")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("Europe")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
<option value="2">Spain</option>
""",
)
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(living_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1">Argentina</option>
<option value="2" selected="">Spain</option>
""",
)
        # Spain won't appear because the favorite_country_to_vacation field has
        # limit_choices_to.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Edit second Country created from living_country select.
favorite_select = Select(
self.selenium.find_element(By.ID, living_country_select_id)
)
favorite_select.select_by_visible_text("Spain")
self.selenium.find_element(By.ID, f"change_{living_country_select_id}").click()
self.wait_for_and_switch_to_popup()
favorite_name_input = self.selenium.find_element(By.ID, "id_name")
favorite_name_input.clear()
favorite_name_input.send_keys("Italy")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
<option value="2">Italy</option>
""",
)
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(living_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1">Argentina</option>
<option value="2" selected="">Italy</option>
""",
)
# favorite_country_to_vacation field has no options.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Add a new Asian country.
self.selenium.find_element(
By.ID, f"add_{favorite_country_to_vacation_select_id}"
).click()
self.wait_for_and_switch_to_popup()
favorite_name_input = self.selenium.find_element(By.ID, "id_name")
favorite_name_input.send_keys("Qatar")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("Asia")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Submit the new Traveler.
self.selenium.find_element(By.CSS_SELECTOR, '[name="_save"]').click()
traveler = Traveler.objects.get()
self.assertEqual(traveler.born_country.name, "Argentina")
self.assertEqual(traveler.living_country.name, "Italy")
self.assertEqual(traveler.favorite_country_to_vacation.name, "Qatar")
@override_settings(ROOT_URLCONF="admin_views.urls")
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_readonly_get(self):
response = self.client.get(reverse("admin:admin_views_post_add"))
self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty forms
        # + 1 logout form.
self.assertContains(response, "<input", count=17)
self.assertContains(response, formats.localize(datetime.date.today()))
self.assertContains(response, "<label>Awesomeness level:</label>")
self.assertContains(response, "Very awesome.")
self.assertContains(response, "Unknown coolness.")
self.assertContains(response, "foo")
# Multiline text in a readonly field gets <br> tags
self.assertContains(response, "Multiline<br>test<br>string")
self.assertContains(
response,
'<div class="readonly">Multiline<br>html<br>content</div>',
html=True,
)
self.assertContains(response, "InlineMultiline<br>test<br>string")
self.assertContains(
response,
formats.localize(datetime.date.today() - datetime.timedelta(days=7)),
)
self.assertContains(response, '<div class="form-row field-coolness">')
self.assertContains(response, '<div class="form-row field-awesomeness_level">')
self.assertContains(response, '<div class="form-row field-posted">')
self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">')
self.assertContains(response, '<div class="help"', 3)
self.assertContains(
response,
'<div class="help" id="id_title_helptext">Some help text for the title '
"(with Unicode ŠĐĆŽćžšđ)</div>",
html=True,
)
self.assertContains(
response,
'<div class="help" id="id_content_helptext">Some help text for the content '
"(with Unicode ŠĐĆŽćžšđ)</div>",
html=True,
)
self.assertContains(
response,
'<div class="help">Some help text for the date (with Unicode ŠĐĆŽćžšđ)'
"</div>",
html=True,
)
p = Post.objects.create(
title="I worked on readonly_fields", content="Its good stuff"
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
self.assertContains(response, "%d amount of cool" % p.pk)
def test_readonly_text_field(self):
p = Post.objects.create(
title="Readonly test",
content="test",
readonly_content="test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest",
)
Link.objects.create(
url="http://www.djangoproject.com",
post=p,
readonly_link_content="test\r\nlink",
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
# Checking readonly field.
self.assertContains(response, "test<br><br>test<br><br>test<br><br>test")
# Checking readonly field in inline.
self.assertContains(response, "test<br>link")
def test_readonly_post(self):
data = {
"title": "Django Got Readonly Fields",
"content": "This is an incredible development.",
"link_set-TOTAL_FORMS": "1",
"link_set-INITIAL_FORMS": "0",
"link_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 1)
p = Post.objects.get()
self.assertEqual(p.posted, datetime.date.today())
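        # "posted" is read-only, so it isn't part of the rendered form and a
        # submitted value for it is ignored (verified below).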
data["posted"] = "10-8-1990" # some date that's not today
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 2)
p = Post.objects.order_by("-id")[0]
self.assertEqual(p.posted, datetime.date.today())
def test_readonly_manytomany(self):
"Regression test for #13004"
response = self.client.get(reverse("admin:admin_views_pizza_add"))
self.assertEqual(response.status_code, 200)
def test_user_password_change_limited_queryset(self):
su = User.objects.filter(is_superuser=True)[0]
response = self.client.get(
reverse("admin2:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 404)
def test_change_form_renders_correct_null_choice_value(self):
"""
Regression test for #17911.
"""
choice = Choice.objects.create(choice=None)
response = self.client.get(
reverse("admin:admin_views_choice_change", args=(choice.pk,))
)
self.assertContains(
response, '<div class="readonly">No opinion</div>', html=True
)
def _test_readonly_foreignkey_links(self, admin_site):
"""
ForeignKey readonly fields render as links if the target model is
registered in admin.
"""
chapter = Chapter.objects.create(
title="Chapter 1",
content="content",
book=Book.objects.create(name="Book 1"),
)
language = Language.objects.create(iso="_40", name="Test")
obj = ReadOnlyRelatedField.objects.create(
chapter=chapter,
language=language,
user=self.superuser,
)
response = self.client.get(
reverse(
f"{admin_site}:admin_views_readonlyrelatedfield_change", args=(obj.pk,)
),
)
# Related ForeignKey object registered in admin.
user_url = reverse(f"{admin_site}:auth_user_change", args=(self.superuser.pk,))
self.assertContains(
response,
'<div class="readonly"><a href="%s">super</a></div>' % user_url,
html=True,
)
# Related ForeignKey with the string primary key registered in admin.
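        # quote() escapes characters that are special in admin URLs, so string
        # primary keys round-trip through reverse().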
language_url = reverse(
f"{admin_site}:admin_views_language_change",
args=(quote(language.pk),),
)
self.assertContains(
response,
'<div class="readonly"><a href="%s">_40</a></div>' % language_url,
html=True,
)
# Related ForeignKey object not registered in admin.
self.assertContains(
response, '<div class="readonly">Chapter 1</div>', html=True
)
def test_readonly_foreignkey_links_default_admin_site(self):
self._test_readonly_foreignkey_links("admin")
def test_readonly_foreignkey_links_custom_admin_site(self):
self._test_readonly_foreignkey_links("namespaced_admin")
def test_readonly_manytomany_backwards_ref(self):
"""
Regression test for #16433 - backwards references for related objects
broke if the related field is read-only due to the help_text attribute
"""
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(reverse("admin:admin_views_topping_add"))
self.assertEqual(response.status_code, 200)
def test_readonly_manytomany_forwards_ref(self):
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(
reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
)
self.assertContains(response, "<label>Toppings:</label>", html=True)
self.assertContains(response, '<div class="readonly">Salami</div>', html=True)
def test_readonly_onetoone_backwards_ref(self):
"""
Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
"""
v1 = Villain.objects.create(name="Adam")
pl = Plot.objects.create(name="Test Plot", team_leader=v1, contact=v1)
pd = PlotDetails.objects.create(details="Brand New Plot", plot=pl)
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
pd_url = reverse("admin:admin_views_plotdetails_change", args=(pd.pk,))
self.assertEqual(field.contents(), '<a href="%s">Brand New Plot</a>' % pd_url)
# The reverse relation also works if the OneToOneField is null.
pd.plot = None
pd.save()
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
self.assertEqual(field.contents(), "-") # default empty value
def test_readonly_field_overrides(self):
"""
Regression test for #22087 - ModelForm Meta overrides are ignored by
AdminReadonlyField
"""
p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
response = self.client.get(
reverse("admin:admin_views_fieldoverridepost_change", args=(p.pk,))
)
self.assertContains(
response,
'<div class="help">Overridden help text for the date</div>',
html=True,
)
self.assertContains(
response,
'<label for="id_public">Overridden public label:</label>',
html=True,
)
self.assertNotContains(
response, "Some help text for the date (with Unicode ŠĐĆŽćžšđ)"
)
def test_correct_autoescaping(self):
"""
Make sure that non-field readonly elements are properly autoescaped (#24461)
"""
section = Section.objects.create(name="<a>evil</a>")
response = self.client.get(
reverse("admin:admin_views_section_change", args=(section.pk,))
)
self.assertNotContains(response, "<a>evil</a>", status_code=200)
self.assertContains(response, "<a>evil</a>", status_code=200)
def test_label_suffix_translated(self):
pizza = Pizza.objects.create(name="Americano")
url = reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
with self.settings(LANGUAGE_CODE="fr"):
response = self.client.get(url)
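            # French typography requires a no-break space before the colon,
            # hence the "\u00A0" in the expected label suffix.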
self.assertContains(response, "<label>Toppings\u00A0:</label>", html=True)
@override_settings(ROOT_URLCONF="admin_views.urls")
class LimitChoicesToInAdminTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to_as_callable(self):
"""Test for ticket 2445 changes to admin."""
threepwood = Character.objects.create(
username="threepwood",
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
marley = Character.objects.create(
username="marley",
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
response = self.client.get(reverse("admin:admin_views_stumpjoke_add"))
# The allowed option should appear twice; the limited option should not appear.
self.assertContains(response, threepwood.username, count=2)
self.assertNotContains(response, marley.username)
@override_settings(ROOT_URLCONF="admin_views.urls")
class RawIdFieldsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to(self):
"""Regression test for 14880"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
Inquisition.objects.create(expected=False, leader=actor, country="Spain")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content
)
self.assertTrue(m) # Got a match
        popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step also tests integers, strings and booleans in the
# lookup query string; in model we define inquisition field to have a
# limit_choices_to option that includes a filter on a string field
# (inquisition__actor__name), a filter on an integer field
# (inquisition__actor__age), and a filter on a boolean field
# (inquisition__expected).
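        # A hypothetical sketch of the field declaration being exercised,
        # matching the data created above (hedged, not asserted by this test):
        #   inquisition = models.ForeignKey(
        #       Inquisition,
        #       models.CASCADE,
        #       limit_choices_to={
        #           "leader__name": "Palin",  # string filter
        #           "leader__age": 27,        # integer filter
        #           "expected": False,        # boolean filter
        #       },
        #   )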
response2 = self.client.get(popup_url)
self.assertContains(response2, "Spain")
self.assertNotContains(response2, "England")
def test_limit_choices_to_isnull_false(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content
)
self.assertTrue(m) # Got a match
        popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests field__isnull=0 gets parsed correctly from the
# lookup query string; in model we define defendant0 field to have a
# limit_choices_to option that includes "actor__title__isnull=False".
response2 = self.client.get(popup_url)
self.assertContains(response2, "Kilbraken")
self.assertNotContains(response2, "Palin")
def test_limit_choices_to_isnull_true(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content
)
self.assertTrue(m) # Got a match
        popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests field__isnull=1 gets parsed correctly from the
# lookup query string; in model we define defendant1 field to have a
# limit_choices_to option that includes "actor__title__isnull=True".
response2 = self.client.get(popup_url)
self.assertNotContains(response2, "Kilbraken")
self.assertContains(response2, "Palin")
def test_list_display_method_same_name_as_reverse_accessor(self):
"""
Should be able to use a ModelAdmin method in list_display that has the
same name as a reverse model field ("sketch" in this case).
"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
response = self.client.get(reverse("admin:admin_views_inquisition_changelist"))
self.assertContains(response, "list-display-sketch")
@override_settings(ROOT_URLCONF="admin_views.urls")
class UserAdminTest(TestCase):
"""
Tests user CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
},
)
new_user = User.objects.get(username="newuser")
self.assertRedirects(
response, reverse("admin:auth_user_change", args=(new_user.pk,))
)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_save_continue_editing_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_continue": "1",
},
)
new_user = User.objects.get(username="newuser")
new_user_url = reverse("admin:auth_user_change", args=(new_user.pk,))
self.assertRedirects(response, new_user_url, fetch_redirect_response=False)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
response = self.client.get(new_user_url)
self.assertContains(
response,
'<li class="success">The user “<a href="%s">'
"%s</a>” was added successfully. You may edit it again below.</li>"
% (new_user_url, new_user),
html=True,
)
def test_password_mismatch(self):
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "mismatch",
},
)
self.assertEqual(response.status_code, 200)
self.assertFormError(response.context["adminform"], "password1", [])
self.assertFormError(
response.context["adminform"],
"password2",
["The two password fields didn’t match."],
)
def test_user_fk_add_popup(self):
"""
User addition through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(response, reverse("admin:auth_user_add"))
self.assertContains(
response,
'class="related-widget-wrapper-link add-related" id="add_id_owner"',
)
response = self.client.get(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR
)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR, data, follow=True
)
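        # The popup response page embeds a JSON payload whose quotes are
        # HTML-escaped by the template, hence the &quot; entities below.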
self.assertContains(response, ""obj": "newuser"")
def test_user_fk_change_popup(self):
"""
User change through a FK popup should return the appropriate JavaScript
response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_change", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_change", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""obj": "newuser"")
self.assertContains(response, ""action": "change"")
def test_user_fk_delete_popup(self):
"""
User deletion through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_delete", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_delete", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
data = {
"post": "yes",
IS_POPUP_VAR: "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""action": "delete"")
def test_save_add_another_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_addanother": "1",
},
)
new_user = User.objects.order_by("-id")[0]
self.assertRedirects(response, reverse("admin:auth_user_add"))
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_user_permission_performance(self):
u = User.objects.all()[0]
# Don't depend on a warm cache, see #17377.
ContentType.objects.clear_cache()
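        # Backends supporting savepoints issue extra SAVEPOINT/RELEASE
        # queries during the request, hence the higher expected count.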
expected_num_queries = 10 if connection.features.uses_savepoints else 8
with self.assertNumQueries(expected_num_queries):
response = self.client.get(reverse("admin:auth_user_change", args=(u.pk,)))
self.assertEqual(response.status_code, 200)
def test_form_url_present_in_context(self):
u = User.objects.all()[0]
response = self.client.get(
reverse("admin3:auth_user_password_change", args=(u.pk,))
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["form_url"], "pony")
@override_settings(ROOT_URLCONF="admin_views.urls")
class GroupAdminTest(TestCase):
"""
Tests group CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
group_count = Group.objects.count()
response = self.client.post(
reverse("admin:auth_group_add"),
{
"name": "newgroup",
},
)
Group.objects.order_by("-id")[0]
self.assertRedirects(response, reverse("admin:auth_group_changelist"))
self.assertEqual(Group.objects.count(), group_count + 1)
def test_group_permission_performance(self):
g = Group.objects.create(name="test_group")
# Ensure no queries are skipped due to cached content type for Group.
ContentType.objects.clear_cache()
expected_num_queries = 8 if connection.features.uses_savepoints else 6
with self.assertNumQueries(expected_num_queries):
response = self.client.get(reverse("admin:auth_group_change", args=(g.pk,)))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class CSSTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_field_prefix_css_classes(self):
"""
Fields have a CSS class name with a 'field-' prefix.
"""
response = self.client.get(reverse("admin:admin_views_post_add"))
# The main form
self.assertContains(response, 'class="form-row field-title"')
self.assertContains(response, 'class="form-row field-content"')
self.assertContains(response, 'class="form-row field-public"')
self.assertContains(response, 'class="form-row field-awesomeness_level"')
self.assertContains(response, 'class="form-row field-coolness"')
self.assertContains(response, 'class="form-row field-value"')
self.assertContains(response, 'class="form-row"') # The lambda function
# The tabular inline
self.assertContains(response, '<td class="field-url">')
self.assertContains(response, '<td class="field-posted">')
def test_index_css_classes(self):
"""
CSS class names are used for each app and model on the admin index
pages (#17050).
"""
# General index page
response = self.client.get(reverse("admin:index"))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
# App index page
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
def test_app_model_in_form_body_class(self):
"""
Ensure app and model tag are correctly read by change_form template
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_list_body_class(self):
"""
Ensure app and model tag are correctly read by change_list template
"""
response = self.client.get(reverse("admin:admin_views_section_changelist"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_delete_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by delete_confirmation
template
"""
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_app_index_body_class(self):
"""
Ensure app and model tag are correctly read by app_index template
"""
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<body class=" dashboard app-admin_views')
def test_app_model_in_delete_selected_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by
delete_selected_confirmation template
"""
action_data = {
ACTION_CHECKBOX_NAME: [self.s1.pk],
"action": "delete_selected",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_section_changelist"), action_data
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_changelist_field_classes(self):
"""
Cells of the change list table should contain the field name in their
class attribute.
"""
Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertContains(response, '<th class="field-name">')
self.assertContains(response, '<td class="field-release_date nowrap">')
self.assertContains(response, '<td class="action-checkbox">')
try:
import docutils
except ImportError:
docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF="admin_views.urls")
@modify_settings(
INSTALLED_APPS={"append": ["django.contrib.admindocs", "django.contrib.flatpages"]}
)
class AdminDocsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_tags(self):
response = self.client.get(reverse("django-admindocs-tags"))
# The builtin tag group exists
self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
# A builtin tag exists in both the index and detail
self.assertContains(
response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#built_in-autoescape">autoescape</a></li>',
html=True,
)
# An app tag exists in both the index and detail
self.assertContains(
response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>',
html=True,
)
# The admin list tag group exists
self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
# An admin list tag exists in both the index and detail
self.assertContains(
response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#admin_list-admin_actions">admin_actions</a></li>',
html=True,
)
def test_filters(self):
response = self.client.get(reverse("django-admindocs-filters"))
# The builtin filter group exists
self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
# A builtin filter exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
self.assertContains(
response, '<li><a href="#built_in-add">add</a></li>', html=True
)
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class ValidXHTMLTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_lang_name_present(self):
with translation.override(None):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertNotContains(response, ' lang=""')
self.assertNotContains(response, ' xml:lang=""')
@override_settings(ROOT_URLCONF="admin_views.urls", USE_THOUSAND_SEPARATOR=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def assert_non_localized_year(self, response, year):
"""
The year is not localized with USE_THOUSAND_SEPARATOR (#15234).
"""
self.assertNotContains(response, formats.number_format(year))
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % date.year)
def assert_contains_month_link(self, response, date):
self.assertContains(
response,
            '?release_date__month=%d&amp;release_date__year=%d"'
% (date.month, date.year),
)
def assert_contains_day_link(self, response, date):
self.assertContains(
response,
"?release_date__day=%d&"
'release_date__month=%d&release_date__year=%d"'
% (date.day, date.month, date.year),
)
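    # These assertions exercise ModelAdmin.date_hierarchy; the PodcastAdmin
    # used here is assumed to declare roughly:
    #   class PodcastAdmin(admin.ModelAdmin):
    #       date_hierarchy = "release_date"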
def test_empty(self):
"""
No date hierarchy links display with empty changelist.
"""
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertNotContains(response, "release_date__year=")
self.assertNotContains(response, "release_date__month=")
self.assertNotContains(response, "release_date__day=")
def test_single(self):
"""
Single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
day-level links appear for changelist within single month.
"""
DATES = (
datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
month-level links appear for changelist within single year.
"""
DATES = (
datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, "release_date__day=")
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
year-level links appear for year-spanning changelist.
"""
DATES = (
datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
# no day/month-level links
self.assertNotContains(response, "release_date__day=")
self.assertNotContains(response, "release_date__month=")
for date in DATES:
self.assert_contains_year_link(response, date)
# and make sure GET parameters still behave correctly
for date in DATES:
url = "%s?release_date__year=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = "%s?release_date__year=%d&release_date__month=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
date.month,
)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
def test_related_field(self):
questions_data = (
# (posted data, number of answers),
(datetime.date(2001, 1, 30), 0),
(datetime.date(2003, 3, 15), 1),
(datetime.date(2005, 5, 3), 2),
)
for date, answer_count in questions_data:
question = Question.objects.create(posted=date)
for i in range(answer_count):
question.answer_set.create()
response = self.client.get(reverse("admin:admin_views_answer_changelist"))
for date, answer_count in questions_data:
link = '?question__posted__year=%d"' % date.year
if answer_count > 0:
self.assertContains(response, link)
else:
self.assertNotContains(response, link)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomSaveRelatedTests(TestCase):
"""
One can easily customize the way related objects are saved.
Refs #16115.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_should_be_able_to_edit_related_objects_on_add_view(self):
post = {
"child_set-TOTAL_FORMS": "3",
"child_set-INITIAL_FORMS": "0",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-1-name": "Catherine",
}
self.client.post(reverse("admin:admin_views_parent_add"), post)
self.assertEqual(1, Parent.objects.count())
self.assertEqual(2, Child.objects.count())
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_change_view(self):
parent = Parent.objects.create(name="Josh Stone")
paul = Child.objects.create(parent=parent, name="Paul")
catherine = Child.objects.create(parent=parent, name="Catherine")
post = {
"child_set-TOTAL_FORMS": "5",
"child_set-INITIAL_FORMS": "2",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-0-id": paul.id,
"child_set-1-name": "Catherine",
"child_set-1-id": catherine.id,
}
self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.id,)), post
)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
parent = Parent.objects.create(name="Josh Rock")
Child.objects.create(parent=parent, name="Paul")
Child.objects.create(parent=parent, name="Catherine")
post = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": parent.id,
"form-0-name": "Josh Stone",
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_parent_changelist"), post)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewLogoutTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def test_logout(self):
self.client.force_login(self.superuser)
response = self.client.post(reverse("admin:logout"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "registration/logged_out.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:logout"))
self.assertFalse(response.context["has_permission"])
self.assertNotContains(
response, "user-tools"
        )  # The user-tools div shouldn't be visible.
def test_client_logout_url_can_be_used_to_login(self):
response = self.client.post(reverse("admin:logout"))
self.assertEqual(
response.status_code, 302
) # we should be redirected to the login page.
# follow the redirect and test results.
response = self.client.post(reverse("admin:logout"), follow=True)
self.assertContains(
response,
'<input type="hidden" name="next" value="%s">' % reverse("admin:index"),
)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:login"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminUserMessageTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def send_message(self, level):
"""
Helper that sends a post to the dummy test methods and asserts that a
message with the level has appeared in the response.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_%s" % level,
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="%s">Test %s</li>' % (level, level), html=True
)
@override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request
def test_message_debug(self):
self.send_message("debug")
def test_message_info(self):
self.send_message("info")
def test_message_success(self):
self.send_message("success")
def test_message_warning(self):
self.send_message("warning")
def test_message_error(self):
self.send_message("error")
def test_message_extra_tags(self):
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_extra_tags",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="extra_tag info">Test tags</li>', html=True
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminKeepChangeListFiltersTests(TestCase):
admin_site = site
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
def setUp(self):
self.client.force_login(self.superuser)
def assertURLEqual(self, url1, url2, msg_prefix=""):
"""
Assert that two URLs are equal despite the ordering
of their querystring. Refs #22360.
"""
parsed_url1 = urlparse(url1)
path1 = parsed_url1.path
parsed_qs1 = dict(parse_qsl(parsed_url1.query))
parsed_url2 = urlparse(url2)
path2 = parsed_url2.path
parsed_qs2 = dict(parse_qsl(parsed_url2.query))
for parsed_qs in [parsed_qs1, parsed_qs2]:
if "_changelist_filters" in parsed_qs:
changelist_filters = parsed_qs["_changelist_filters"]
parsed_filters = dict(parse_qsl(changelist_filters))
parsed_qs["_changelist_filters"] = parsed_filters
self.assertEqual(path1, path2)
self.assertEqual(parsed_qs1, parsed_qs2)
def test_assert_url_equal(self):
# Test equality.
change_user_url = reverse(
"admin:auth_user_change", args=(self.joepublicuser.pk,)
)
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Test inequality.
with self.assertRaises(AssertionError):
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D1%26is_superuser__exact%3D1".format(change_user_url),
)
# Ignore scheme and host.
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Ignore ordering of querystring.
self.assertURLEqual(
"{}?is_staff__exact=0&is_superuser__exact=0".format(
reverse("admin:auth_user_changelist")
),
"{}?is_superuser__exact=0&is_staff__exact=0".format(
reverse("admin:auth_user_changelist")
),
)
# Ignore ordering of _changelist_filters.
self.assertURLEqual(
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_superuser__exact%3D0%26is_staff__exact%3D0".format(change_user_url),
)
def get_changelist_filters(self):
return {
"is_superuser__exact": 0,
"is_staff__exact": 0,
}
def get_changelist_filters_querystring(self):
return urlencode(self.get_changelist_filters())
def get_preserved_filters_querystring(self):
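        # e.g. "_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0"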
return urlencode(
{"_changelist_filters": self.get_changelist_filters_querystring()}
)
def get_sample_user_id(self):
return self.joepublicuser.pk
def get_changelist_url(self):
return "%s?%s" % (
reverse("admin:auth_user_changelist", current_app=self.admin_site.name),
self.get_changelist_filters_querystring(),
)
def get_add_url(self, add_preserved_filters=True):
url = reverse("admin:auth_user_add", current_app=self.admin_site.name)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_change_url(self, user_id=None, add_preserved_filters=True):
if user_id is None:
user_id = self.get_sample_user_id()
url = reverse(
"admin:auth_user_change", args=(user_id,), current_app=self.admin_site.name
)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_history_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_history",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def get_delete_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_delete",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def test_changelist_view(self):
response = self.client.get(self.get_changelist_url())
self.assertEqual(response.status_code, 200)
# Check the `change_view` link has the correct querystring.
detail_link = re.search(
'<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
response.content.decode(),
)
self.assertURLEqual(detail_link[1], self.get_change_url())
def test_change_view(self):
# Get the `change_view`.
response = self.client.get(self.get_change_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
# Check the history link.
history_link = re.search(
'<a href="(.*?)" class="historylink">History</a>', response.content.decode()
)
self.assertURLEqual(history_link[1], self.get_history_url())
# Check the delete link.
delete_link = re.search(
'<a href="(.*?)" class="deletelink">Delete</a>', response.content.decode()
)
self.assertURLEqual(delete_link[1], self.get_delete_url())
# Test redirect on "Save".
post_data = {
"username": "joepublic",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
}
post_data["_save"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_changelist_url())
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["_continue"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_change_url())
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["_addanother"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_change_view_without_preserved_filters(self):
response = self.client.get(self.get_change_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_add_view(self):
# Get the `add_view`.
response = self.client.get(self.get_add_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
post_data = {
"username": "dummy",
"password1": "test",
"password2": "test",
}
# Test redirect on "Save".
post_data["_save"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy").pk)
)
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["username"] = "dummy2"
post_data["_continue"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy2").pk)
)
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["username"] = "dummy3"
post_data["_addanother"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_add_view_without_preserved_filters(self):
response = self.client.get(self.get_add_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_delete_view(self):
# Test redirect on "Delete".
response = self.client.post(self.get_delete_url(), {"post": "yes"})
self.assertRedirects(response, self.get_changelist_url())
def test_url_prefix(self):
context = {
"preserved_filters": self.get_preserved_filters_querystring(),
"opts": User._meta,
}
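        # add_preserved_filters() comes from
        # django.contrib.admin.templatetags.admin_urls and re-appends the
        # preserved _changelist_filters querystring to the given URL.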
prefixes = ("", "/prefix/", "/後台/")
for prefix in prefixes:
with self.subTest(prefix=prefix), override_script_prefix(prefix):
url = reverse(
"admin:auth_user_changelist", current_app=self.admin_site.name
)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
admin_site = site2
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestLabelVisibility(TestCase):
"""#11277 -Labels of hidden fields in admin were not hidden."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_all_fields_visible(self):
response = self.client.get(reverse("admin:admin_views_emptymodelvisible_add"))
self.assert_fieldline_visible(response)
self.assert_field_visible(response, "first")
self.assert_field_visible(response, "second")
def test_all_fields_hidden(self):
response = self.client.get(reverse("admin:admin_views_emptymodelhidden_add"))
self.assert_fieldline_hidden(response)
self.assert_field_hidden(response, "first")
self.assert_field_hidden(response, "second")
def test_mixin(self):
response = self.client.get(reverse("admin:admin_views_emptymodelmixin_add"))
self.assert_fieldline_visible(response)
self.assert_field_hidden(response, "first")
self.assert_field_visible(response, "second")
def assert_field_visible(self, response, field_name):
self.assertContains(response, '<div class="fieldBox field-%s">' % field_name)
def assert_field_hidden(self, response, field_name):
self.assertContains(
response, '<div class="fieldBox field-%s hidden">' % field_name
)
def assert_fieldline_visible(self, response):
self.assertContains(response, '<div class="form-row field-first field-second">')
def assert_fieldline_hidden(self, response):
self.assertContains(response, '<div class="form-row hidden')
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewOnSiteTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_add_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data.
        Also, assertFormError() and assertFormsetError() are usable for admin
forms and formsets.
"""
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test1",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": "",
"dependentchild_set-0-family_name": "Test2",
}
response = self.client.post(
reverse("admin:admin_views_parentwithdependentchildren_add"), post_data
)
self.assertFormError(
response.context["adminform"],
"some_required_info",
["This field is required."],
)
self.assertFormError(response.context["adminform"], None, [])
self.assertFormsetError(
response.context["inline_admin_formset"],
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
self.assertFormsetError(
response.context["inline_admin_formset"], None, None, []
)
def test_change_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data
"""
pwdc = ParentWithDependentChildren.objects.create(
some_required_info=6, family_name="Test1"
)
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test2",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": str(pwdc.id),
"dependentchild_set-0-family_name": "Test1",
}
response = self.client.post(
reverse(
"admin:admin_views_parentwithdependentchildren_change", args=(pwdc.id,)
),
post_data,
)
self.assertFormError(
response.context["adminform"],
"some_required_info",
["This field is required."],
)
self.assertFormsetError(
response.context["inline_admin_formset"],
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
def test_check(self):
"The view_on_site value is either a boolean or a callable"
try:
admin = CityAdmin(City, AdminSite())
CityAdmin.view_on_site = True
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = False
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = []
self.assertEqual(
admin.check(),
[
Error(
"The value of 'view_on_site' must be a callable or a boolean "
"value.",
obj=CityAdmin,
id="admin.E025",
),
],
)
finally:
# Restore the original values for the benefit of other tests.
CityAdmin.view_on_site = True
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, 1))
)
def test_true(self):
"The default behavior is followed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_worker_change", args=(self.w1.pk,))
)
self.assertContains(
response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name)
)
def test_missing_get_absolute_url(self):
"None is returned if model doesn't have get_absolute_url"
model_admin = ModelAdmin(Worker, None)
self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
def test_custom_admin_site(self):
model_admin = ModelAdmin(City, customadmin.site)
content_type_pk = ContentType.objects.get_for_model(City).pk
redirect_url = model_admin.get_view_on_site_url(self.c1)
self.assertEqual(
redirect_url,
reverse(
f"{customadmin.site.name}:view_on_site",
kwargs={
"content_type_id": content_type_pk,
"object_id": self.c1.pk,
},
),
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class InlineAdminViewOnSiteTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_state_change", args=(self.s1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_true(self):
"The 'View on site' button is displayed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.r1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
self.assertContains(
response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name)
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class GetFormsetsWithInlinesArgumentTest(TestCase):
"""
#23934 - When adding a new model instance in the admin, the 'obj' argument
of get_formsets_with_inlines() should be None. When changing, it should be
equal to the existing model instance.
The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
if obj is not None during add_view or obj is None during change_view.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_explicitly_provided_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
def test_implicitly_generated_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSiteFinalCatchAllPatternTests(TestCase):
"""
Verifies the behaviour of the admin catch-all view.
    * Anonymous/non-staff users are redirected to login for all URLs, whether
otherwise valid or not.
* APPEND_SLASH is applied for staff if needed.
* Otherwise Http404.
* Catch-all view disabled via AdminSite.final_catch_all_view.
"""
def test_unknown_url_redirects_login_if_not_authenticated(self):
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), unknown_url)
)
def test_unknown_url_404_if_authenticated(self):
superuser = User.objects.create_superuser(
username="super",
password="secret",
email="[email protected]",
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_known_url_missing_slash_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")[:-1]
response = self.client.get(known_url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_non_admin_url_shares_url_prefix(self):
url = reverse("non_admin")[:-1]
response = self.client.get(url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
def test_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
    def test_unknown_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")[:-1]
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_script_name(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1], SCRIPT_NAME="/prefix/")
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True, FORCE_SCRIPT_NAME="/prefix/")
def test_missing_slash_append_slash_true_force_script_name(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_non_staff_user(self):
user = User.objects.create_user(
username="user",
password="secret",
email="[email protected]",
is_staff=False,
)
self.client.force_login(user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/test_admin/admin/login/?next=/test_admin/admin/admin_views/article",
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_single_model_no_append_slash(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin9:admin_views_actor_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Same tests above with final_catch_all_view=False.
def test_unknown_url_404_if_not_authenticated_without_final_catch_all_view(self):
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_unknown_url_404_if_authenticated_without_final_catch_all_view(self):
superuser = User.objects.create_superuser(
username="super",
password="secret",
email="[email protected]",
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_auth_without_final_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin10:login"), known_url)
)
def test_known_url_missing_slash_redirects_with_slash_if_not_auth_no_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, fetch_redirect_response=False
)
def test_non_admin_url_shares_url_prefix_without_final_catch_all_view(self):
url = reverse("non_admin10")
response = self.client.get(url[:-1])
self.assertRedirects(response, url, status_code=301)
def test_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin10:login"), url))
def test_unknown_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")[:-1]
response = self.client.get(url)
# Matches test_admin/admin10/admin_views/article/<path:object_id>/
self.assertRedirects(
response, url + "/", status_code=301, fetch_redirect_response=False
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url_without_final_catch_all_view(
self,
):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_without_final_catch_all_view(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false_without_final_catch_all_view(self):
superuser = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
self.client.force_login(superuser)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Outside admin.
def test_non_admin_url_404_if_not_authenticated(self):
unknown_url = "/unknown/"
response = self.client.get(unknown_url)
# Does not redirect to the admin login.
self.assertEqual(response.status_code, 404)
|
869cc4dbf4d1dc2e23ef2466d36309261c57e4faadd2aca948172203386ea72e | import datetime
import tempfile
import uuid
from django.contrib import admin
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.files.storage import FileSystemStorage
from django.db import models
class Section(models.Model):
"""
A simple section that links to articles, to test linking to related items
in admin views.
"""
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@property
def name_property(self):
"""
A property that simply returns the name. Used to test #24461
"""
return self.name
class Article(models.Model):
"""
A simple article to test admin views. Test backwards compatibility.
"""
title = models.CharField(max_length=100)
content = models.TextField()
date = models.DateTimeField()
section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True)
another_section = models.ForeignKey(
Section, models.CASCADE, null=True, blank=True, related_name="+"
)
sub_section = models.ForeignKey(
Section, models.SET_NULL, null=True, blank=True, related_name="+"
)
def __str__(self):
return self.title
@admin.display(ordering="date", description="")
def model_year(self):
return self.date.year
@admin.display(ordering="-date", description="")
def model_year_reversed(self):
return self.date.year
@property
@admin.display(ordering="date")
def model_property_year(self):
return self.date.year
@property
def model_month(self):
return self.date.month
class Book(models.Model):
"""
A simple book that has chapters.
"""
name = models.CharField(max_length=100, verbose_name="¿Name?")
def __str__(self):
return self.name
class Promo(models.Model):
name = models.CharField(max_length=100, verbose_name="¿Name?")
book = models.ForeignKey(Book, models.CASCADE)
author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True)
def __str__(self):
return self.name
class Chapter(models.Model):
title = models.CharField(max_length=100, verbose_name="¿Title?")
content = models.TextField()
book = models.ForeignKey(Book, models.CASCADE)
class Meta:
# Use a utf-8 bytestring to ensure it works (see #11710)
verbose_name = "¿Chapter?"
def __str__(self):
return self.title
class ChapterXtra1(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name="¿Chap?")
xtra = models.CharField(max_length=100, verbose_name="¿Xtra?")
guest_author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True)
def __str__(self):
return "¿Xtra1: %s" % self.xtra
class ChapterXtra2(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name="¿Chap?")
xtra = models.CharField(max_length=100, verbose_name="¿Xtra?")
def __str__(self):
return "¿Xtra2: %s" % self.xtra
class RowLevelChangePermissionModel(models.Model):
name = models.CharField(max_length=100, blank=True)
class CustomArticle(models.Model):
content = models.TextField()
date = models.DateTimeField()
class ModelWithStringPrimaryKey(models.Model):
string_pk = models.CharField(max_length=255, primary_key=True)
def __str__(self):
return self.string_pk
def get_absolute_url(self):
return "/dummy/%s/" % self.string_pk
class Color(models.Model):
value = models.CharField(max_length=10)
warm = models.BooleanField(default=False)
def __str__(self):
return self.value
# We replicate Color to register it with another ModelAdmin.
class Color2(Color):
class Meta:
proxy = True
class Thing(models.Model):
title = models.CharField(max_length=20)
color = models.ForeignKey(Color, models.CASCADE, limit_choices_to={"warm": True})
pub_date = models.DateField(blank=True, null=True)
def __str__(self):
return self.title
class Actor(models.Model):
name = models.CharField(max_length=50)
age = models.IntegerField()
title = models.CharField(max_length=50, null=True, blank=True)
def __str__(self):
return self.name
class Inquisition(models.Model):
expected = models.BooleanField(default=False)
leader = models.ForeignKey(Actor, models.CASCADE)
country = models.CharField(max_length=20)
def __str__(self):
return "by %s from %s" % (self.leader, self.country)
class Sketch(models.Model):
title = models.CharField(max_length=100)
inquisition = models.ForeignKey(
Inquisition,
models.CASCADE,
limit_choices_to={
"leader__name": "Palin",
"leader__age": 27,
"expected": False,
},
)
defendant0 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={"title__isnull": False},
related_name="as_defendant0",
)
defendant1 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={"title__isnull": True},
related_name="as_defendant1",
)
def __str__(self):
return self.title
def today_callable_dict():
return {"last_action__gte": datetime.datetime.today()}
def today_callable_q():
return models.Q(last_action__gte=datetime.datetime.today())
class Character(models.Model):
username = models.CharField(max_length=100)
last_action = models.DateTimeField()
def __str__(self):
return self.username
class StumpJoke(models.Model):
variation = models.CharField(max_length=100)
most_recently_fooled = models.ForeignKey(
Character,
models.CASCADE,
limit_choices_to=today_callable_dict,
related_name="+",
)
has_fooled_today = models.ManyToManyField(
Character, limit_choices_to=today_callable_q, related_name="+"
)
def __str__(self):
return self.variation
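# A hedged sketch (not part of the test suite) of how the callable
# limit_choices_to values on StumpJoke are resolved: Django invokes the
# callable each time the form field is built, so today_callable_dict() and
# today_callable_q() are evaluated per request rather than at import time.
# `example_resolve_limit_choices_to` is a hypothetical helper.
def example_resolve_limit_choices_to(field):
    # RelatedField.get_limit_choices_to() calls the value when it's
    # callable, returning a dict or Q suitable for complex_filter().
    return Character.objects.complex_filter(field.get_limit_choices_to())
# Example (illustrative) call:
#     example_resolve_limit_choices_to(
#         StumpJoke._meta.get_field("most_recently_fooled")
#     )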
class Fabric(models.Model):
NG_CHOICES = (
(
"Textured",
(
("x", "Horizontal"),
("y", "Vertical"),
),
),
("plain", "Smooth"),
)
surface = models.CharField(max_length=20, choices=NG_CHOICES)
class Person(models.Model):
GENDER_CHOICES = (
(1, "Male"),
(2, "Female"),
)
name = models.CharField(max_length=100)
gender = models.IntegerField(choices=GENDER_CHOICES)
age = models.IntegerField(default=21)
alive = models.BooleanField(default=True)
def __str__(self):
return self.name
class Persona(models.Model):
"""
A simple persona associated with accounts, to test inlining of related
accounts which inherit from a common accounts class.
"""
name = models.CharField(blank=False, max_length=80)
def __str__(self):
return self.name
class Account(models.Model):
"""
A simple, generic account encapsulating the information shared by all
types of accounts.
"""
username = models.CharField(blank=False, max_length=80)
persona = models.ForeignKey(Persona, models.CASCADE, related_name="accounts")
servicename = "generic service"
def __str__(self):
return "%s: %s" % (self.servicename, self.username)
class FooAccount(Account):
"""A service-specific account of type Foo."""
servicename = "foo"
class BarAccount(Account):
"""A service-specific account of type Bar."""
servicename = "bar"
class Subscriber(models.Model):
name = models.CharField(blank=False, max_length=80)
email = models.EmailField(blank=False, max_length=175)
def __str__(self):
return "%s (%s)" % (self.name, self.email)
class ExternalSubscriber(Subscriber):
pass
class OldSubscriber(Subscriber):
pass
class Media(models.Model):
name = models.CharField(max_length=60)
class Podcast(Media):
release_date = models.DateField()
class Meta:
ordering = ("release_date",) # overridden in PodcastAdmin
class Vodcast(Media):
media = models.OneToOneField(
Media, models.CASCADE, primary_key=True, parent_link=True
)
released = models.BooleanField(default=False)
class Parent(models.Model):
name = models.CharField(max_length=128)
def clean(self):
if self.name == "_invalid":
raise ValidationError("invalid")
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE, editable=False)
name = models.CharField(max_length=30, blank=True)
def clean(self):
if self.name == "_invalid":
raise ValidationError("invalid")
class PKChild(models.Model):
"""
Used to check autocomplete to_field resolution when ForeignKey is PK.
"""
parent = models.ForeignKey(Parent, models.CASCADE, primary_key=True)
name = models.CharField(max_length=128)
class Meta:
ordering = ["parent"]
def __str__(self):
return self.name
class Toy(models.Model):
child = models.ForeignKey(PKChild, models.CASCADE)
class EmptyModel(models.Model):
def __str__(self):
return "Primary key = %s" % self.id
temp_storage = FileSystemStorage(tempfile.mkdtemp())
class Gallery(models.Model):
name = models.CharField(max_length=100)
class Picture(models.Model):
name = models.CharField(max_length=100)
image = models.FileField(storage=temp_storage, upload_to="test_upload")
gallery = models.ForeignKey(Gallery, models.CASCADE, related_name="pictures")
class Language(models.Model):
iso = models.CharField(max_length=5, primary_key=True)
name = models.CharField(max_length=50)
english_name = models.CharField(max_length=50)
shortlist = models.BooleanField(default=False)
def __str__(self):
return self.iso
class Meta:
ordering = ("iso",)
# a base class for Recommender and Recommendation
class Title(models.Model):
pass
class TitleTranslation(models.Model):
title = models.ForeignKey(Title, models.CASCADE)
text = models.CharField(max_length=100)
class Recommender(Title):
pass
class Recommendation(Title):
the_recommender = models.ForeignKey(Recommender, models.CASCADE)
class Collector(models.Model):
name = models.CharField(max_length=100)
class Widget(models.Model):
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class DooHickey(models.Model):
code = models.CharField(max_length=10, primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Grommet(models.Model):
code = models.AutoField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Whatsit(models.Model):
index = models.IntegerField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Doodad(models.Model):
name = models.CharField(max_length=100)
class FancyDoodad(Doodad):
owner = models.ForeignKey(Collector, models.CASCADE)
expensive = models.BooleanField(default=True)
class Category(models.Model):
collector = models.ForeignKey(Collector, models.CASCADE)
order = models.PositiveIntegerField()
class Meta:
ordering = ("order",)
def __str__(self):
return "%s:o%s" % (self.id, self.order)
def link_posted_default():
return datetime.date.today() - datetime.timedelta(days=7)
class Link(models.Model):
posted = models.DateField(default=link_posted_default)
url = models.URLField()
post = models.ForeignKey("Post", models.CASCADE)
readonly_link_content = models.TextField()
class PrePopulatedPost(models.Model):
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
slug = models.SlugField()
class PrePopulatedSubPost(models.Model):
post = models.ForeignKey(PrePopulatedPost, models.CASCADE)
subtitle = models.CharField(max_length=100)
subslug = models.SlugField()
class Post(models.Model):
title = models.CharField(
max_length=100, help_text="Some help text for the title (with Unicode ŠĐĆŽćžšđ)"
)
content = models.TextField(
help_text="Some help text for the content (with Unicode ŠĐĆŽćžšđ)"
)
readonly_content = models.TextField()
posted = models.DateField(
default=datetime.date.today,
help_text="Some help text for the date (with Unicode ŠĐĆŽćžšđ)",
)
public = models.BooleanField(null=True, blank=True)
def awesomeness_level(self):
return "Very awesome."
# Proxy model to test overridden fields attrs on Post model so as not to
# interfere with other tests.
class FieldOverridePost(Post):
class Meta:
proxy = True
class Gadget(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class Villain(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class SuperVillain(Villain):
pass
class FunkyTag(models.Model):
"Because we all know there's only one real use case for GFKs."
name = models.CharField(max_length=25)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
def __str__(self):
return self.name
class Plot(models.Model):
name = models.CharField(max_length=100)
team_leader = models.ForeignKey(Villain, models.CASCADE, related_name="lead_plots")
contact = models.ForeignKey(Villain, models.CASCADE, related_name="contact_plots")
tags = GenericRelation(FunkyTag)
def __str__(self):
return self.name
class PlotDetails(models.Model):
details = models.CharField(max_length=100)
plot = models.OneToOneField(Plot, models.CASCADE, null=True, blank=True)
def __str__(self):
return self.details
class PlotProxy(Plot):
class Meta:
proxy = True
class SecretHideout(models.Model):
"""Secret! Not registered with the admin!"""
location = models.CharField(max_length=100)
villain = models.ForeignKey(Villain, models.CASCADE)
def __str__(self):
return self.location
class SuperSecretHideout(models.Model):
"""Secret! Not registered with the admin!"""
location = models.CharField(max_length=100)
supervillain = models.ForeignKey(SuperVillain, models.CASCADE)
def __str__(self):
return self.location
class Bookmark(models.Model):
name = models.CharField(max_length=60)
tag = GenericRelation(FunkyTag, related_query_name="bookmark")
def __str__(self):
return self.name
class CyclicOne(models.Model):
name = models.CharField(max_length=25)
two = models.ForeignKey("CyclicTwo", models.CASCADE)
def __str__(self):
return self.name
class CyclicTwo(models.Model):
name = models.CharField(max_length=25)
one = models.ForeignKey(CyclicOne, models.CASCADE)
def __str__(self):
return self.name
class Topping(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class Pizza(models.Model):
name = models.CharField(max_length=20)
toppings = models.ManyToManyField("Topping", related_name="pizzas")
# Pizza's ModelAdmin has readonly_fields = ['toppings'].
# toppings is editable for this model's admin.
class ReadablePizza(Pizza):
class Meta:
proxy = True
# No default permissions are created for this model and both name and toppings
# are readonly for this model's admin.
class ReadOnlyPizza(Pizza):
class Meta:
proxy = True
default_permissions = ()
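# Hedged note on the proxies above: because ReadOnlyPizza sets
# default_permissions = (), migrate creates no add/change/delete/view
# Permission rows for it, so any access in tests comes entirely from the
# ModelAdmin's has_*_permission() overrides. Illustrative check only:
#
#     from django.contrib.auth.models import Permission
#     Permission.objects.filter(content_type__model="readonlypizza").exists()
#     # -> False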
class Album(models.Model):
owner = models.ForeignKey(User, models.SET_NULL, null=True, blank=True)
title = models.CharField(max_length=30)
class Song(models.Model):
name = models.CharField(max_length=20)
album = models.ForeignKey(Album, on_delete=models.RESTRICT)
def __str__(self):
return self.name
class Employee(Person):
code = models.CharField(max_length=20)
class Meta:
ordering = ["name"]
class WorkHour(models.Model):
datum = models.DateField()
employee = models.ForeignKey(Employee, models.CASCADE)
class Manager(Employee):
"""
A multi-layer MTI child.
"""
pass
class Bonus(models.Model):
recipient = models.ForeignKey(Manager, on_delete=models.CASCADE)
class Question(models.Model):
big_id = models.BigAutoField(primary_key=True)
question = models.CharField(max_length=20)
posted = models.DateField(default=datetime.date.today)
expires = models.DateTimeField(null=True, blank=True)
related_questions = models.ManyToManyField("self")
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
def __str__(self):
return self.question
class Answer(models.Model):
question = models.ForeignKey(Question, models.PROTECT)
question_with_to_field = models.ForeignKey(
Question,
models.SET_NULL,
blank=True,
null=True,
to_field="uuid",
related_name="uuid_answers",
limit_choices_to=~models.Q(question__istartswith="not"),
)
related_answers = models.ManyToManyField("self")
answer = models.CharField(max_length=20)
def __str__(self):
return self.answer
class Answer2(Answer):
class Meta:
proxy = True
class Reservation(models.Model):
start_date = models.DateTimeField()
price = models.IntegerField()
class FoodDelivery(models.Model):
DRIVER_CHOICES = (
("bill", "Bill G"),
("steve", "Steve J"),
)
RESTAURANT_CHOICES = (
("indian", "A Taste of India"),
("thai", "Thai Pography"),
("pizza", "Pizza Mama"),
)
reference = models.CharField(max_length=100)
driver = models.CharField(max_length=100, choices=DRIVER_CHOICES, blank=True)
restaurant = models.CharField(
max_length=100, choices=RESTAURANT_CHOICES, blank=True
)
class Meta:
unique_together = (("driver", "restaurant"),)
class CoverLetter(models.Model):
author = models.CharField(max_length=30)
date_written = models.DateField(null=True, blank=True)
def __str__(self):
return self.author
class Paper(models.Model):
title = models.CharField(max_length=30)
author = models.CharField(max_length=30, blank=True, null=True)
class ShortMessage(models.Model):
content = models.CharField(max_length=140)
timestamp = models.DateTimeField(null=True, blank=True)
class Telegram(models.Model):
title = models.CharField(max_length=30)
date_sent = models.DateField(null=True, blank=True)
def __str__(self):
return self.title
class Story(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class OtherStory(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class ComplexSortedPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
is_employee = models.BooleanField(null=True)
class PluggableSearchPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
class PrePopulatedPostLargeSlug(models.Model):
"""
Regression test for #15938: a large max_length for the slugfield must not
be localized in prepopulated_fields_js.html or it might end up breaking
    the JavaScript (i.e., using THOUSAND_SEPARATOR ends up with maxLength=1,000)
"""
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
# `db_index=False` because MySQL cannot index large CharField (#21196).
slug = models.SlugField(max_length=1000, db_index=False)
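# Hedged illustration of the failure mode described in the docstring above:
# if the template localized the max_length with grouping, the prepopulate
# JavaScript would receive "1,000" instead of "1000".
# `example_localized_maxlength` is a hypothetical helper, not used by tests.
def example_localized_maxlength(value=1000):
    from django.utils.formats import number_format

    # number_format() applies THOUSAND_SEPARATOR when grouping is forced,
    # which is exactly what prepopulated_fields_js.html must avoid.
    return number_format(value, force_grouping=True), str(value)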
class AdminOrderedField(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedModelMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
@admin.display(ordering="order")
def some_order(self):
return self.order
class AdminOrderedAdminMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedCallable(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class Report(models.Model):
title = models.CharField(max_length=100)
def __str__(self):
return self.title
class MainPrepopulated(models.Model):
name = models.CharField(max_length=100)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(("option one", "Option One"), ("option two", "Option Two")),
)
slug1 = models.SlugField(blank=True)
slug2 = models.SlugField(blank=True)
slug3 = models.SlugField(blank=True, allow_unicode=True)
class RelatedPrepopulated(models.Model):
parent = models.ForeignKey(MainPrepopulated, models.CASCADE)
name = models.CharField(max_length=75)
fk = models.ForeignKey("self", models.CASCADE, blank=True, null=True)
m2m = models.ManyToManyField("self", blank=True)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(("option one", "Option One"), ("option two", "Option Two")),
)
slug1 = models.SlugField(max_length=50)
slug2 = models.SlugField(max_length=60)
class UnorderedObject(models.Model):
"""
Model without any defined `Meta.ordering`.
Refs #16819.
"""
name = models.CharField(max_length=255)
bool = models.BooleanField(default=True)
class UndeletableObject(models.Model):
"""
Model whose show_delete in admin change_view has been disabled
Refs #10057.
"""
name = models.CharField(max_length=255)
class UnchangeableObject(models.Model):
"""
Model whose change_view is disabled in admin
Refs #20640.
"""
class UserMessenger(models.Model):
"""
Dummy class for testing message_user functions on ModelAdmin
"""
class Simple(models.Model):
"""
Simple model with nothing on it for use in testing
"""
class Choice(models.Model):
choice = models.IntegerField(
blank=True,
null=True,
choices=((1, "Yes"), (0, "No"), (None, "No opinion")),
)
class ParentWithDependentChildren(models.Model):
"""
Issue #20522
Model where the validation of child foreign-key relationships depends
on validation of the parent
"""
some_required_info = models.PositiveIntegerField()
family_name = models.CharField(max_length=255, blank=False)
class DependentChild(models.Model):
"""
Issue #20522
Model that depends on validation of the parent class for one of its
fields to validate during clean
"""
parent = models.ForeignKey(ParentWithDependentChildren, models.CASCADE)
family_name = models.CharField(max_length=255)
class _Manager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(pk__gt=1)
class FilteredManager(models.Model):
def __str__(self):
return "PK=%d" % self.pk
pk_gt_1 = _Manager()
objects = models.Manager()
class EmptyModelVisible(models.Model):
"""See ticket #11277."""
class EmptyModelHidden(models.Model):
"""See ticket #11277."""
class EmptyModelMixin(models.Model):
"""See ticket #11277."""
class State(models.Model):
name = models.CharField(max_length=100, verbose_name="State verbose_name")
class City(models.Model):
state = models.ForeignKey(State, models.CASCADE)
name = models.CharField(max_length=100, verbose_name="City verbose_name")
def get_absolute_url(self):
return "/dummy/%s/" % self.pk
class Restaurant(models.Model):
city = models.ForeignKey(City, models.CASCADE)
name = models.CharField(max_length=100)
def get_absolute_url(self):
return "/dummy/%s/" % self.pk
class Worker(models.Model):
work_at = models.ForeignKey(Restaurant, models.CASCADE)
name = models.CharField(max_length=50)
surname = models.CharField(max_length=50)
# Models for #23329
class ReferencedByParent(models.Model):
name = models.CharField(max_length=20, unique=True)
class ParentWithFK(models.Model):
fk = models.ForeignKey(
ReferencedByParent,
models.CASCADE,
to_field="name",
related_name="hidden+",
)
class ChildOfReferer(ParentWithFK):
pass
# Models for #23431
class InlineReferer(models.Model):
pass
class ReferencedByInline(models.Model):
name = models.CharField(max_length=20, unique=True)
class InlineReference(models.Model):
referer = models.ForeignKey(InlineReferer, models.CASCADE)
fk = models.ForeignKey(
ReferencedByInline,
models.CASCADE,
to_field="name",
related_name="hidden+",
)
class Recipe(models.Model):
rname = models.CharField(max_length=20, unique=True)
class Ingredient(models.Model):
iname = models.CharField(max_length=20, unique=True)
recipes = models.ManyToManyField(Recipe, through="RecipeIngredient")
class RecipeIngredient(models.Model):
ingredient = models.ForeignKey(Ingredient, models.CASCADE, to_field="iname")
recipe = models.ForeignKey(Recipe, models.CASCADE, to_field="rname")
# Model for #23839
class NotReferenced(models.Model):
# Don't point any FK at this model.
pass
# Models for #23934
class ExplicitlyProvidedPK(models.Model):
name = models.IntegerField(primary_key=True)
class ImplicitlyGeneratedPK(models.Model):
name = models.IntegerField(unique=True)
# Models for #25622
class ReferencedByGenRel(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
class GenRelReference(models.Model):
references = GenericRelation(ReferencedByGenRel)
class ParentWithUUIDPK(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
title = models.CharField(max_length=100)
def __str__(self):
return str(self.id)
class RelatedWithUUIDPKModel(models.Model):
parent = models.ForeignKey(
ParentWithUUIDPK, on_delete=models.SET_NULL, null=True, blank=True
)
class Author(models.Model):
pass
class Authorship(models.Model):
book = models.ForeignKey(Book, models.CASCADE)
author = models.ForeignKey(Author, models.CASCADE)
class UserProxy(User):
"""Proxy a model with a different app_label."""
class Meta:
proxy = True
class ReadOnlyRelatedField(models.Model):
chapter = models.ForeignKey(Chapter, models.CASCADE)
language = models.ForeignKey(Language, models.CASCADE)
user = models.ForeignKey(User, models.CASCADE)
class Héllo(models.Model):
pass
class Box(models.Model):
title = models.CharField(max_length=100)
next_box = models.ForeignKey(
"self", null=True, on_delete=models.SET_NULL, blank=True
)
class Country(models.Model):
NORTH_AMERICA = "North America"
SOUTH_AMERICA = "South America"
EUROPE = "Europe"
ASIA = "Asia"
OCEANIA = "Oceania"
ANTARCTICA = "Antarctica"
CONTINENT_CHOICES = [
(NORTH_AMERICA, NORTH_AMERICA),
(SOUTH_AMERICA, SOUTH_AMERICA),
(EUROPE, EUROPE),
(ASIA, ASIA),
(OCEANIA, OCEANIA),
(ANTARCTICA, ANTARCTICA),
]
name = models.CharField(max_length=80)
continent = models.CharField(max_length=13, choices=CONTINENT_CHOICES)
def __str__(self):
return self.name
class Traveler(models.Model):
born_country = models.ForeignKey(Country, models.CASCADE)
living_country = models.ForeignKey(
Country, models.CASCADE, related_name="living_country_set"
)
favorite_country_to_vacation = models.ForeignKey(
Country,
models.CASCADE,
related_name="favorite_country_to_vacation_set",
limit_choices_to={"continent": Country.ASIA},
)
|
d172c0d276235c0574c2ace6d3e78e150af0c27905e5c7de1658d95550599681 | import datetime
from io import StringIO
from wsgiref.util import FileWrapper
from django import forms
from django.contrib import admin
from django.contrib.admin import BooleanFieldListFilter
from django.contrib.admin.views.main import ChangeList
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
from django.core.exceptions import ValidationError
from django.core.mail import EmailMessage
from django.db import models
from django.forms.models import BaseModelFormSet
from django.http import HttpResponse, JsonResponse, StreamingHttpResponse
from django.urls import path
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.views.decorators.common import no_append_slash
from .forms import MediaActionForm
from .models import (
Actor,
AdminOrderedAdminMethod,
AdminOrderedCallable,
AdminOrderedField,
AdminOrderedModelMethod,
Album,
Answer,
Answer2,
Article,
BarAccount,
Book,
Bookmark,
Box,
Category,
Chapter,
ChapterXtra1,
Child,
ChildOfReferer,
Choice,
City,
Collector,
Color,
Color2,
ComplexSortedPerson,
Country,
CoverLetter,
CustomArticle,
CyclicOne,
CyclicTwo,
DependentChild,
DooHickey,
EmptyModel,
EmptyModelHidden,
EmptyModelMixin,
EmptyModelVisible,
ExplicitlyProvidedPK,
ExternalSubscriber,
Fabric,
FancyDoodad,
FieldOverridePost,
FilteredManager,
FooAccount,
FoodDelivery,
FunkyTag,
Gadget,
Gallery,
GenRelReference,
Grommet,
ImplicitlyGeneratedPK,
Ingredient,
InlineReference,
InlineReferer,
Inquisition,
Language,
Link,
MainPrepopulated,
ModelWithStringPrimaryKey,
NotReferenced,
OldSubscriber,
OtherStory,
Paper,
Parent,
ParentWithDependentChildren,
ParentWithUUIDPK,
Person,
Persona,
Picture,
Pizza,
Plot,
PlotDetails,
PlotProxy,
PluggableSearchPerson,
Podcast,
Post,
PrePopulatedPost,
PrePopulatedPostLargeSlug,
PrePopulatedSubPost,
Promo,
Question,
ReadablePizza,
ReadOnlyPizza,
ReadOnlyRelatedField,
Recipe,
Recommendation,
Recommender,
ReferencedByGenRel,
ReferencedByInline,
ReferencedByParent,
RelatedPrepopulated,
RelatedWithUUIDPKModel,
Report,
Reservation,
Restaurant,
RowLevelChangePermissionModel,
Section,
ShortMessage,
Simple,
Sketch,
Song,
State,
Story,
StumpJoke,
Subscriber,
SuperVillain,
Telegram,
Thing,
Topping,
Traveler,
UnchangeableObject,
UndeletableObject,
UnorderedObject,
UserMessenger,
UserProxy,
Villain,
Vodcast,
Whatsit,
Widget,
Worker,
WorkHour,
)
@admin.display(ordering="date")
def callable_year(dt_value):
try:
return dt_value.year
except AttributeError:
return None
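# Hedged sketch: @admin.display(ordering="date") above only annotates the
# callable; the changelist later reads this attribute to decide whether the
# column is sortable. Illustrative only:
#
#     callable_year.admin_order_field  # -> "date"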
class ArticleInline(admin.TabularInline):
model = Article
fk_name = "section"
prepopulated_fields = {"title": ("content",)}
fieldsets = (
("Some fields", {"classes": ("collapse",), "fields": ("title", "content")}),
("Some other fields", {"classes": ("wide",), "fields": ("date", "section")}),
)
class ChapterInline(admin.TabularInline):
model = Chapter
class ChapterXtra1Admin(admin.ModelAdmin):
list_filter = (
"chap",
"chap__title",
"chap__book",
"chap__book__name",
"chap__book__promo",
"chap__book__promo__name",
"guest_author__promo__book",
)
class ArticleForm(forms.ModelForm):
extra_form_field = forms.BooleanField(required=False)
class Meta:
fields = "__all__"
model = Article
class ArticleAdminWithExtraUrl(admin.ModelAdmin):
def get_urls(self):
urlpatterns = super().get_urls()
urlpatterns.append(
path(
"extra.json",
self.admin_site.admin_view(self.extra_json),
name="article_extra_json",
)
)
return urlpatterns
def extra_json(self, request):
return JsonResponse({})
class ArticleAdmin(ArticleAdminWithExtraUrl):
list_display = (
"content",
"date",
callable_year,
"model_year",
"modeladmin_year",
"model_year_reversed",
"section",
lambda obj: obj.title,
"order_by_expression",
"model_property_year",
"model_month",
"order_by_f_expression",
"order_by_orderby_expression",
)
list_editable = ("section",)
list_filter = ("date", "section")
autocomplete_fields = ("section",)
view_on_site = False
form = ArticleForm
fieldsets = (
(
"Some fields",
{
"classes": ("collapse",),
"fields": ("title", "content", "extra_form_field"),
},
),
(
"Some other fields",
{"classes": ("wide",), "fields": ("date", "section", "sub_section")},
),
)
# These orderings aren't particularly useful but show that expressions can
# be used for admin_order_field.
@admin.display(ordering=models.F("date") + datetime.timedelta(days=3))
def order_by_expression(self, obj):
return obj.model_year
@admin.display(ordering=models.F("date"))
def order_by_f_expression(self, obj):
return obj.model_year
@admin.display(ordering=models.F("date").asc(nulls_last=True))
def order_by_orderby_expression(self, obj):
return obj.model_year
def changelist_view(self, request):
return super().changelist_view(request, extra_context={"extra_var": "Hello!"})
@admin.display(ordering="date", description=None)
def modeladmin_year(self, obj):
return obj.date.year
def delete_model(self, request, obj):
EmailMessage(
"Greetings from a deleted object",
"I hereby inform you that some user deleted me",
"[email protected]",
["[email protected]"],
).send()
return super().delete_model(request, obj)
def save_model(self, request, obj, form, change=True):
EmailMessage(
"Greetings from a created object",
"I hereby inform you that some user created me",
"[email protected]",
["[email protected]"],
).send()
return super().save_model(request, obj, form, change)
class ArticleAdmin2(admin.ModelAdmin):
def has_module_permission(self, request):
return False
class RowLevelChangePermissionModelAdmin(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
"""Only allow changing objects with even id number"""
return request.user.is_staff and (obj is not None) and (obj.id % 2 == 0)
def has_view_permission(self, request, obj=None):
"""Only allow viewing objects if id is a multiple of 3."""
return request.user.is_staff and obj is not None and obj.id % 3 == 0
class CustomArticleAdmin(admin.ModelAdmin):
"""
Tests various hooks for using custom templates and contexts.
"""
change_list_template = "custom_admin/change_list.html"
change_form_template = "custom_admin/change_form.html"
add_form_template = "custom_admin/add_form.html"
object_history_template = "custom_admin/object_history.html"
delete_confirmation_template = "custom_admin/delete_confirmation.html"
delete_selected_confirmation_template = (
"custom_admin/delete_selected_confirmation.html"
)
popup_response_template = "custom_admin/popup_response.html"
def changelist_view(self, request):
return super().changelist_view(request, extra_context={"extra_var": "Hello!"})
class ThingAdmin(admin.ModelAdmin):
list_filter = ("color", "color__warm", "color__value", "pub_date")
class InquisitionAdmin(admin.ModelAdmin):
list_display = ("leader", "country", "expected", "sketch")
@admin.display
def sketch(self, obj):
# A method with the same name as a reverse accessor.
return "list-display-sketch"
class SketchAdmin(admin.ModelAdmin):
raw_id_fields = ("inquisition", "defendant0", "defendant1")
class FabricAdmin(admin.ModelAdmin):
list_display = ("surface",)
list_filter = ("surface",)
class BasePersonModelFormSet(BaseModelFormSet):
def clean(self):
for person_dict in self.cleaned_data:
person = person_dict.get("id")
alive = person_dict.get("alive")
if person and alive and person.name == "Grace Hopper":
raise ValidationError("Grace is not a Zombie")
class PersonAdmin(admin.ModelAdmin):
list_display = ("name", "gender", "alive")
list_editable = ("gender", "alive")
list_filter = ("gender",)
search_fields = ("^name",)
save_as = True
def get_changelist_formset(self, request, **kwargs):
return super().get_changelist_formset(
request, formset=BasePersonModelFormSet, **kwargs
)
def get_queryset(self, request):
# Order by a field that isn't in list display, to be able to test
# whether ordering is preserved.
return super().get_queryset(request).order_by("age")
class FooAccountAdmin(admin.StackedInline):
model = FooAccount
extra = 1
class BarAccountAdmin(admin.StackedInline):
model = BarAccount
extra = 1
class PersonaAdmin(admin.ModelAdmin):
inlines = (FooAccountAdmin, BarAccountAdmin)
class SubscriberAdmin(admin.ModelAdmin):
actions = ["mail_admin"]
action_form = MediaActionForm
def delete_queryset(self, request, queryset):
SubscriberAdmin.overridden = True
super().delete_queryset(request, queryset)
@admin.action
def mail_admin(self, request, selected):
EmailMessage(
"Greetings from a ModelAdmin action",
"This is the test email from an admin action",
"[email protected]",
["[email protected]"],
).send()
@admin.action(description="External mail (Another awesome action)")
def external_mail(modeladmin, request, selected):
EmailMessage(
"Greetings from a function action",
"This is the test email from a function action",
"[email protected]",
["[email protected]"],
).send()
@admin.action(description="Redirect to (Awesome action)")
def redirect_to(modeladmin, request, selected):
from django.http import HttpResponseRedirect
return HttpResponseRedirect("/some-where-else/")
@admin.action(description="Download subscription")
def download(modeladmin, request, selected):
buf = StringIO("This is the content of the file")
return StreamingHttpResponse(FileWrapper(buf))
@admin.action(description="No permission to run")
def no_perm(modeladmin, request, selected):
return HttpResponse(content="No permission to perform this action", status=403)
class ExternalSubscriberAdmin(admin.ModelAdmin):
actions = [redirect_to, external_mail, download, no_perm]
class PodcastAdmin(admin.ModelAdmin):
list_display = ("name", "release_date")
list_editable = ("release_date",)
date_hierarchy = "release_date"
ordering = ("name",)
class VodcastAdmin(admin.ModelAdmin):
list_display = ("name", "released")
list_editable = ("released",)
ordering = ("name",)
class ChildInline(admin.StackedInline):
model = Child
class ParentAdmin(admin.ModelAdmin):
model = Parent
inlines = [ChildInline]
save_as = True
list_display = (
"id",
"name",
)
list_display_links = ("id",)
list_editable = ("name",)
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
first_name, last_name = form.instance.name.split()
for child in form.instance.child_set.all():
if len(child.name.split()) < 2:
child.name = child.name + " " + last_name
child.save()
class EmptyModelAdmin(admin.ModelAdmin):
def get_queryset(self, request):
return super().get_queryset(request).filter(pk__gt=1)
class OldSubscriberAdmin(admin.ModelAdmin):
actions = None
class PictureInline(admin.TabularInline):
model = Picture
extra = 1
class GalleryAdmin(admin.ModelAdmin):
inlines = [PictureInline]
class PictureAdmin(admin.ModelAdmin):
pass
class LanguageAdmin(admin.ModelAdmin):
list_display = ["iso", "shortlist", "english_name", "name"]
list_editable = ["shortlist"]
class RecommendationAdmin(admin.ModelAdmin):
show_full_result_count = False
search_fields = (
"=titletranslation__text",
"=the_recommender__titletranslation__text",
)
class WidgetInline(admin.StackedInline):
model = Widget
class DooHickeyInline(admin.StackedInline):
model = DooHickey
class GrommetInline(admin.StackedInline):
model = Grommet
class WhatsitInline(admin.StackedInline):
model = Whatsit
class FancyDoodadInline(admin.StackedInline):
model = FancyDoodad
class CategoryAdmin(admin.ModelAdmin):
list_display = ("id", "collector", "order")
list_editable = ("order",)
class CategoryInline(admin.StackedInline):
model = Category
class CollectorAdmin(admin.ModelAdmin):
inlines = [
WidgetInline,
DooHickeyInline,
GrommetInline,
WhatsitInline,
FancyDoodadInline,
CategoryInline,
]
class LinkInline(admin.TabularInline):
model = Link
extra = 1
readonly_fields = ("posted", "multiline", "readonly_link_content")
@admin.display
def multiline(self, instance):
return "InlineMultiline\ntest\nstring"
class SubPostInline(admin.TabularInline):
model = PrePopulatedSubPost
prepopulated_fields = {"subslug": ("subtitle",)}
def get_readonly_fields(self, request, obj=None):
if obj and obj.published:
return ("subslug",)
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
if obj and obj.published:
return {}
return self.prepopulated_fields
class PrePopulatedPostAdmin(admin.ModelAdmin):
list_display = ["title", "slug"]
prepopulated_fields = {"slug": ("title",)}
inlines = [SubPostInline]
def get_readonly_fields(self, request, obj=None):
if obj and obj.published:
return ("slug",)
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
if obj and obj.published:
return {}
return self.prepopulated_fields
class PrePopulatedPostReadOnlyAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("title",)}
def has_change_permission(self, *args, **kwargs):
return False
class PostAdmin(admin.ModelAdmin):
list_display = ["title", "public"]
readonly_fields = (
"posted",
"awesomeness_level",
"coolness",
"value",
"multiline",
"multiline_html",
lambda obj: "foo",
"readonly_content",
)
inlines = [LinkInline]
@admin.display
def coolness(self, instance):
if instance.pk:
return "%d amount of cool." % instance.pk
else:
return "Unknown coolness."
@admin.display(description="Value in $US")
def value(self, instance):
return 1000
@admin.display
def multiline(self, instance):
return "Multiline\ntest\nstring"
@admin.display
def multiline_html(self, instance):
return mark_safe("Multiline<br>\nhtml<br>\ncontent")
class FieldOverridePostForm(forms.ModelForm):
model = FieldOverridePost
class Meta:
help_texts = {
"posted": "Overridden help text for the date",
}
labels = {
"public": "Overridden public label",
}
class FieldOverridePostAdmin(PostAdmin):
form = FieldOverridePostForm
class CustomChangeList(ChangeList):
def get_queryset(self, request):
return self.root_queryset.order_by("pk").filter(pk=9999) # Doesn't exist
class GadgetAdmin(admin.ModelAdmin):
def get_changelist(self, request, **kwargs):
return CustomChangeList
class ToppingAdmin(admin.ModelAdmin):
readonly_fields = ("pizzas",)
class PizzaAdmin(admin.ModelAdmin):
readonly_fields = ("toppings",)
class ReadOnlyRelatedFieldAdmin(admin.ModelAdmin):
readonly_fields = ("chapter", "language", "user")
class StudentAdmin(admin.ModelAdmin):
search_fields = ("name",)
class ReadOnlyPizzaAdmin(admin.ModelAdmin):
readonly_fields = ("name", "toppings")
def has_add_permission(self, request):
return False
def has_change_permission(self, request, obj=None):
return True
def has_delete_permission(self, request, obj=None):
return True
class WorkHourAdmin(admin.ModelAdmin):
list_display = ("datum", "employee")
list_filter = ("employee",)
class FoodDeliveryAdmin(admin.ModelAdmin):
list_display = ("reference", "driver", "restaurant")
list_editable = ("driver", "restaurant")
class CoverLetterAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses defer(), to test
verbose_name display in messages shown after adding/editing CoverLetter
instances. Note that the CoverLetter model defines a __str__ method.
For testing fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).defer("date_written")
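# Hedged note on the docstring above: even with defer(), instances still
# report the model's _meta.verbose_name, which is what the post-save message
# tests rely on. Illustrative only:
#
#     obj = CoverLetter.objects.defer("date_written").first()
#     obj.get_deferred_fields()  # -> {"date_written"}
#     obj._meta.verbose_name     # -> "cover letter"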
class PaperAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses only(), to test
verbose_name display in messages shown after adding/editing Paper
instances.
For testing fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).only("title")
class ShortMessageAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses defer(), to test
verbose_name display in messages shown after adding/editing ShortMessage
instances.
For testing fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).defer("timestamp")
class TelegramAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses only(), to test
verbose_name display in messages shown after adding/editing Telegram
instances. Note that the Telegram model defines a __str__ method.
For testing fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).only("title")
class StoryForm(forms.ModelForm):
class Meta:
widgets = {"title": forms.HiddenInput}
class StoryAdmin(admin.ModelAdmin):
list_display = ("id", "title", "content")
list_display_links = ("title",) # 'id' not in list_display_links
list_editable = ("content",)
form = StoryForm
ordering = ["-id"]
class OtherStoryAdmin(admin.ModelAdmin):
list_display = ("id", "title", "content")
list_display_links = ("title", "id") # 'id' in list_display_links
list_editable = ("content",)
ordering = ["-id"]
class ComplexSortedPersonAdmin(admin.ModelAdmin):
list_display = ("name", "age", "is_employee", "colored_name")
ordering = ("name",)
@admin.display(ordering="name")
def colored_name(self, obj):
return format_html('<span style="color: #ff00ff;">{}</span>', obj.name)
class PluggableSearchPersonAdmin(admin.ModelAdmin):
list_display = ("name", "age")
search_fields = ("name",)
def get_search_results(self, request, queryset, search_term):
queryset, may_have_duplicates = super().get_search_results(
request,
queryset,
search_term,
)
try:
search_term_as_int = int(search_term)
except ValueError:
pass
else:
queryset |= self.model.objects.filter(age=search_term_as_int)
return queryset, may_have_duplicates
class AlbumAdmin(admin.ModelAdmin):
list_filter = ["title"]
class QuestionAdmin(admin.ModelAdmin):
ordering = ["-posted"]
search_fields = ["question"]
autocomplete_fields = ["related_questions"]
class AnswerAdmin(admin.ModelAdmin):
autocomplete_fields = ["question"]
class PrePopulatedPostLargeSlugAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("title",)}
class AdminOrderedFieldAdmin(admin.ModelAdmin):
ordering = ("order",)
list_display = ("stuff", "order")
class AdminOrderedModelMethodAdmin(admin.ModelAdmin):
ordering = ("order",)
list_display = ("stuff", "some_order")
class AdminOrderedAdminMethodAdmin(admin.ModelAdmin):
@admin.display(ordering="order")
def some_admin_order(self, obj):
return obj.order
ordering = ("order",)
list_display = ("stuff", "some_admin_order")
@admin.display(ordering="order")
def admin_ordered_callable(obj):
return obj.order
class AdminOrderedCallableAdmin(admin.ModelAdmin):
ordering = ("order",)
list_display = ("stuff", admin_ordered_callable)
class ReportAdmin(admin.ModelAdmin):
def extra(self, request):
return HttpResponse()
def get_urls(self):
# Corner case: Don't call parent implementation
return [path("extra/", self.extra, name="cable_extra")]
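# Hedged illustration of the corner case above: because super().get_urls()
# is never called, only the extra view resolves; the default changelist/add/
# change routes for Report raise NoReverseMatch.
#
#     reverse("admin:cable_extra")                    # resolves
#     reverse("admin:admin_views_report_changelist")  # NoReverseMatch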
class CustomTemplateBooleanFieldListFilter(BooleanFieldListFilter):
template = "custom_filter_template.html"
class CustomTemplateFilterColorAdmin(admin.ModelAdmin):
list_filter = (("warm", CustomTemplateBooleanFieldListFilter),)
# For Selenium Prepopulated tests -------------------------------------
class RelatedPrepopulatedInline1(admin.StackedInline):
fieldsets = (
(
None,
{
"fields": (
("fk", "m2m"),
("pubdate", "status"),
(
"name",
"slug1",
"slug2",
),
),
},
),
)
formfield_overrides = {models.CharField: {"strip": False}}
model = RelatedPrepopulated
extra = 1
autocomplete_fields = ["fk", "m2m"]
prepopulated_fields = {
"slug1": ["name", "pubdate"],
"slug2": ["status", "name"],
}
class RelatedPrepopulatedInline2(admin.TabularInline):
model = RelatedPrepopulated
extra = 1
autocomplete_fields = ["fk", "m2m"]
prepopulated_fields = {
"slug1": ["name", "pubdate"],
"slug2": ["status", "name"],
}
class RelatedPrepopulatedInline3(admin.TabularInline):
model = RelatedPrepopulated
extra = 0
autocomplete_fields = ["fk", "m2m"]
class RelatedPrepopulatedStackedInlineNoFieldsets(admin.StackedInline):
model = RelatedPrepopulated
extra = 1
prepopulated_fields = {
"slug1": ["name", "pubdate"],
"slug2": ["status"],
}
class MainPrepopulatedAdmin(admin.ModelAdmin):
inlines = [
RelatedPrepopulatedInline1,
RelatedPrepopulatedInline2,
RelatedPrepopulatedInline3,
RelatedPrepopulatedStackedInlineNoFieldsets,
]
fieldsets = (
(
None,
{"fields": (("pubdate", "status"), ("name", "slug1", "slug2", "slug3"))},
),
)
formfield_overrides = {models.CharField: {"strip": False}}
prepopulated_fields = {
"slug1": ["name", "pubdate"],
"slug2": ["status", "name"],
"slug3": ["name"],
}
class UnorderedObjectAdmin(admin.ModelAdmin):
list_display = ["id", "name"]
list_display_links = ["id"]
list_editable = ["name"]
list_per_page = 2
class UndeletableObjectAdmin(admin.ModelAdmin):
def change_view(self, *args, **kwargs):
kwargs["extra_context"] = {"show_delete": False}
return super().change_view(*args, **kwargs)
class UnchangeableObjectAdmin(admin.ModelAdmin):
def get_urls(self):
# Disable change_view, but leave other urls untouched
urlpatterns = super().get_urls()
return [p for p in urlpatterns if p.name and not p.name.endswith("_change")]
@admin.display
def callable_on_unknown(obj):
return obj.unknown
class AttributeErrorRaisingAdmin(admin.ModelAdmin):
list_display = [callable_on_unknown]
class CustomManagerAdmin(admin.ModelAdmin):
def get_queryset(self, request):
return FilteredManager.objects
class MessageTestingAdmin(admin.ModelAdmin):
actions = [
"message_debug",
"message_info",
"message_success",
"message_warning",
"message_error",
"message_extra_tags",
]
@admin.action
def message_debug(self, request, selected):
self.message_user(request, "Test debug", level="debug")
@admin.action
def message_info(self, request, selected):
self.message_user(request, "Test info", level="info")
@admin.action
def message_success(self, request, selected):
self.message_user(request, "Test success", level="success")
@admin.action
def message_warning(self, request, selected):
self.message_user(request, "Test warning", level="warning")
@admin.action
def message_error(self, request, selected):
self.message_user(request, "Test error", level="error")
@admin.action
def message_extra_tags(self, request, selected):
self.message_user(request, "Test tags", extra_tags="extra_tag")
class ChoiceList(admin.ModelAdmin):
list_display = ["choice"]
readonly_fields = ["choice"]
fields = ["choice"]
class DependentChildAdminForm(forms.ModelForm):
"""
Issue #20522
Form to test child dependency on parent object's validation
"""
def clean(self):
parent = self.cleaned_data.get("parent")
if parent.family_name and parent.family_name != self.cleaned_data.get(
"family_name"
):
raise ValidationError(
"Children must share a family name with their parents "
+ "in this contrived test case"
)
return super().clean()
class DependentChildInline(admin.TabularInline):
model = DependentChild
form = DependentChildAdminForm
class ParentWithDependentChildrenAdmin(admin.ModelAdmin):
inlines = [DependentChildInline]
# Tests for ticket 11277 ----------------------------------
class FormWithoutHiddenField(forms.ModelForm):
first = forms.CharField()
second = forms.CharField()
class FormWithoutVisibleField(forms.ModelForm):
first = forms.CharField(widget=forms.HiddenInput)
second = forms.CharField(widget=forms.HiddenInput)
class FormWithVisibleAndHiddenField(forms.ModelForm):
first = forms.CharField(widget=forms.HiddenInput)
second = forms.CharField()
class EmptyModelVisibleAdmin(admin.ModelAdmin):
form = FormWithoutHiddenField
fieldsets = (
(
None,
{
"fields": (("first", "second"),),
},
),
)
class EmptyModelHiddenAdmin(admin.ModelAdmin):
form = FormWithoutVisibleField
fieldsets = EmptyModelVisibleAdmin.fieldsets
class EmptyModelMixinAdmin(admin.ModelAdmin):
form = FormWithVisibleAndHiddenField
fieldsets = EmptyModelVisibleAdmin.fieldsets
class CityInlineAdmin(admin.TabularInline):
model = City
view_on_site = False
class StateAdminForm(forms.ModelForm):
nolabel_form_field = forms.BooleanField(required=False)
class Meta:
model = State
fields = "__all__"
labels = {"name": "State name (from form’s Meta.labels)"}
@property
def changed_data(self):
data = super().changed_data
if data:
# Add arbitrary name to changed_data to test
# change message construction.
return data + ["not_a_form_field"]
return data
class StateAdmin(admin.ModelAdmin):
inlines = [CityInlineAdmin]
form = StateAdminForm
class RestaurantInlineAdmin(admin.TabularInline):
model = Restaurant
view_on_site = True
class CityAdmin(admin.ModelAdmin):
inlines = [RestaurantInlineAdmin]
view_on_site = True
def get_formset_kwargs(self, request, obj, inline, prefix):
return {
**super().get_formset_kwargs(request, obj, inline, prefix),
"form_kwargs": {"initial": {"name": "overridden_name"}},
}
class WorkerAdmin(admin.ModelAdmin):
def view_on_site(self, obj):
return "/worker/%s/%s/" % (obj.surname, obj.name)
class WorkerInlineAdmin(admin.TabularInline):
model = Worker
def view_on_site(self, obj):
return "/worker_inline/%s/%s/" % (obj.surname, obj.name)
class RestaurantAdmin(admin.ModelAdmin):
inlines = [WorkerInlineAdmin]
view_on_site = False
def get_changeform_initial_data(self, request):
return {"name": "overridden_value"}
class FunkyTagAdmin(admin.ModelAdmin):
list_display = ("name", "content_object")
class InlineReferenceInline(admin.TabularInline):
model = InlineReference
class InlineRefererAdmin(admin.ModelAdmin):
inlines = [InlineReferenceInline]
class PlotReadonlyAdmin(admin.ModelAdmin):
readonly_fields = ("plotdetails",)
class GetFormsetsArgumentCheckingAdmin(admin.ModelAdmin):
fields = ["name"]
def add_view(self, request, *args, **kwargs):
request.is_add_view = True
return super().add_view(request, *args, **kwargs)
def change_view(self, request, *args, **kwargs):
request.is_add_view = False
return super().change_view(request, *args, **kwargs)
def get_formsets_with_inlines(self, request, obj=None):
if request.is_add_view and obj is not None:
raise Exception(
"'obj' passed to get_formsets_with_inlines wasn't None during add_view"
)
if not request.is_add_view and obj is None:
raise Exception(
"'obj' passed to get_formsets_with_inlines was None during change_view"
)
return super().get_formsets_with_inlines(request, obj)
site = admin.AdminSite(name="admin")
site.site_url = "/my-site-url/"
site.register(Article, ArticleAdmin)
site.register(CustomArticle, CustomArticleAdmin)
site.register(
Section,
save_as=True,
inlines=[ArticleInline],
readonly_fields=["name_property"],
search_fields=["name"],
)
site.register(ModelWithStringPrimaryKey)
site.register(Color)
site.register(Thing, ThingAdmin)
site.register(Actor)
site.register(Inquisition, InquisitionAdmin)
site.register(Sketch, SketchAdmin)
site.register(Person, PersonAdmin)
site.register(Persona, PersonaAdmin)
site.register(Subscriber, SubscriberAdmin)
site.register(ExternalSubscriber, ExternalSubscriberAdmin)
site.register(OldSubscriber, OldSubscriberAdmin)
site.register(Podcast, PodcastAdmin)
site.register(Vodcast, VodcastAdmin)
site.register(Parent, ParentAdmin)
site.register(EmptyModel, EmptyModelAdmin)
site.register(Fabric, FabricAdmin)
site.register(Gallery, GalleryAdmin)
site.register(Picture, PictureAdmin)
site.register(Language, LanguageAdmin)
site.register(Recommendation, RecommendationAdmin)
site.register(Recommender)
site.register(Collector, CollectorAdmin)
site.register(Category, CategoryAdmin)
site.register(Post, PostAdmin)
site.register(FieldOverridePost, FieldOverridePostAdmin)
site.register(Gadget, GadgetAdmin)
site.register(Villain)
site.register(SuperVillain)
site.register(Plot)
site.register(PlotDetails)
site.register(PlotProxy, PlotReadonlyAdmin)
site.register(Bookmark)
site.register(CyclicOne)
site.register(CyclicTwo)
site.register(WorkHour, WorkHourAdmin)
site.register(Reservation)
site.register(FoodDelivery, FoodDeliveryAdmin)
site.register(RowLevelChangePermissionModel, RowLevelChangePermissionModelAdmin)
site.register(Paper, PaperAdmin)
site.register(CoverLetter, CoverLetterAdmin)
site.register(ShortMessage, ShortMessageAdmin)
site.register(Telegram, TelegramAdmin)
site.register(Story, StoryAdmin)
site.register(OtherStory, OtherStoryAdmin)
site.register(Report, ReportAdmin)
site.register(MainPrepopulated, MainPrepopulatedAdmin)
site.register(UnorderedObject, UnorderedObjectAdmin)
site.register(UndeletableObject, UndeletableObjectAdmin)
site.register(UnchangeableObject, UnchangeableObjectAdmin)
site.register(State, StateAdmin)
site.register(City, CityAdmin)
site.register(Restaurant, RestaurantAdmin)
site.register(Worker, WorkerAdmin)
site.register(FunkyTag, FunkyTagAdmin)
site.register(ReferencedByParent)
site.register(ChildOfReferer)
site.register(ReferencedByInline)
site.register(InlineReferer, InlineRefererAdmin)
site.register(ReferencedByGenRel)
site.register(GenRelReference)
site.register(ParentWithUUIDPK)
site.register(RelatedPrepopulated, search_fields=["name"])
site.register(RelatedWithUUIDPKModel)
site.register(ReadOnlyRelatedField, ReadOnlyRelatedFieldAdmin)
# We intentionally register Promo and ChapterXtra1 but not Chapter nor ChapterXtra2.
# That way we cover all four cases:
# related ForeignKey object registered in admin
# related ForeignKey object not registered in admin
# related OneToOne object registered in admin
# related OneToOne object not registered in admin
# when deleting Book so as to exercise all four paths through
# contrib.admin.utils's get_deleted_objects function.
site.register(Book, inlines=[ChapterInline])
site.register(Promo)
site.register(ChapterXtra1, ChapterXtra1Admin)
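# Hedged sketch of the four paths mentioned above: get_deleted_objects()
# renders a change-form link for related objects whose models are registered
# (Promo, ChapterXtra1) and plain text for unregistered ones (Chapter,
# ChapterXtra2). Illustrative call with placeholder `book`/`request`:
#
#     from django.contrib.admin.utils import get_deleted_objects
#     to_delete, model_count, perms_needed, protected = get_deleted_objects(
#         [book], request, site
#     )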
site.register(Pizza, PizzaAdmin)
site.register(ReadOnlyPizza, ReadOnlyPizzaAdmin)
site.register(ReadablePizza)
site.register(Topping, ToppingAdmin)
site.register(Album, AlbumAdmin)
site.register(Song)
site.register(Question, QuestionAdmin)
site.register(Answer, AnswerAdmin, date_hierarchy="question__posted")
site.register(Answer2, date_hierarchy="question__expires")
site.register(PrePopulatedPost, PrePopulatedPostAdmin)
site.register(ComplexSortedPerson, ComplexSortedPersonAdmin)
site.register(FilteredManager, CustomManagerAdmin)
site.register(PluggableSearchPerson, PluggableSearchPersonAdmin)
site.register(PrePopulatedPostLargeSlug, PrePopulatedPostLargeSlugAdmin)
site.register(AdminOrderedField, AdminOrderedFieldAdmin)
site.register(AdminOrderedModelMethod, AdminOrderedModelMethodAdmin)
site.register(AdminOrderedAdminMethod, AdminOrderedAdminMethodAdmin)
site.register(AdminOrderedCallable, AdminOrderedCallableAdmin)
site.register(Color2, CustomTemplateFilterColorAdmin)
site.register(Simple, AttributeErrorRaisingAdmin)
site.register(UserMessenger, MessageTestingAdmin)
site.register(Choice, ChoiceList)
site.register(ParentWithDependentChildren, ParentWithDependentChildrenAdmin)
site.register(EmptyModelHidden, EmptyModelHiddenAdmin)
site.register(EmptyModelVisible, EmptyModelVisibleAdmin)
site.register(EmptyModelMixin, EmptyModelMixinAdmin)
site.register(StumpJoke)
site.register(Recipe)
site.register(Ingredient)
site.register(NotReferenced)
site.register(ExplicitlyProvidedPK, GetFormsetsArgumentCheckingAdmin)
site.register(ImplicitlyGeneratedPK, GetFormsetsArgumentCheckingAdmin)
site.register(UserProxy)
site.register(Box)
site.register(Country)
site.register(Traveler)
# Register core models we need in our tests
site.register(User, UserAdmin)
site.register(Group, GroupAdmin)
# Used to test URL namespaces
site2 = admin.AdminSite(name="namespaced_admin")
site2.register(User, UserAdmin)
site2.register(Group, GroupAdmin)
site2.register(ParentWithUUIDPK)
site2.register(
RelatedWithUUIDPKModel,
list_display=["pk", "parent"],
list_editable=["parent"],
raw_id_fields=["parent"],
)
site2.register(Person, save_as_continue=False)
site2.register(ReadOnlyRelatedField, ReadOnlyRelatedFieldAdmin)
site2.register(Language)
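# A usage sketch (not part of the test setup): the AdminSite name doubles as
# the URL namespace, so site2's views reverse through it, e.g.
# reverse("namespaced_admin:auth_user_changelist") or
# reverse("namespaced_admin:auth_user_change", args=[user.pk]).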
site7 = admin.AdminSite(name="admin7")
site7.register(Article, ArticleAdmin2)
site7.register(Section)
site7.register(PrePopulatedPost, PrePopulatedPostReadOnlyAdmin)
site7.register(
Pizza,
filter_horizontal=["toppings"],
fieldsets=(
(
"Collapsible",
{
"classes": ["collapse"],
"fields": ["toppings"],
},
),
),
)
site7.register(
Question,
filter_horizontal=["related_questions"],
fieldsets=(
(
"Not collapsible",
{
"fields": ["related_questions"],
},
),
),
)
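# Note: register() accepts ModelAdmin attributes as keyword options
# (filter_horizontal, fieldsets, ...) and builds a ModelAdmin subclass on the
# fly, which is why no explicit admin class is declared for these two models.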
# Used to test ModelAdmin.sortable_by and get_sortable_by().
class ArticleAdmin6(admin.ModelAdmin):
list_display = (
"content",
"date",
callable_year,
"model_year",
"modeladmin_year",
"model_year_reversed",
"section",
)
sortable_by = ("date", callable_year)
@admin.display(ordering="date")
def modeladmin_year(self, obj):
return obj.date.year
class ActorAdmin6(admin.ModelAdmin):
list_display = ("name", "age")
sortable_by = ("name",)
def get_sortable_by(self, request):
return ("age",)
class ChapterAdmin6(admin.ModelAdmin):
list_display = ("title", "book")
sortable_by = ()
class ColorAdmin6(admin.ModelAdmin):
list_display = ("value",)
def get_sortable_by(self, request):
return ()
site6 = admin.AdminSite(name="admin6")
site6.register(Article, ArticleAdmin6)
site6.register(Actor, ActorAdmin6)
site6.register(Chapter, ChapterAdmin6)
site6.register(Color, ColorAdmin6)
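# sortable_by limits which list_display columns get sortable headers:
# ArticleAdmin6 allows sorting only on "date" and the callable, ChapterAdmin6
# disables sorting entirely via an empty tuple, and an overridden
# get_sortable_by() (ActorAdmin6, ColorAdmin6) takes precedence over the
# attribute.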
class ArticleAdmin9(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
# Simulate that the user can't change a specific object.
return obj is None
class ActorAdmin9(admin.ModelAdmin):
def get_urls(self):
# Opt-out of append slash for single model.
urls = super().get_urls()
for pattern in urls:
pattern.callback = no_append_slash(pattern.callback)
return urls
site9 = admin.AdminSite(name="admin9")
site9.register(Article, ArticleAdmin9)
site9.register(Actor, ActorAdmin9)
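# no_append_slash marks each pattern's callback so CommonMiddleware's
# APPEND_SLASH handling skips the trailing-slash redirect for ActorAdmin9's
# views, while ArticleAdmin9 simulates per-object denial of change permission.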
site10 = admin.AdminSite(name="admin10")
site10.final_catch_all_view = False
site10.register(Article, ArticleAdminWithExtraUrl)
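# With final_catch_all_view disabled, unmatched URLs under site10 raise 404
# (or fall through to later URLconf entries) instead of being handled by the
# admin's catch-all redirect-to-login view.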
|
30f1eac7ee87c26222aa91a5e19d07e875d9ed3d3e393d22be982e527c555d4f | from unittest import mock
from django.contrib import admin
from django.contrib.auth.models import User
from django.test import TestCase, override_settings
from django.urls import path, reverse
from .models import Book
class Router:
target_db = None
def db_for_read(self, model, **hints):
return self.target_db
db_for_write = db_for_read
def allow_relation(self, obj1, obj2, **hints):
return True
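# With DATABASE_ROUTERS pointing at this class (see override_settings below),
# every ORM read and write is steered to whichever alias a test assigns to
# Router.target_db, so one test body can exercise both databases in turn.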
site = admin.AdminSite(name="test_adminsite")
site.register(Book)
urlpatterns = [
path("admin/", site.urls),
]
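# ROOT_URLCONF=__name__ (below) makes this module its own URLconf, so the
# test client resolves requests against the urlpatterns above.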
@override_settings(ROOT_URLCONF=__name__, DATABASE_ROUTERS=["%s.Router" % __name__])
class MultiDatabaseTests(TestCase):
databases = {"default", "other"}
@classmethod
def setUpTestData(cls):
cls.superusers = {}
cls.test_book_ids = {}
for db in cls.databases:
Router.target_db = db
cls.superusers[db] = User.objects.create_superuser(
username="admin",
password="something",
email="[email protected]",
)
b = Book(name="Test Book")
b.save(using=db)
cls.test_book_ids[db] = b.id
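    # The three tests below patch the transaction module used by ModelAdmin,
    # so mock.atomic records which database alias the admin wrapped its
    # writes in; asserting atomic(using=db) verifies that the router's choice
    # propagated into the add/change/delete views.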
@mock.patch("django.contrib.admin.options.transaction")
def test_add_view(self, mock):
for db in self.databases:
with self.subTest(db=db):
Router.target_db = db
self.client.force_login(self.superusers[db])
self.client.post(
reverse("test_adminsite:admin_views_book_add"),
{"name": "Foobar: 5th edition"},
)
mock.atomic.assert_called_with(using=db)
@mock.patch("django.contrib.admin.options.transaction")
def test_change_view(self, mock):
for db in self.databases:
with self.subTest(db=db):
Router.target_db = db
self.client.force_login(self.superusers[db])
self.client.post(
reverse(
"test_adminsite:admin_views_book_change",
args=[self.test_book_ids[db]],
),
{"name": "Test Book 2: Test more"},
)
mock.atomic.assert_called_with(using=db)
@mock.patch("django.contrib.admin.options.transaction")
def test_delete_view(self, mock):
for db in self.databases:
with self.subTest(db=db):
Router.target_db = db
self.client.force_login(self.superusers[db])
self.client.post(
reverse(
"test_adminsite:admin_views_book_delete",
args=[self.test_book_ids[db]],
),
{"post": "yes"},
)
mock.atomic.assert_called_with(using=db)
|
c391138285d60f3f7fa1915de9b9712864d6c39ba24db96290cb294798d550e6 | import datetime
import sys
import unittest
from django.contrib.admin import (
AllValuesFieldListFilter,
BooleanFieldListFilter,
EmptyFieldListFilter,
FieldListFilter,
ModelAdmin,
RelatedOnlyFieldListFilter,
SimpleListFilter,
site,
)
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.test import RequestFactory, TestCase, override_settings
from .models import Book, Bookmark, Department, Employee, ImprovedBook, TaggedItem
def select_by(dictlist, key, value):
return [x for x in dictlist if x[key] == value][0]
class DecadeListFilter(SimpleListFilter):
def lookups(self, request, model_admin):
return (
("the 80s", "the 1980's"),
("the 90s", "the 1990's"),
("the 00s", "the 2000's"),
("other", "other decades"),
)
def queryset(self, request, queryset):
decade = self.value()
if decade == "the 80s":
return queryset.filter(year__gte=1980, year__lte=1989)
if decade == "the 90s":
return queryset.filter(year__gte=1990, year__lte=1999)
if decade == "the 00s":
return queryset.filter(year__gte=2000, year__lte=2009)
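# SimpleListFilter subclasses implement two hooks: lookups() returns
# (URL value, human-readable label) pairs and queryset() filters on
# self.value(). Returning None from queryset() (as happens for "other" here)
# leaves the queryset unfiltered.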
class NotNinetiesListFilter(SimpleListFilter):
title = "Not nineties books"
parameter_name = "book_year"
def lookups(self, request, model_admin):
return (("the 90s", "the 1990's"),)
def queryset(self, request, queryset):
if self.value() == "the 90s":
return queryset.filter(year__gte=1990, year__lte=1999)
else:
return queryset.exclude(year__gte=1990, year__lte=1999)
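# Unlike DecadeListFilter, this filter also narrows the queryset when no
# value is selected (the else branch), which is what
# test_list_filter_queryset_filtered_by_default relies on to check
# full_result_count.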
class DecadeListFilterWithTitleAndParameter(DecadeListFilter):
title = "publication decade"
parameter_name = "publication-decade"
class DecadeListFilterWithoutTitle(DecadeListFilter):
parameter_name = "publication-decade"
class DecadeListFilterWithoutParameter(DecadeListFilter):
title = "publication decade"
class DecadeListFilterWithNoneReturningLookups(DecadeListFilterWithTitleAndParameter):
def lookups(self, request, model_admin):
pass
class DecadeListFilterWithFailingQueryset(DecadeListFilterWithTitleAndParameter):
def queryset(self, request, queryset):
        raise 1 / 0  # Evaluating 1 / 0 raises ZeroDivisionError before raise runs.
class DecadeListFilterWithQuerysetBasedLookups(DecadeListFilterWithTitleAndParameter):
def lookups(self, request, model_admin):
qs = model_admin.get_queryset(request)
if qs.filter(year__gte=1980, year__lte=1989).exists():
yield ("the 80s", "the 1980's")
if qs.filter(year__gte=1990, year__lte=1999).exists():
yield ("the 90s", "the 1990's")
if qs.filter(year__gte=2000, year__lte=2009).exists():
yield ("the 00s", "the 2000's")
class DecadeListFilterParameterEndsWith__In(DecadeListFilter):
title = "publication decade"
parameter_name = "decade__in" # Ends with '__in"
class DecadeListFilterParameterEndsWith__Isnull(DecadeListFilter):
title = "publication decade"
parameter_name = "decade__isnull" # Ends with '__isnull"
class DepartmentListFilterLookupWithNonStringValue(SimpleListFilter):
title = "department"
parameter_name = "department"
def lookups(self, request, model_admin):
return sorted(
{
(
employee.department.id, # Intentionally not a string (Refs #19318)
employee.department.code,
)
for employee in model_admin.get_queryset(request)
}
)
def queryset(self, request, queryset):
if self.value():
return queryset.filter(department__id=self.value())
class DepartmentListFilterLookupWithUnderscoredParameter(
DepartmentListFilterLookupWithNonStringValue
):
parameter_name = "department__whatever"
class DepartmentListFilterLookupWithDynamicValue(DecadeListFilterWithTitleAndParameter):
def lookups(self, request, model_admin):
if self.value() == "the 80s":
return (("the 90s", "the 1990's"),)
elif self.value() == "the 90s":
return (("the 80s", "the 1980's"),)
else:
return (
("the 80s", "the 1980's"),
("the 90s", "the 1990's"),
)
class EmployeeNameCustomDividerFilter(FieldListFilter):
list_separator = "|"
def __init__(self, field, request, params, model, model_admin, field_path):
self.lookup_kwarg = "%s__in" % field_path
super().__init__(field, request, params, model, model_admin, field_path)
def expected_parameters(self):
return [self.lookup_kwarg]
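# list_separator swaps the delimiter used to split multi-valued filter
# parameters, so a query string such as ?name__in=John Blue|Jack Red matches
# both employees even when the values themselves could contain commas.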
class CustomUserAdmin(UserAdmin):
list_filter = ("books_authored", "books_contributed")
class BookAdmin(ModelAdmin):
list_filter = (
"year",
"author",
"contributors",
"is_best_seller",
"date_registered",
"no",
"availability",
)
ordering = ("-id",)
class BookAdminWithTupleBooleanFilter(BookAdmin):
list_filter = (
"year",
"author",
"contributors",
("is_best_seller", BooleanFieldListFilter),
"date_registered",
"no",
("availability", BooleanFieldListFilter),
)
class BookAdminWithUnderscoreLookupAndTuple(BookAdmin):
list_filter = (
"year",
("author__email", AllValuesFieldListFilter),
"contributors",
"is_best_seller",
"date_registered",
"no",
)
class BookAdminWithCustomQueryset(ModelAdmin):
def __init__(self, user, *args, **kwargs):
self.user = user
super().__init__(*args, **kwargs)
list_filter = ("year",)
def get_queryset(self, request):
return super().get_queryset(request).filter(author=self.user)
class BookAdminRelatedOnlyFilter(ModelAdmin):
list_filter = (
"year",
"is_best_seller",
"date_registered",
"no",
("author", RelatedOnlyFieldListFilter),
("contributors", RelatedOnlyFieldListFilter),
("employee__department", RelatedOnlyFieldListFilter),
)
ordering = ("-id",)
class DecadeFilterBookAdmin(ModelAdmin):
list_filter = ("author", DecadeListFilterWithTitleAndParameter)
ordering = ("-id",)
class NotNinetiesListFilterAdmin(ModelAdmin):
list_filter = (NotNinetiesListFilter,)
class DecadeFilterBookAdminWithoutTitle(ModelAdmin):
list_filter = (DecadeListFilterWithoutTitle,)
class DecadeFilterBookAdminWithoutParameter(ModelAdmin):
list_filter = (DecadeListFilterWithoutParameter,)
class DecadeFilterBookAdminWithNoneReturningLookups(ModelAdmin):
list_filter = (DecadeListFilterWithNoneReturningLookups,)
class DecadeFilterBookAdminWithFailingQueryset(ModelAdmin):
list_filter = (DecadeListFilterWithFailingQueryset,)
class DecadeFilterBookAdminWithQuerysetBasedLookups(ModelAdmin):
list_filter = (DecadeListFilterWithQuerysetBasedLookups,)
class DecadeFilterBookAdminParameterEndsWith__In(ModelAdmin):
list_filter = (DecadeListFilterParameterEndsWith__In,)
class DecadeFilterBookAdminParameterEndsWith__Isnull(ModelAdmin):
list_filter = (DecadeListFilterParameterEndsWith__Isnull,)
class EmployeeAdmin(ModelAdmin):
list_display = ["name", "department"]
list_filter = ["department"]
class EmployeeCustomDividerFilterAdmin(EmployeeAdmin):
list_filter = [
("name", EmployeeNameCustomDividerFilter),
]
class DepartmentFilterEmployeeAdmin(EmployeeAdmin):
list_filter = [DepartmentListFilterLookupWithNonStringValue]
class DepartmentFilterUnderscoredEmployeeAdmin(EmployeeAdmin):
list_filter = [DepartmentListFilterLookupWithUnderscoredParameter]
class DepartmentFilterDynamicValueBookAdmin(EmployeeAdmin):
list_filter = [DepartmentListFilterLookupWithDynamicValue]
class BookmarkAdminGenericRelation(ModelAdmin):
list_filter = ["tags__tag"]
class BookAdminWithEmptyFieldListFilter(ModelAdmin):
list_filter = [
("author", EmptyFieldListFilter),
("title", EmptyFieldListFilter),
("improvedbook", EmptyFieldListFilter),
]
class DepartmentAdminWithEmptyFieldListFilter(ModelAdmin):
list_filter = [
("description", EmptyFieldListFilter),
("employee", EmptyFieldListFilter),
]
class ListFiltersTests(TestCase):
request_factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.today = datetime.date.today()
cls.tomorrow = cls.today + datetime.timedelta(days=1)
cls.one_week_ago = cls.today - datetime.timedelta(days=7)
if cls.today.month == 12:
cls.next_month = cls.today.replace(year=cls.today.year + 1, month=1, day=1)
else:
cls.next_month = cls.today.replace(month=cls.today.month + 1, day=1)
cls.next_year = cls.today.replace(year=cls.today.year + 1, month=1, day=1)
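        # Worked example: for a today of 2023-12-15, next_month and next_year
        # are both 2024-01-01; for 2023-05-15 they would be 2023-06-01 and
        # 2024-01-01 respectively.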
# Users
cls.alfred = User.objects.create_superuser(
"alfred", "[email protected]", "password"
)
cls.bob = User.objects.create_user("bob", "[email protected]")
cls.lisa = User.objects.create_user("lisa", "[email protected]")
# Books
cls.djangonaut_book = Book.objects.create(
title="Djangonaut: an art of living",
year=2009,
author=cls.alfred,
is_best_seller=True,
date_registered=cls.today,
availability=True,
)
cls.bio_book = Book.objects.create(
title="Django: a biography",
year=1999,
author=cls.alfred,
is_best_seller=False,
no=207,
availability=False,
)
cls.django_book = Book.objects.create(
title="The Django Book",
year=None,
author=cls.bob,
is_best_seller=None,
date_registered=cls.today,
no=103,
availability=True,
)
cls.guitar_book = Book.objects.create(
title="Guitar for dummies",
year=2002,
is_best_seller=True,
date_registered=cls.one_week_ago,
availability=None,
)
cls.guitar_book.contributors.set([cls.bob, cls.lisa])
# Departments
cls.dev = Department.objects.create(code="DEV", description="Development")
cls.design = Department.objects.create(code="DSN", description="Design")
# Employees
cls.john = Employee.objects.create(name="John Blue", department=cls.dev)
cls.jack = Employee.objects.create(name="Jack Red", department=cls.design)
def test_choicesfieldlistfilter_has_none_choice(self):
"""
The last choice is for the None value.
"""
class BookmarkChoicesAdmin(ModelAdmin):
list_display = ["none_or_null"]
list_filter = ["none_or_null"]
modeladmin = BookmarkChoicesAdmin(Bookmark, site)
request = self.request_factory.get("/", {})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[-1]["display"], "None")
self.assertEqual(choices[-1]["query_string"], "?none_or_null__isnull=True")
def test_datefieldlistfilter(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist(request)
request = self.request_factory.get(
"/",
{"date_registered__gte": self.today, "date_registered__lt": self.tomorrow},
)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(filterspec.title, "date registered")
choice = select_by(filterspec.choices(changelist), "display", "Today")
self.assertIs(choice["selected"], True)
self.assertEqual(
choice["query_string"],
"?date_registered__gte=%s&date_registered__lt=%s"
% (
self.today,
self.tomorrow,
),
)
request = self.request_factory.get(
"/",
{
"date_registered__gte": self.today.replace(day=1),
"date_registered__lt": self.next_month,
},
)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
if (self.today.year, self.today.month) == (
self.one_week_ago.year,
self.one_week_ago.month,
):
            # If one week ago falls in the same month.
self.assertEqual(
list(queryset),
[self.guitar_book, self.django_book, self.djangonaut_book],
)
else:
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(filterspec.title, "date registered")
choice = select_by(filterspec.choices(changelist), "display", "This month")
self.assertIs(choice["selected"], True)
self.assertEqual(
choice["query_string"],
"?date_registered__gte=%s&date_registered__lt=%s"
% (
self.today.replace(day=1),
self.next_month,
),
)
request = self.request_factory.get(
"/",
{
"date_registered__gte": self.today.replace(month=1, day=1),
"date_registered__lt": self.next_year,
},
)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
if self.today.year == self.one_week_ago.year:
            # If one week ago falls in the same year.
self.assertEqual(
list(queryset),
[self.guitar_book, self.django_book, self.djangonaut_book],
)
else:
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(filterspec.title, "date registered")
choice = select_by(filterspec.choices(changelist), "display", "This year")
self.assertIs(choice["selected"], True)
self.assertEqual(
choice["query_string"],
"?date_registered__gte=%s&date_registered__lt=%s"
% (
self.today.replace(month=1, day=1),
self.next_year,
),
)
request = self.request_factory.get(
"/",
{
"date_registered__gte": str(self.one_week_ago),
"date_registered__lt": str(self.tomorrow),
},
)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(
list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]
)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(filterspec.title, "date registered")
choice = select_by(filterspec.choices(changelist), "display", "Past 7 days")
self.assertIs(choice["selected"], True)
self.assertEqual(
choice["query_string"],
"?date_registered__gte=%s&date_registered__lt=%s"
% (
str(self.one_week_ago),
str(self.tomorrow),
),
)
# Null/not null queries
request = self.request_factory.get("/", {"date_registered__isnull": "True"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(queryset.count(), 1)
self.assertEqual(queryset[0], self.bio_book)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(filterspec.title, "date registered")
choice = select_by(filterspec.choices(changelist), "display", "No date")
self.assertIs(choice["selected"], True)
self.assertEqual(choice["query_string"], "?date_registered__isnull=True")
request = self.request_factory.get("/", {"date_registered__isnull": "False"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(queryset.count(), 3)
self.assertEqual(
list(queryset), [self.guitar_book, self.django_book, self.djangonaut_book]
)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][4]
self.assertEqual(filterspec.title, "date registered")
choice = select_by(filterspec.choices(changelist), "display", "Has date")
self.assertIs(choice["selected"], True)
self.assertEqual(choice["query_string"], "?date_registered__isnull=False")
@unittest.skipIf(
sys.platform == "win32",
"Windows doesn't support setting a timezone that differs from the "
"system timezone.",
)
@override_settings(USE_TZ=True)
def test_datefieldlistfilter_with_time_zone_support(self):
# Regression for #17830
self.test_datefieldlistfilter()
def test_allvaluesfieldlistfilter(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/", {"year__isnull": "True"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "year")
choices = list(filterspec.choices(changelist))
self.assertIs(choices[-1]["selected"], True)
self.assertEqual(choices[-1]["query_string"], "?year__isnull=True")
request = self.request_factory.get("/", {"year": "2002"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "year")
choices = list(filterspec.choices(changelist))
self.assertIs(choices[2]["selected"], True)
self.assertEqual(choices[2]["query_string"], "?year=2002")
def test_allvaluesfieldlistfilter_custom_qs(self):
# Make sure that correct filters are returned with custom querysets
modeladmin = BookAdminWithCustomQueryset(self.alfred, Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
choices = list(filterspec.choices(changelist))
        # Should have 'All', 1999 and 2009 options, i.e. the subset of years
        # of books written by alfred (the filtering criterion set by
        # BookAdminWithCustomQueryset.get_queryset()).
self.assertEqual(3, len(choices))
self.assertEqual(choices[0]["query_string"], "?")
self.assertEqual(choices[1]["query_string"], "?year=1999")
self.assertEqual(choices[2]["query_string"], "?year=2009")
def test_relatedfieldlistfilter_foreignkey(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure that all users are present in the author's list filter
filterspec = changelist.get_filters(request)[0][1]
expected = [
(self.alfred.pk, "alfred"),
(self.bob.pk, "bob"),
(self.lisa.pk, "lisa"),
]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
request = self.request_factory.get("/", {"author__isnull": "True"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.guitar_book])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(filterspec.title, "Verbose Author")
choices = list(filterspec.choices(changelist))
self.assertIs(choices[-1]["selected"], True)
self.assertEqual(choices[-1]["query_string"], "?author__isnull=True")
request = self.request_factory.get("/", {"author__id__exact": self.alfred.pk})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(filterspec.title, "Verbose Author")
        # The order of choices depends on the User model, which has no
        # default ordering.
choice = select_by(filterspec.choices(changelist), "display", "alfred")
self.assertIs(choice["selected"], True)
self.assertEqual(
choice["query_string"], "?author__id__exact=%d" % self.alfred.pk
)
def test_relatedfieldlistfilter_foreignkey_ordering(self):
"""RelatedFieldListFilter ordering respects ModelAdmin.ordering."""
class EmployeeAdminWithOrdering(ModelAdmin):
ordering = ("name",)
class BookAdmin(ModelAdmin):
list_filter = ("employee",)
site.register(Employee, EmployeeAdminWithOrdering)
self.addCleanup(lambda: site.unregister(Employee))
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
expected = [(self.jack.pk, "Jack Red"), (self.john.pk, "John Blue")]
self.assertEqual(filterspec.lookup_choices, expected)
def test_relatedfieldlistfilter_foreignkey_ordering_reverse(self):
class EmployeeAdminWithOrdering(ModelAdmin):
ordering = ("-name",)
class BookAdmin(ModelAdmin):
list_filter = ("employee",)
site.register(Employee, EmployeeAdminWithOrdering)
self.addCleanup(lambda: site.unregister(Employee))
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
expected = [(self.john.pk, "John Blue"), (self.jack.pk, "Jack Red")]
self.assertEqual(filterspec.lookup_choices, expected)
def test_relatedfieldlistfilter_foreignkey_default_ordering(self):
"""RelatedFieldListFilter ordering respects Model.ordering."""
class BookAdmin(ModelAdmin):
list_filter = ("employee",)
self.addCleanup(setattr, Employee._meta, "ordering", Employee._meta.ordering)
Employee._meta.ordering = ("name",)
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
expected = [(self.jack.pk, "Jack Red"), (self.john.pk, "John Blue")]
self.assertEqual(filterspec.lookup_choices, expected)
def test_relatedfieldlistfilter_manytomany(self):
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
        # Make sure that all users are present in the contributors' list filter
filterspec = changelist.get_filters(request)[0][2]
expected = [
(self.alfred.pk, "alfred"),
(self.bob.pk, "bob"),
(self.lisa.pk, "lisa"),
]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
request = self.request_factory.get("/", {"contributors__isnull": "True"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(
list(queryset), [self.django_book, self.bio_book, self.djangonaut_book]
)
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][2]
self.assertEqual(filterspec.title, "Verbose Contributors")
choices = list(filterspec.choices(changelist))
self.assertIs(choices[-1]["selected"], True)
self.assertEqual(choices[-1]["query_string"], "?contributors__isnull=True")
request = self.request_factory.get(
"/", {"contributors__id__exact": self.bob.pk}
)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][2]
self.assertEqual(filterspec.title, "Verbose Contributors")
choice = select_by(filterspec.choices(changelist), "display", "bob")
self.assertIs(choice["selected"], True)
self.assertEqual(
choice["query_string"], "?contributors__id__exact=%d" % self.bob.pk
)
def test_relatedfieldlistfilter_reverse_relationships(self):
modeladmin = CustomUserAdmin(User, site)
# FK relationship -----
request = self.request_factory.get("/", {"books_authored__isnull": "True"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.lisa])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "book")
choices = list(filterspec.choices(changelist))
self.assertIs(choices[-1]["selected"], True)
self.assertEqual(choices[-1]["query_string"], "?books_authored__isnull=True")
request = self.request_factory.get(
"/", {"books_authored__id__exact": self.bio_book.pk}
)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "book")
choice = select_by(
filterspec.choices(changelist), "display", self.bio_book.title
)
self.assertIs(choice["selected"], True)
self.assertEqual(
choice["query_string"], "?books_authored__id__exact=%d" % self.bio_book.pk
)
# M2M relationship -----
request = self.request_factory.get("/", {"books_contributed__isnull": "True"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.alfred])
# Make sure the last choice is None and is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(filterspec.title, "book")
choices = list(filterspec.choices(changelist))
self.assertIs(choices[-1]["selected"], True)
self.assertEqual(choices[-1]["query_string"], "?books_contributed__isnull=True")
request = self.request_factory.get(
"/", {"books_contributed__id__exact": self.django_book.pk}
)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(filterspec.title, "book")
choice = select_by(
filterspec.choices(changelist), "display", self.django_book.title
)
self.assertIs(choice["selected"], True)
self.assertEqual(
choice["query_string"],
"?books_contributed__id__exact=%d" % self.django_book.pk,
)
# With one book, the list filter should appear because there is also a
# (None) option.
Book.objects.exclude(pk=self.djangonaut_book.pk).delete()
filterspec = changelist.get_filters(request)[0]
self.assertEqual(len(filterspec), 2)
# With no books remaining, no list filters should appear.
Book.objects.all().delete()
filterspec = changelist.get_filters(request)[0]
self.assertEqual(len(filterspec), 0)
def test_relatedfieldlistfilter_reverse_relationships_default_ordering(self):
self.addCleanup(setattr, Book._meta, "ordering", Book._meta.ordering)
Book._meta.ordering = ("title",)
modeladmin = CustomUserAdmin(User, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
expected = [
(self.bio_book.pk, "Django: a biography"),
(self.djangonaut_book.pk, "Djangonaut: an art of living"),
(self.guitar_book.pk, "Guitar for dummies"),
(self.django_book.pk, "The Django Book"),
]
self.assertEqual(filterspec.lookup_choices, expected)
def test_relatedonlyfieldlistfilter_foreignkey(self):
modeladmin = BookAdminRelatedOnlyFilter(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure that only actual authors are present in author's list filter
filterspec = changelist.get_filters(request)[0][4]
expected = [(self.alfred.pk, "alfred"), (self.bob.pk, "bob")]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
def test_relatedonlyfieldlistfilter_foreignkey_reverse_relationships(self):
class EmployeeAdminReverseRelationship(ModelAdmin):
list_filter = (("book", RelatedOnlyFieldListFilter),)
self.djangonaut_book.employee = self.john
self.djangonaut_book.save()
self.django_book.employee = self.jack
self.django_book.save()
modeladmin = EmployeeAdminReverseRelationship(Employee, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
self.assertCountEqual(
filterspec.lookup_choices,
[
(self.djangonaut_book.pk, "Djangonaut: an art of living"),
(self.django_book.pk, "The Django Book"),
],
)
def test_relatedonlyfieldlistfilter_manytomany_reverse_relationships(self):
class UserAdminReverseRelationship(ModelAdmin):
list_filter = (("books_contributed", RelatedOnlyFieldListFilter),)
modeladmin = UserAdminReverseRelationship(User, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(
filterspec.lookup_choices,
[(self.guitar_book.pk, "Guitar for dummies")],
)
def test_relatedonlyfieldlistfilter_foreignkey_ordering(self):
"""RelatedOnlyFieldListFilter ordering respects ModelAdmin.ordering."""
class EmployeeAdminWithOrdering(ModelAdmin):
ordering = ("name",)
class BookAdmin(ModelAdmin):
list_filter = (("employee", RelatedOnlyFieldListFilter),)
albert = Employee.objects.create(name="Albert Green", department=self.dev)
self.djangonaut_book.employee = albert
self.djangonaut_book.save()
self.bio_book.employee = self.jack
self.bio_book.save()
site.register(Employee, EmployeeAdminWithOrdering)
self.addCleanup(lambda: site.unregister(Employee))
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
expected = [(albert.pk, "Albert Green"), (self.jack.pk, "Jack Red")]
self.assertEqual(filterspec.lookup_choices, expected)
def test_relatedonlyfieldlistfilter_foreignkey_default_ordering(self):
"""RelatedOnlyFieldListFilter ordering respects Meta.ordering."""
class BookAdmin(ModelAdmin):
list_filter = (("employee", RelatedOnlyFieldListFilter),)
albert = Employee.objects.create(name="Albert Green", department=self.dev)
self.djangonaut_book.employee = albert
self.djangonaut_book.save()
self.bio_book.employee = self.jack
self.bio_book.save()
self.addCleanup(setattr, Employee._meta, "ordering", Employee._meta.ordering)
Employee._meta.ordering = ("name",)
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
expected = [(albert.pk, "Albert Green"), (self.jack.pk, "Jack Red")]
self.assertEqual(filterspec.lookup_choices, expected)
def test_relatedonlyfieldlistfilter_underscorelookup_foreignkey(self):
Department.objects.create(code="TEST", description="Testing")
self.djangonaut_book.employee = self.john
self.djangonaut_book.save()
self.bio_book.employee = self.jack
self.bio_book.save()
modeladmin = BookAdminRelatedOnlyFilter(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Only actual departments should be present in employee__department's
# list filter.
filterspec = changelist.get_filters(request)[0][6]
expected = [
(self.dev.code, str(self.dev)),
(self.design.code, str(self.design)),
]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
def test_relatedonlyfieldlistfilter_manytomany(self):
modeladmin = BookAdminRelatedOnlyFilter(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
        # Make sure that only actual contributors are present in the
        # contributors' list filter.
filterspec = changelist.get_filters(request)[0][5]
expected = [(self.bob.pk, "bob"), (self.lisa.pk, "lisa")]
self.assertEqual(sorted(filterspec.lookup_choices), sorted(expected))
def test_listfilter_genericrelation(self):
django_bookmark = Bookmark.objects.create(url="https://www.djangoproject.com/")
python_bookmark = Bookmark.objects.create(url="https://www.python.org/")
kernel_bookmark = Bookmark.objects.create(url="https://www.kernel.org/")
TaggedItem.objects.create(content_object=django_bookmark, tag="python")
TaggedItem.objects.create(content_object=python_bookmark, tag="python")
TaggedItem.objects.create(content_object=kernel_bookmark, tag="linux")
modeladmin = BookmarkAdminGenericRelation(Bookmark, site)
request = self.request_factory.get("/", {"tags__tag": "python"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
expected = [python_bookmark, django_bookmark]
self.assertEqual(list(queryset), expected)
def test_booleanfieldlistfilter(self):
modeladmin = BookAdmin(Book, site)
self.verify_booleanfieldlistfilter(modeladmin)
def test_booleanfieldlistfilter_tuple(self):
modeladmin = BookAdminWithTupleBooleanFilter(Book, site)
self.verify_booleanfieldlistfilter(modeladmin)
def verify_booleanfieldlistfilter(self, modeladmin):
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
request = self.request_factory.get("/", {"is_best_seller__exact": 0})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][3]
self.assertEqual(filterspec.title, "is best seller")
choice = select_by(filterspec.choices(changelist), "display", "No")
self.assertIs(choice["selected"], True)
self.assertEqual(choice["query_string"], "?is_best_seller__exact=0")
request = self.request_factory.get("/", {"is_best_seller__exact": 1})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.guitar_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][3]
self.assertEqual(filterspec.title, "is best seller")
choice = select_by(filterspec.choices(changelist), "display", "Yes")
self.assertIs(choice["selected"], True)
self.assertEqual(choice["query_string"], "?is_best_seller__exact=1")
request = self.request_factory.get("/", {"is_best_seller__isnull": "True"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][3]
self.assertEqual(filterspec.title, "is best seller")
choice = select_by(filterspec.choices(changelist), "display", "Unknown")
self.assertIs(choice["selected"], True)
self.assertEqual(choice["query_string"], "?is_best_seller__isnull=True")
def test_booleanfieldlistfilter_choices(self):
modeladmin = BookAdmin(Book, site)
self.verify_booleanfieldlistfilter_choices(modeladmin)
def test_booleanfieldlistfilter_tuple_choices(self):
modeladmin = BookAdminWithTupleBooleanFilter(Book, site)
self.verify_booleanfieldlistfilter_choices(modeladmin)
def verify_booleanfieldlistfilter_choices(self, modeladmin):
# False.
request = self.request_factory.get("/", {"availability__exact": 0})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
filterspec = changelist.get_filters(request)[0][6]
self.assertEqual(filterspec.title, "availability")
choice = select_by(filterspec.choices(changelist), "display", "Paid")
self.assertIs(choice["selected"], True)
self.assertEqual(choice["query_string"], "?availability__exact=0")
# True.
request = self.request_factory.get("/", {"availability__exact": 1})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])
filterspec = changelist.get_filters(request)[0][6]
self.assertEqual(filterspec.title, "availability")
choice = select_by(filterspec.choices(changelist), "display", "Free")
self.assertIs(choice["selected"], True)
self.assertEqual(choice["query_string"], "?availability__exact=1")
# None.
request = self.request_factory.get("/", {"availability__isnull": "True"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.guitar_book])
filterspec = changelist.get_filters(request)[0][6]
self.assertEqual(filterspec.title, "availability")
choice = select_by(filterspec.choices(changelist), "display", "Obscure")
self.assertIs(choice["selected"], True)
self.assertEqual(choice["query_string"], "?availability__isnull=True")
# All.
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
self.assertEqual(
list(queryset),
[self.guitar_book, self.django_book, self.bio_book, self.djangonaut_book],
)
filterspec = changelist.get_filters(request)[0][6]
self.assertEqual(filterspec.title, "availability")
choice = select_by(filterspec.choices(changelist), "display", "All")
self.assertIs(choice["selected"], True)
self.assertEqual(choice["query_string"], "?")
def test_fieldlistfilter_underscorelookup_tuple(self):
"""
        Ensure ('fieldpath', ClassName) lookups pass lookup_allowed checks
        when fieldpath contains a double underscore (#19182).
"""
modeladmin = BookAdminWithUnderscoreLookupAndTuple(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
request = self.request_factory.get("/", {"author__email": "[email protected]"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book, self.djangonaut_book])
def test_fieldlistfilter_invalid_lookup_parameters(self):
"""Filtering by an invalid value."""
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get(
"/", {"author__id__exact": "StringNotInteger!"}
)
request.user = self.alfred
with self.assertRaises(IncorrectLookupParameters):
modeladmin.get_changelist_instance(request)
def test_simplelistfilter(self):
modeladmin = DecadeFilterBookAdmin(Book, site)
# Make sure that the first option is 'All' ---------------------------
request = self.request_factory.get("/", {})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), list(Book.objects.order_by("-id")))
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(filterspec.title, "publication decade")
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[0]["display"], "All")
self.assertIs(choices[0]["selected"], True)
self.assertEqual(choices[0]["query_string"], "?")
# Look for books in the 1980s ----------------------------------------
request = self.request_factory.get("/", {"publication-decade": "the 80s"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(filterspec.title, "publication decade")
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[1]["display"], "the 1980's")
self.assertIs(choices[1]["selected"], True)
self.assertEqual(choices[1]["query_string"], "?publication-decade=the+80s")
# Look for books in the 1990s ----------------------------------------
request = self.request_factory.get("/", {"publication-decade": "the 90s"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(filterspec.title, "publication decade")
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]["display"], "the 1990's")
self.assertIs(choices[2]["selected"], True)
self.assertEqual(choices[2]["query_string"], "?publication-decade=the+90s")
# Look for books in the 2000s ----------------------------------------
request = self.request_factory.get("/", {"publication-decade": "the 00s"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.guitar_book, self.djangonaut_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(filterspec.title, "publication decade")
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[3]["display"], "the 2000's")
self.assertIs(choices[3]["selected"], True)
self.assertEqual(choices[3]["query_string"], "?publication-decade=the+00s")
# Combine multiple filters -------------------------------------------
request = self.request_factory.get(
"/", {"publication-decade": "the 00s", "author__id__exact": self.alfred.pk}
)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.djangonaut_book])
# Make sure the correct choices are selected
filterspec = changelist.get_filters(request)[0][1]
self.assertEqual(filterspec.title, "publication decade")
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[3]["display"], "the 2000's")
self.assertIs(choices[3]["selected"], True)
self.assertEqual(
choices[3]["query_string"],
"?author__id__exact=%s&publication-decade=the+00s" % self.alfred.pk,
)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "Verbose Author")
choice = select_by(filterspec.choices(changelist), "display", "alfred")
self.assertIs(choice["selected"], True)
self.assertEqual(
choice["query_string"],
"?author__id__exact=%s&publication-decade=the+00s" % self.alfred.pk,
)
def test_listfilter_without_title(self):
"""
Any filter must define a title.
"""
modeladmin = DecadeFilterBookAdminWithoutTitle(Book, site)
request = self.request_factory.get("/", {})
request.user = self.alfred
msg = (
"The list filter 'DecadeListFilterWithoutTitle' does not specify a 'title'."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
modeladmin.get_changelist_instance(request)
def test_simplelistfilter_without_parameter(self):
"""
Any SimpleListFilter must define a parameter_name.
"""
modeladmin = DecadeFilterBookAdminWithoutParameter(Book, site)
request = self.request_factory.get("/", {})
request.user = self.alfred
msg = (
"The list filter 'DecadeListFilterWithoutParameter' does not specify a "
"'parameter_name'."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
modeladmin.get_changelist_instance(request)
def test_simplelistfilter_with_none_returning_lookups(self):
"""
        A SimpleListFilter's lookups() method can return None, which disables
        the filter completely.
"""
modeladmin = DecadeFilterBookAdminWithNoneReturningLookups(Book, site)
request = self.request_factory.get("/", {})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0]
self.assertEqual(len(filterspec), 0)
def test_filter_with_failing_queryset(self):
"""
When a filter's queryset method fails, it fails loudly and
the corresponding exception doesn't get swallowed (#17828).
"""
modeladmin = DecadeFilterBookAdminWithFailingQueryset(Book, site)
request = self.request_factory.get("/", {})
request.user = self.alfred
with self.assertRaises(ZeroDivisionError):
modeladmin.get_changelist_instance(request)
def test_simplelistfilter_with_queryset_based_lookups(self):
modeladmin = DecadeFilterBookAdminWithQuerysetBasedLookups(Book, site)
request = self.request_factory.get("/", {})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "publication decade")
choices = list(filterspec.choices(changelist))
self.assertEqual(len(choices), 3)
self.assertEqual(choices[0]["display"], "All")
self.assertIs(choices[0]["selected"], True)
self.assertEqual(choices[0]["query_string"], "?")
self.assertEqual(choices[1]["display"], "the 1990's")
self.assertIs(choices[1]["selected"], False)
self.assertEqual(choices[1]["query_string"], "?publication-decade=the+90s")
self.assertEqual(choices[2]["display"], "the 2000's")
self.assertIs(choices[2]["selected"], False)
self.assertEqual(choices[2]["query_string"], "?publication-decade=the+00s")
def test_two_characters_long_field(self):
"""
list_filter works with two-characters long field names (#16080).
"""
modeladmin = BookAdmin(Book, site)
request = self.request_factory.get("/", {"no": "207"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
filterspec = changelist.get_filters(request)[0][5]
self.assertEqual(filterspec.title, "number")
choices = list(filterspec.choices(changelist))
self.assertIs(choices[2]["selected"], True)
self.assertEqual(choices[2]["query_string"], "?no=207")
def test_parameter_ends_with__in__or__isnull(self):
"""
A SimpleListFilter's parameter name is not mistaken for a model field
if it ends with '__isnull' or '__in' (#17091).
"""
# When it ends with '__in' -----------------------------------------
modeladmin = DecadeFilterBookAdminParameterEndsWith__In(Book, site)
request = self.request_factory.get("/", {"decade__in": "the 90s"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "publication decade")
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]["display"], "the 1990's")
self.assertIs(choices[2]["selected"], True)
self.assertEqual(choices[2]["query_string"], "?decade__in=the+90s")
# When it ends with '__isnull' ---------------------------------------
modeladmin = DecadeFilterBookAdminParameterEndsWith__Isnull(Book, site)
request = self.request_factory.get("/", {"decade__isnull": "the 90s"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.bio_book])
# Make sure the correct choice is selected
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "publication decade")
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[2]["display"], "the 1990's")
self.assertIs(choices[2]["selected"], True)
self.assertEqual(choices[2]["query_string"], "?decade__isnull=the+90s")
def test_lookup_with_non_string_value(self):
"""
        Ensure choices are marked as selected when using non-string values
for lookups in SimpleListFilters (#19318).
"""
modeladmin = DepartmentFilterEmployeeAdmin(Employee, site)
request = self.request_factory.get("/", {"department": self.john.department.pk})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(filterspec.title, "department")
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[1]["display"], "DEV")
self.assertIs(choices[1]["selected"], True)
self.assertEqual(
choices[1]["query_string"], "?department=%s" % self.john.department.pk
)
def test_lookup_with_non_string_value_underscored(self):
"""
Ensure SimpleListFilter lookups pass lookup_allowed checks when
parameter_name attribute contains double-underscore value (#19182).
"""
modeladmin = DepartmentFilterUnderscoredEmployeeAdmin(Employee, site)
request = self.request_factory.get(
"/", {"department__whatever": self.john.department.pk}
)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(filterspec.title, "department")
choices = list(filterspec.choices(changelist))
self.assertEqual(choices[1]["display"], "DEV")
self.assertIs(choices[1]["selected"], True)
self.assertEqual(
choices[1]["query_string"],
"?department__whatever=%s" % self.john.department.pk,
)
def test_fk_with_to_field(self):
"""
A filter on a FK respects the FK's to_field attribute (#17972).
"""
modeladmin = EmployeeAdmin(Employee, site)
request = self.request_factory.get("/", {})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.jack, self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(filterspec.title, "department")
choices = [
(choice["display"], choice["selected"], choice["query_string"])
for choice in filterspec.choices(changelist)
]
self.assertCountEqual(
choices,
[
("All", True, "?"),
("Development", False, "?department__code__exact=DEV"),
("Design", False, "?department__code__exact=DSN"),
],
)
# Filter by Department=='Development' --------------------------------
request = self.request_factory.get("/", {"department__code__exact": "DEV"})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [self.john])
filterspec = changelist.get_filters(request)[0][-1]
self.assertEqual(filterspec.title, "department")
choices = [
(choice["display"], choice["selected"], choice["query_string"])
for choice in filterspec.choices(changelist)
]
self.assertCountEqual(
choices,
[
("All", False, "?"),
("Development", True, "?department__code__exact=DEV"),
("Design", False, "?department__code__exact=DSN"),
],
)
def test_lookup_with_dynamic_value(self):
"""
Ensure SimpleListFilter can access self.value() inside the lookup.
"""
modeladmin = DepartmentFilterDynamicValueBookAdmin(Book, site)
def _test_choices(request, expected_displays):
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "publication decade")
choices = tuple(c["display"] for c in filterspec.choices(changelist))
self.assertEqual(choices, expected_displays)
_test_choices(
self.request_factory.get("/", {}), ("All", "the 1980's", "the 1990's")
)
_test_choices(
self.request_factory.get("/", {"publication-decade": "the 80s"}),
("All", "the 1990's"),
)
_test_choices(
self.request_factory.get("/", {"publication-decade": "the 90s"}),
("All", "the 1980's"),
)
def test_list_filter_queryset_filtered_by_default(self):
"""
A list filter that filters the queryset by default gives the correct
full_result_count.
"""
modeladmin = NotNinetiesListFilterAdmin(Book, site)
request = self.request_factory.get("/", {})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
changelist.get_results(request)
self.assertEqual(changelist.full_result_count, 4)
def test_emptylistfieldfilter(self):
empty_description = Department.objects.create(code="EMPT", description="")
none_description = Department.objects.create(code="NONE", description=None)
empty_title = Book.objects.create(title="", author=self.alfred)
department_admin = DepartmentAdminWithEmptyFieldListFilter(Department, site)
book_admin = BookAdminWithEmptyFieldListFilter(Book, site)
tests = [
# Allows nulls and empty strings.
(
department_admin,
{"description__isempty": "1"},
[empty_description, none_description],
),
(
department_admin,
{"description__isempty": "0"},
[self.dev, self.design],
),
# Allows nulls.
(book_admin, {"author__isempty": "1"}, [self.guitar_book]),
(
book_admin,
{"author__isempty": "0"},
[self.django_book, self.bio_book, self.djangonaut_book, empty_title],
),
# Allows empty strings.
(book_admin, {"title__isempty": "1"}, [empty_title]),
(
book_admin,
{"title__isempty": "0"},
[
self.django_book,
self.bio_book,
self.djangonaut_book,
self.guitar_book,
],
),
]
for modeladmin, query_string, expected_result in tests:
with self.subTest(
modeladmin=modeladmin.__class__.__name__,
query_string=query_string,
):
request = self.request_factory.get("/", query_string)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
self.assertCountEqual(queryset, expected_result)
def test_emptylistfieldfilter_reverse_relationships(self):
class UserAdminReverseRelationship(UserAdmin):
list_filter = (("books_contributed", EmptyFieldListFilter),)
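        # "books_contributed" is the reverse side of a many-to-many from
        # Book to User, so this filter traverses a reverse m2m relation.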
ImprovedBook.objects.create(book=self.guitar_book)
no_employees = Department.objects.create(code="NONE", description=None)
book_admin = BookAdminWithEmptyFieldListFilter(Book, site)
department_admin = DepartmentAdminWithEmptyFieldListFilter(Department, site)
user_admin = UserAdminReverseRelationship(User, site)
tests = [
# Reverse one-to-one relationship.
(
book_admin,
{"improvedbook__isempty": "1"},
[self.django_book, self.bio_book, self.djangonaut_book],
),
(book_admin, {"improvedbook__isempty": "0"}, [self.guitar_book]),
# Reverse foreign key relationship.
(department_admin, {"employee__isempty": "1"}, [no_employees]),
(department_admin, {"employee__isempty": "0"}, [self.dev, self.design]),
# Reverse many-to-many relationship.
(user_admin, {"books_contributed__isempty": "1"}, [self.alfred]),
(user_admin, {"books_contributed__isempty": "0"}, [self.bob, self.lisa]),
]
for modeladmin, query_string, expected_result in tests:
with self.subTest(
modeladmin=modeladmin.__class__.__name__,
query_string=query_string,
):
request = self.request_factory.get("/", query_string)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
self.assertCountEqual(queryset, expected_result)
def test_emptylistfieldfilter_genericrelation(self):
class BookmarkGenericRelation(ModelAdmin):
list_filter = (("tags", EmptyFieldListFilter),)
modeladmin = BookmarkGenericRelation(Bookmark, site)
django_bookmark = Bookmark.objects.create(url="https://www.djangoproject.com/")
python_bookmark = Bookmark.objects.create(url="https://www.python.org/")
none_tags = Bookmark.objects.create(url="https://www.kernel.org/")
TaggedItem.objects.create(content_object=django_bookmark, tag="python")
TaggedItem.objects.create(content_object=python_bookmark, tag="python")
tests = [
({"tags__isempty": "1"}, [none_tags]),
({"tags__isempty": "0"}, [django_bookmark, python_bookmark]),
]
for query_string, expected_result in tests:
with self.subTest(query_string=query_string):
request = self.request_factory.get("/", query_string)
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
queryset = changelist.get_queryset(request)
self.assertCountEqual(queryset, expected_result)
def test_emptylistfieldfilter_choices(self):
modeladmin = BookAdminWithEmptyFieldListFilter(Book, site)
request = self.request_factory.get("/")
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
filterspec = changelist.get_filters(request)[0][0]
self.assertEqual(filterspec.title, "Verbose Author")
choices = list(filterspec.choices(changelist))
self.assertEqual(len(choices), 3)
self.assertEqual(choices[0]["display"], "All")
self.assertIs(choices[0]["selected"], True)
self.assertEqual(choices[0]["query_string"], "?")
self.assertEqual(choices[1]["display"], "Empty")
self.assertIs(choices[1]["selected"], False)
self.assertEqual(choices[1]["query_string"], "?author__isempty=1")
self.assertEqual(choices[2]["display"], "Not empty")
self.assertIs(choices[2]["selected"], False)
self.assertEqual(choices[2]["query_string"], "?author__isempty=0")
def test_emptylistfieldfilter_non_empty_field(self):
class EmployeeAdminWithEmptyFieldListFilter(ModelAdmin):
list_filter = [("department", EmptyFieldListFilter)]
modeladmin = EmployeeAdminWithEmptyFieldListFilter(Employee, site)
request = self.request_factory.get("/")
request.user = self.alfred
msg = (
"The list filter 'EmptyFieldListFilter' cannot be used with field "
"'department' which doesn't allow empty strings and nulls."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
modeladmin.get_changelist_instance(request)
def test_emptylistfieldfilter_invalid_lookup_parameters(self):
modeladmin = BookAdminWithEmptyFieldListFilter(Book, site)
request = self.request_factory.get("/", {"author__isempty": 42})
request.user = self.alfred
with self.assertRaises(IncorrectLookupParameters):
modeladmin.get_changelist_instance(request)
def test_lookup_using_custom_divider(self):
"""
Filter __in lookups with a custom divider.
"""
jane = Employee.objects.create(name="Jane,Green", department=self.design)
modeladmin = EmployeeCustomDividerFilterAdmin(Employee, site)
employees = [jane, self.jack]
request = self.request_factory.get(
"/", {"name__in": "|".join(e.name for e in employees)}
)
        # Test a lookup that uses the custom divider.
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), employees)
        # Test a lookup whose value contains a comma.
request = self.request_factory.get("/", {"name": jane.name})
request.user = self.alfred
changelist = modeladmin.get_changelist_instance(request)
# Make sure the correct queryset is returned
queryset = changelist.get_queryset(request)
self.assertEqual(list(queryset), [jane])
|
fb47b74d599fff66bdf606b55fdb46f45169c2a72f40e60a79b7c12fc6ba0d75 | import datetime
import pickle
from decimal import Decimal
from operator import attrgetter
from unittest import mock
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
Aggregate,
Avg,
Case,
Count,
DecimalField,
F,
IntegerField,
Max,
Q,
StdDev,
Sum,
Value,
Variance,
When,
)
from django.test import TestCase, skipUnlessAnyDBFeature, skipUnlessDBFeature
from django.test.utils import Approximate
from .models import (
Alfa,
Author,
Book,
Bravo,
Charlie,
Clues,
Entries,
HardbackBook,
ItemTag,
Publisher,
SelfRefFK,
Store,
WithManualPK,
)
class AggregationTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="Brad Dayley", age=45)
cls.a4 = Author.objects.create(name="James Bennett", age=29)
cls.a5 = Author.objects.create(name="Jeffrey Forcier", age=37)
cls.a6 = Author.objects.create(name="Paul Bissex", age=29)
cls.a7 = Author.objects.create(name="Wesley J. Chun", age=25)
cls.a8 = Author.objects.create(name="Peter Norvig", age=57)
cls.a9 = Author.objects.create(name="Stuart Russell", age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name="Apress", num_awards=3)
cls.p2 = Publisher.objects.create(name="Sams", num_awards=1)
cls.p3 = Publisher.objects.create(name="Prentice Hall", num_awards=7)
cls.p4 = Publisher.objects.create(name="Morgan Kaufmann", num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn="159059725",
name="The Definitive Guide to Django: Web Development Done Right",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn="067232959",
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
rating=3.0,
price=Decimal("23.09"),
contact=cls.a3,
publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn="159059996",
name="Practical Django Projects",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a4,
publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.b4 = Book.objects.create(
isbn="013235613",
name="Python Web Development with Django",
pages=350,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a5,
publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3),
)
cls.b5 = HardbackBook.objects.create(
isbn="013790395",
name="Artificial Intelligence: A Modern Approach",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a8,
publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15),
weight=4.5,
)
cls.b6 = HardbackBook.objects.create(
isbn="155860191",
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a8,
publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15),
weight=3.7,
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
s2 = Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30),
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def assertObjectAttrs(self, obj, **kwargs):
for attr, value in kwargs.items():
self.assertEqual(getattr(obj, attr), value)
def test_annotation_with_value(self):
values = (
Book.objects.filter(
name="Practical Django Projects",
)
.annotate(
discount_price=F("price") * 2,
)
.values(
"discount_price",
)
.annotate(sum_discount=Sum("discount_price"))
)
self.assertSequenceEqual(
values,
[{"discount_price": Decimal("59.38"), "sum_discount": Decimal("59.38")}],
)
def test_aggregates_in_where_clause(self):
"""
Regression test for #12822: DatabaseError: aggregates not allowed in
WHERE clause
The subselect works and returns results equivalent to a
query with the IDs listed.
Before the corresponding fix for this bug, this test passed in 1.1 and
failed in 1.2-beta (trunk).
"""
qs = Book.objects.values("contact").annotate(Max("id"))
qs = qs.order_by("contact").values_list("id__max", flat=True)
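        # As a subquery this compiles to roughly:
        #   SELECT MAX(id) FROM aggregation_regress_book GROUP BY contact_id
        # (names approximate).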
# don't do anything with the queryset (qs) before including it as a
# subquery
books = Book.objects.order_by("id")
qs1 = books.filter(id__in=qs)
qs2 = books.filter(id__in=list(qs))
self.assertEqual(list(qs1), list(qs2))
def test_aggregates_in_where_clause_pre_eval(self):
"""
Regression test for #12822: DatabaseError: aggregates not allowed in
WHERE clause
Same as the above test, but evaluates the queryset for the subquery
before it's used as a subquery.
Before the corresponding fix for this bug, this test failed in both
1.1 and 1.2-beta (trunk).
"""
qs = Book.objects.values("contact").annotate(Max("id"))
qs = qs.order_by("contact").values_list("id__max", flat=True)
# force the queryset (qs) for the subquery to be evaluated in its
# current state
list(qs)
books = Book.objects.order_by("id")
qs1 = books.filter(id__in=qs)
qs2 = books.filter(id__in=list(qs))
self.assertEqual(list(qs1), list(qs2))
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_annotate_with_extra(self):
"""
Regression test for #11916: Extra params + aggregation creates
incorrect SQL.
"""
# Oracle doesn't support subqueries in group by clause
shortest_book_sql = """
SELECT name
FROM aggregation_regress_book b
WHERE b.publisher_id = aggregation_regress_publisher.id
ORDER BY b.pages
LIMIT 1
"""
# tests that this query does not raise a DatabaseError due to the full
# subselect being (erroneously) added to the GROUP BY parameters
qs = Publisher.objects.extra(
select={
"name_of_shortest_book": shortest_book_sql,
}
).annotate(total_books=Count("book"))
# force execution of the query
list(qs)
def test_aggregate(self):
# Ordering requests are ignored
self.assertEqual(
Author.objects.order_by("name").aggregate(Avg("age")),
{"age__avg": Approximate(37.444, places=1)},
)
# Implicit ordering is also ignored
self.assertEqual(
Book.objects.aggregate(Sum("pages")),
{"pages__sum": 3703},
)
# Baseline results
self.assertEqual(
Book.objects.aggregate(Sum("pages"), Avg("pages")),
{"pages__sum": 3703, "pages__avg": Approximate(617.166, places=2)},
)
# Empty values query doesn't affect grouping or results
self.assertEqual(
Book.objects.values().aggregate(Sum("pages"), Avg("pages")),
{"pages__sum": 3703, "pages__avg": Approximate(617.166, places=2)},
)
# Aggregate overrides extra selected column
self.assertEqual(
Book.objects.extra(select={"price_per_page": "price / pages"}).aggregate(
Sum("pages")
),
{"pages__sum": 3703},
)
def test_annotation(self):
# Annotations get combined with extra select clauses
obj = (
Book.objects.annotate(mean_auth_age=Avg("authors__age"))
.extra(select={"manufacture_cost": "price * .5"})
.get(pk=self.b2.pk)
)
self.assertObjectAttrs(
obj,
contact_id=self.a3.id,
isbn="067232959",
mean_auth_age=45.0,
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
price=Decimal("23.09"),
pubdate=datetime.date(2008, 3, 3),
publisher_id=self.p2.id,
rating=3.0,
)
# Different DB backends return different types for the extra select computation
self.assertIn(obj.manufacture_cost, (11.545, Decimal("11.545")))
# Order of the annotate/extra in the query doesn't matter
obj = (
Book.objects.extra(select={"manufacture_cost": "price * .5"})
.annotate(mean_auth_age=Avg("authors__age"))
.get(pk=self.b2.pk)
)
self.assertObjectAttrs(
obj,
contact_id=self.a3.id,
isbn="067232959",
mean_auth_age=45.0,
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
price=Decimal("23.09"),
pubdate=datetime.date(2008, 3, 3),
publisher_id=self.p2.id,
rating=3.0,
)
# Different DB backends return different types for the extra select computation
self.assertIn(obj.manufacture_cost, (11.545, Decimal("11.545")))
# Values queries can be combined with annotate and extra
obj = (
Book.objects.annotate(mean_auth_age=Avg("authors__age"))
.extra(select={"manufacture_cost": "price * .5"})
.values()
.get(pk=self.b2.pk)
)
manufacture_cost = obj["manufacture_cost"]
self.assertIn(manufacture_cost, (11.545, Decimal("11.545")))
del obj["manufacture_cost"]
self.assertEqual(
obj,
{
"id": self.b2.id,
"contact_id": self.a3.id,
"isbn": "067232959",
"mean_auth_age": 45.0,
"name": "Sams Teach Yourself Django in 24 Hours",
"pages": 528,
"price": Decimal("23.09"),
"pubdate": datetime.date(2008, 3, 3),
"publisher_id": self.p2.id,
"rating": 3.0,
},
)
# The order of the (empty) values, annotate and extra clauses doesn't
# matter
obj = (
Book.objects.values()
.annotate(mean_auth_age=Avg("authors__age"))
.extra(select={"manufacture_cost": "price * .5"})
.get(pk=self.b2.pk)
)
manufacture_cost = obj["manufacture_cost"]
self.assertIn(manufacture_cost, (11.545, Decimal("11.545")))
del obj["manufacture_cost"]
self.assertEqual(
obj,
{
"id": self.b2.id,
"contact_id": self.a3.id,
"isbn": "067232959",
"mean_auth_age": 45.0,
"name": "Sams Teach Yourself Django in 24 Hours",
"pages": 528,
"price": Decimal("23.09"),
"pubdate": datetime.date(2008, 3, 3),
"publisher_id": self.p2.id,
"rating": 3.0,
},
)
# If the annotation precedes the values clause, it won't be included
# unless it is explicitly named
obj = (
Book.objects.annotate(mean_auth_age=Avg("authors__age"))
.extra(select={"price_per_page": "price / pages"})
.values("name")
.get(pk=self.b1.pk)
)
self.assertEqual(
obj,
{
"name": "The Definitive Guide to Django: Web Development Done Right",
},
)
obj = (
Book.objects.annotate(mean_auth_age=Avg("authors__age"))
.extra(select={"price_per_page": "price / pages"})
.values("name", "mean_auth_age")
.get(pk=self.b1.pk)
)
self.assertEqual(
obj,
{
"mean_auth_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
},
)
# If an annotation isn't included in the values, it can still be used
# in a filter
qs = (
Book.objects.annotate(n_authors=Count("authors"))
.values("name")
.filter(n_authors__gt=2)
)
self.assertSequenceEqual(
qs,
[{"name": "Python Web Development with Django"}],
)
# The annotations are added to values output if values() precedes
# annotate()
obj = (
Book.objects.values("name")
.annotate(mean_auth_age=Avg("authors__age"))
.extra(select={"price_per_page": "price / pages"})
.get(pk=self.b1.pk)
)
self.assertEqual(
obj,
{
"mean_auth_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
},
)
        # All of the objects are counted (nulls are allowed), and values()
        # respects the number of objects.
self.assertEqual(len(Author.objects.annotate(Avg("friends__age")).values()), 9)
# Consecutive calls to annotate accumulate in the query
qs = (
Book.objects.values("price")
.annotate(oldest=Max("authors__age"))
.order_by("oldest", "price")
.annotate(Max("publisher__num_awards"))
)
self.assertSequenceEqual(
qs,
[
{"price": Decimal("30"), "oldest": 35, "publisher__num_awards__max": 3},
{
"price": Decimal("29.69"),
"oldest": 37,
"publisher__num_awards__max": 7,
},
{
"price": Decimal("23.09"),
"oldest": 45,
"publisher__num_awards__max": 1,
},
{"price": Decimal("75"), "oldest": 57, "publisher__num_awards__max": 9},
{
"price": Decimal("82.8"),
"oldest": 57,
"publisher__num_awards__max": 7,
},
],
)
def test_aggregate_annotation(self):
# Aggregates can be composed over annotations.
# The return type is derived from the composed aggregate
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(
Max("pages"), Max("price"), Sum("num_authors"), Avg("num_authors")
)
self.assertEqual(
vals,
{
"num_authors__sum": 10,
"num_authors__avg": Approximate(1.666, places=2),
"pages__max": 1132,
"price__max": Decimal("82.80"),
},
)
# Regression for #15624 - Missing SELECT columns when using values, annotate
# and aggregate in a single query
self.assertEqual(
Book.objects.annotate(c=Count("authors")).values("c").aggregate(Max("c")),
{"c__max": 3},
)
def test_conditional_aggregate(self):
# Conditional aggregation of a grouped queryset.
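        # Roughly SUM(CASE WHEN author_count > 1 THEN 1 END) over the
        # per-book rows; the three multi-author books contribute 1 each.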
self.assertEqual(
Book.objects.annotate(c=Count("authors"))
.values("pk")
.aggregate(test=Sum(Case(When(c__gt=1, then=1))))["test"],
3,
)
def test_sliced_conditional_aggregate(self):
self.assertEqual(
Author.objects.order_by("pk")[:5].aggregate(
test=Sum(Case(When(age__lte=35, then=1)))
)["test"],
3,
)
def test_annotated_conditional_aggregate(self):
annotated_qs = Book.objects.annotate(
discount_price=F("price") * Decimal("0.75")
)
self.assertAlmostEqual(
annotated_qs.aggregate(
test=Avg(
Case(
When(pages__lt=400, then="discount_price"),
output_field=DecimalField(),
)
)
)["test"],
Decimal("22.27"),
places=2,
)
def test_distinct_conditional_aggregate(self):
self.assertEqual(
Book.objects.distinct().aggregate(
test=Avg(
Case(
When(price=Decimal("29.69"), then="pages"),
output_field=IntegerField(),
)
)
)["test"],
325,
)
def test_conditional_aggregate_on_complex_condition(self):
self.assertEqual(
Book.objects.distinct().aggregate(
test=Avg(
Case(
When(
Q(price__gte=Decimal("29")) & Q(price__lt=Decimal("30")),
then="pages",
),
output_field=IntegerField(),
)
)
)["test"],
325,
)
def test_decimal_aggregate_annotation_filter(self):
"""
Filtering on an aggregate annotation with Decimal values should work.
Requires special handling on SQLite (#18247).
"""
self.assertEqual(
len(
Author.objects.annotate(sum=Sum("book_contact_set__price")).filter(
sum__gt=Decimal(40)
)
),
1,
)
self.assertEqual(
len(
Author.objects.annotate(sum=Sum("book_contact_set__price")).filter(
sum__lte=Decimal(40)
)
),
4,
)
def test_field_error(self):
# Bad field requests in aggregates are caught and reported
msg = (
"Cannot resolve keyword 'foo' into field. Choices are: authors, "
"contact, contact_id, hardbackbook, id, isbn, name, pages, price, "
"pubdate, publisher, publisher_id, rating, store, tags"
)
with self.assertRaisesMessage(FieldError, msg):
Book.objects.aggregate(num_authors=Count("foo"))
with self.assertRaisesMessage(FieldError, msg):
Book.objects.annotate(num_authors=Count("foo"))
msg = (
"Cannot resolve keyword 'foo' into field. Choices are: authors, "
"contact, contact_id, hardbackbook, id, isbn, name, num_authors, "
"pages, price, pubdate, publisher, publisher_id, rating, store, tags"
)
with self.assertRaisesMessage(FieldError, msg):
Book.objects.annotate(num_authors=Count("authors__id")).aggregate(
Max("foo")
)
def test_more(self):
# Old-style count aggregations can be mixed with new-style
        self.assertEqual(
            Book.objects.annotate(num_authors=Count("authors")).count(), 6
        )
        # Non-ordinal, non-computed aggregates (such as Max) over annotations
        # correctly inherit the annotation's internal type if the annotation
        # is ordinal or computed.
vals = Book.objects.annotate(num_authors=Count("authors")).aggregate(
Max("num_authors")
)
self.assertEqual(vals, {"num_authors__max": 3})
vals = Publisher.objects.annotate(avg_price=Avg("book__price")).aggregate(
Max("avg_price")
)
self.assertEqual(vals, {"avg_price__max": 75.0})
        # Aliases are quoted to protect aliases that might be reserved names.
vals = Book.objects.aggregate(number=Max("pages"), select=Max("pages"))
self.assertEqual(vals, {"number": 1132, "select": 1132})
# Regression for #10064: select_related() plays nice with aggregates
obj = (
Book.objects.select_related("publisher")
.annotate(num_authors=Count("authors"))
.values()
.get(isbn="013790395")
)
self.assertEqual(
obj,
{
"contact_id": self.a8.id,
"id": self.b5.id,
"isbn": "013790395",
"name": "Artificial Intelligence: A Modern Approach",
"num_authors": 2,
"pages": 1132,
"price": Decimal("82.8"),
"pubdate": datetime.date(1995, 1, 15),
"publisher_id": self.p3.id,
"rating": 4.0,
},
)
# Regression for #10010: exclude on an aggregate field is correctly
# negated
self.assertEqual(len(Book.objects.annotate(num_authors=Count("authors"))), 6)
self.assertEqual(
len(
Book.objects.annotate(num_authors=Count("authors")).filter(
num_authors__gt=2
)
),
1,
)
self.assertEqual(
len(
Book.objects.annotate(num_authors=Count("authors")).exclude(
num_authors__gt=2
)
),
5,
)
self.assertEqual(
len(
Book.objects.annotate(num_authors=Count("authors"))
.filter(num_authors__lt=3)
.exclude(num_authors__lt=2)
),
2,
)
self.assertEqual(
len(
Book.objects.annotate(num_authors=Count("authors"))
.exclude(num_authors__lt=2)
.filter(num_authors__lt=3)
),
2,
)
def test_aggregate_fexpr(self):
# Aggregates can be used with F() expressions
# ... where the F() is pushed into the HAVING clause
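        # num_books is an aggregate while num_awards is a plain column, so
        # the comparison compiles to roughly
        #   HAVING COUNT(book.id) < (num_awards / 2).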
qs = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__lt=F("num_awards") / 2)
.order_by("name")
.values("name", "num_books", "num_awards")
)
self.assertSequenceEqual(
qs,
[
{"num_books": 1, "name": "Morgan Kaufmann", "num_awards": 9},
{"num_books": 2, "name": "Prentice Hall", "num_awards": 7},
],
)
qs = (
Publisher.objects.annotate(num_books=Count("book"))
.exclude(num_books__lt=F("num_awards") / 2)
.order_by("name")
.values("name", "num_books", "num_awards")
)
self.assertSequenceEqual(
qs,
[
{"num_books": 2, "name": "Apress", "num_awards": 3},
{"num_books": 0, "name": "Jonno's House of Books", "num_awards": 0},
{"num_books": 1, "name": "Sams", "num_awards": 1},
],
)
# ... and where the F() references an aggregate
qs = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_awards__gt=2 * F("num_books"))
.order_by("name")
.values("name", "num_books", "num_awards")
)
self.assertSequenceEqual(
qs,
[
{"num_books": 1, "name": "Morgan Kaufmann", "num_awards": 9},
{"num_books": 2, "name": "Prentice Hall", "num_awards": 7},
],
)
qs = (
Publisher.objects.annotate(num_books=Count("book"))
.exclude(num_books__lt=F("num_awards") / 2)
.order_by("name")
.values("name", "num_books", "num_awards")
)
self.assertSequenceEqual(
qs,
[
{"num_books": 2, "name": "Apress", "num_awards": 3},
{"num_books": 0, "name": "Jonno's House of Books", "num_awards": 0},
{"num_books": 1, "name": "Sams", "num_awards": 1},
],
)
def test_db_col_table(self):
# Tests on fields with non-default table and column names.
qs = Clues.objects.values("EntryID__Entry").annotate(
Appearances=Count("EntryID"), Distinct_Clues=Count("Clue", distinct=True)
)
self.assertQuerysetEqual(qs, [])
qs = Entries.objects.annotate(clue_count=Count("clues__ID"))
self.assertQuerysetEqual(qs, [])
def test_boolean_conversion(self):
        # Aggregates used to mix up the ordering of columns for the backend's
        # convert_values() method. Refs #21126.
e = Entries.objects.create(Entry="foo")
c = Clues.objects.create(EntryID=e, Clue="bar")
qs = Clues.objects.select_related("EntryID").annotate(Count("ID"))
self.assertSequenceEqual(qs, [c])
self.assertEqual(qs[0].EntryID, e)
self.assertIs(qs[0].EntryID.Exclude, False)
def test_empty(self):
# Regression for #10089: Check handling of empty result sets with
# aggregates
self.assertEqual(Book.objects.filter(id__in=[]).count(), 0)
vals = Book.objects.filter(id__in=[]).aggregate(
num_authors=Count("authors"),
avg_authors=Avg("authors"),
max_authors=Max("authors"),
max_price=Max("price"),
max_rating=Max("rating"),
)
self.assertEqual(
vals,
{
"max_authors": None,
"max_rating": None,
"num_authors": 0,
"avg_authors": None,
"max_price": None,
},
)
qs = (
Publisher.objects.filter(name="Jonno's House of Books")
.annotate(
num_authors=Count("book__authors"),
avg_authors=Avg("book__authors"),
max_authors=Max("book__authors"),
max_price=Max("book__price"),
max_rating=Max("book__rating"),
)
.values()
)
self.assertSequenceEqual(
qs,
[
{
"max_authors": None,
"name": "Jonno's House of Books",
"num_awards": 0,
"max_price": None,
"num_authors": 0,
"max_rating": None,
"id": self.p5.id,
"avg_authors": None,
}
],
)
def test_more_more(self):
# Regression for #10113 - Fields mentioned in order_by() must be
# included in the GROUP BY. This only becomes a problem when the
# order_by introduces a new join.
self.assertQuerysetEqual(
Book.objects.annotate(num_authors=Count("authors")).order_by(
"publisher__name", "name"
),
[
"Practical Django Projects",
"The Definitive Guide to Django: Web Development Done Right",
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
"Artificial Intelligence: A Modern Approach",
"Python Web Development with Django",
"Sams Teach Yourself Django in 24 Hours",
],
lambda b: b.name,
)
# Regression for #10127 - Empty select_related() works with annotate
qs = (
Book.objects.filter(rating__lt=4.5)
.select_related()
.annotate(Avg("authors__age"))
.order_by("name")
)
self.assertQuerysetEqual(
qs,
[
(
"Artificial Intelligence: A Modern Approach",
51.5,
"Prentice Hall",
"Peter Norvig",
),
("Practical Django Projects", 29.0, "Apress", "James Bennett"),
(
"Python Web Development with Django",
Approximate(30.333, places=2),
"Prentice Hall",
"Jeffrey Forcier",
),
("Sams Teach Yourself Django in 24 Hours", 45.0, "Sams", "Brad Dayley"),
],
lambda b: (b.name, b.authors__age__avg, b.publisher.name, b.contact.name),
)
        # Regression for #10132 - If the values() clause only mentions extra
        # (select=) columns, those columns are used for grouping
qs = (
Book.objects.extra(select={"pub": "publisher_id"})
.values("pub")
.annotate(Count("id"))
.order_by("pub")
)
self.assertSequenceEqual(
qs,
[
{"pub": self.p1.id, "id__count": 2},
{"pub": self.p2.id, "id__count": 1},
{"pub": self.p3.id, "id__count": 2},
{"pub": self.p4.id, "id__count": 1},
],
)
qs = (
Book.objects.extra(select={"pub": "publisher_id", "foo": "pages"})
.values("pub")
.annotate(Count("id"))
.order_by("pub")
)
self.assertSequenceEqual(
qs,
[
{"pub": self.p1.id, "id__count": 2},
{"pub": self.p2.id, "id__count": 1},
{"pub": self.p3.id, "id__count": 2},
{"pub": self.p4.id, "id__count": 1},
],
)
        # Regression for #10182 - Queries with aggregate calls are correctly
        # re-aliased when used in a subquery
ids = (
Book.objects.filter(pages__gt=100)
.annotate(n_authors=Count("authors"))
.filter(n_authors__gt=2)
.order_by("n_authors")
)
self.assertQuerysetEqual(
Book.objects.filter(id__in=ids),
[
"Python Web Development with Django",
],
lambda b: b.name,
)
# Regression for #15709 - Ensure each group_by field only exists once
# per query
qstr = str(
Book.objects.values("publisher")
.annotate(max_pages=Max("pages"))
.order_by()
.query
)
# There is just one GROUP BY clause (zero commas means at most one clause).
self.assertEqual(qstr[qstr.index("GROUP BY") :].count(", "), 0)
def test_duplicate_alias(self):
# Regression for #11256 - duplicating a default alias raises ValueError.
msg = (
"The named annotation 'authors__age__avg' conflicts with "
"the default name for another annotation."
)
with self.assertRaisesMessage(ValueError, msg):
Book.objects.annotate(
Avg("authors__age"), authors__age__avg=Avg("authors__age")
)
def test_field_name_conflict(self):
# Regression for #11256 - providing an aggregate name
# that conflicts with a field name on the model raises ValueError
msg = "The annotation 'age' conflicts with a field on the model."
with self.assertRaisesMessage(ValueError, msg):
Author.objects.annotate(age=Avg("friends__age"))
def test_m2m_name_conflict(self):
# Regression for #11256 - providing an aggregate name
# that conflicts with an m2m name on the model raises ValueError
msg = "The annotation 'friends' conflicts with a field on the model."
with self.assertRaisesMessage(ValueError, msg):
Author.objects.annotate(friends=Count("friends"))
def test_fk_attname_conflict(self):
msg = "The annotation 'contact_id' conflicts with a field on the model."
with self.assertRaisesMessage(ValueError, msg):
Book.objects.annotate(contact_id=F("publisher_id"))
def test_values_queryset_non_conflict(self):
        # If you're using a values queryset, some potential conflicts are
        # avoided.
        # age is a field on Author, so it shouldn't be allowed as an
        # aggregate name. But age isn't included in values(), so the name is
        # available.
results = (
Author.objects.values("name")
.annotate(age=Count("book_contact_set"))
.order_by("name")
)
self.assertEqual(len(results), 9)
self.assertEqual(results[0]["name"], "Adrian Holovaty")
self.assertEqual(results[0]["age"], 1)
# Same problem, but aggregating over m2m fields
results = (
Author.objects.values("name")
.annotate(age=Avg("friends__age"))
.order_by("name")
)
self.assertEqual(len(results), 9)
self.assertEqual(results[0]["name"], "Adrian Holovaty")
self.assertEqual(results[0]["age"], 32.0)
# Same problem, but colliding with an m2m field
results = (
Author.objects.values("name")
.annotate(friends=Count("friends"))
.order_by("name")
)
self.assertEqual(len(results), 9)
self.assertEqual(results[0]["name"], "Adrian Holovaty")
self.assertEqual(results[0]["friends"], 2)
def test_reverse_relation_name_conflict(self):
# Regression for #11256 - providing an aggregate name
# that conflicts with a reverse-related name on the model raises ValueError
msg = "The annotation 'book_contact_set' conflicts with a field on the model."
with self.assertRaisesMessage(ValueError, msg):
Author.objects.annotate(book_contact_set=Avg("friends__age"))
def test_pickle(self):
# Regression for #10197 -- Queries with aggregates can be pickled.
# First check that pickling is possible at all. No crash = success
qs = Book.objects.annotate(num_authors=Count("authors"))
pickle.dumps(qs)
# Then check that the round trip works.
query = qs.query.get_compiler(qs.db).as_sql()[0]
qs2 = pickle.loads(pickle.dumps(qs))
self.assertEqual(
qs2.query.get_compiler(qs2.db).as_sql()[0],
query,
)
def test_more_more_more(self):
# Regression for #10199 - Aggregate calls clone the original query so
# the original query can still be used
books = Book.objects.all()
books.aggregate(Avg("authors__age"))
self.assertQuerysetEqual(
books.all(),
[
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
"Practical Django Projects",
"Python Web Development with Django",
"Sams Teach Yourself Django in 24 Hours",
"The Definitive Guide to Django: Web Development Done Right",
],
lambda b: b.name,
)
# Regression for #10248 - Annotations work with dates()
qs = (
Book.objects.annotate(num_authors=Count("authors"))
.filter(num_authors=2)
.dates("pubdate", "day")
)
self.assertSequenceEqual(
qs,
[
datetime.date(1995, 1, 15),
datetime.date(2007, 12, 6),
],
)
# Regression for #10290 - extra selects with parameters can be used for
# grouping.
qs = (
Book.objects.annotate(mean_auth_age=Avg("authors__age"))
.extra(select={"sheets": "(pages + %s) / %s"}, select_params=[1, 2])
.order_by("sheets")
.values("sheets")
)
self.assertQuerysetEqual(
qs, [150, 175, 224, 264, 473, 566], lambda b: int(b["sheets"])
)
# Regression for 10425 - annotations don't get in the way of a count()
# clause
self.assertEqual(
Book.objects.values("publisher").annotate(Count("publisher")).count(), 4
)
self.assertEqual(
Book.objects.annotate(Count("publisher")).values("publisher").count(), 6
)
# Note: intentionally no order_by(), that case needs tests, too.
publishers = Publisher.objects.filter(id__in=[self.p1.id, self.p2.id])
self.assertEqual(sorted(p.name for p in publishers), ["Apress", "Sams"])
publishers = publishers.annotate(n_books=Count("book"))
sorted_publishers = sorted(publishers, key=lambda x: x.name)
self.assertEqual(sorted_publishers[0].n_books, 2)
self.assertEqual(sorted_publishers[1].n_books, 1)
self.assertEqual(sorted(p.name for p in publishers), ["Apress", "Sams"])
books = Book.objects.filter(publisher__in=publishers)
self.assertQuerysetEqual(
books,
[
"Practical Django Projects",
"Sams Teach Yourself Django in 24 Hours",
"The Definitive Guide to Django: Web Development Done Right",
],
lambda b: b.name,
)
self.assertEqual(sorted(p.name for p in publishers), ["Apress", "Sams"])
# Regression for 10666 - inherited fields work with annotations and
# aggregations
self.assertEqual(
HardbackBook.objects.aggregate(n_pages=Sum("book_ptr__pages")),
{"n_pages": 2078},
)
self.assertEqual(
HardbackBook.objects.aggregate(n_pages=Sum("pages")),
{"n_pages": 2078},
)
qs = (
HardbackBook.objects.annotate(
n_authors=Count("book_ptr__authors"),
)
.values("name", "n_authors")
.order_by("name")
)
self.assertSequenceEqual(
qs,
[
{"n_authors": 2, "name": "Artificial Intelligence: A Modern Approach"},
{
"n_authors": 1,
"name": (
"Paradigms of Artificial Intelligence Programming: Case "
"Studies in Common Lisp"
),
},
],
)
qs = (
HardbackBook.objects.annotate(n_authors=Count("authors"))
.values("name", "n_authors")
.order_by("name")
)
self.assertSequenceEqual(
qs,
[
{"n_authors": 2, "name": "Artificial Intelligence: A Modern Approach"},
{
"n_authors": 1,
"name": (
"Paradigms of Artificial Intelligence Programming: Case "
"Studies in Common Lisp"
),
},
],
)
        # Regression for #10766 - Shouldn't be able to reference an aggregate
        # field in an aggregate() call.
msg = "Cannot compute Avg('mean_age'): 'mean_age' is an aggregate"
with self.assertRaisesMessage(FieldError, msg):
Book.objects.annotate(mean_age=Avg("authors__age")).annotate(
Avg("mean_age")
)
def test_empty_filter_count(self):
self.assertEqual(
Author.objects.filter(id__in=[]).annotate(Count("friends")).count(), 0
)
def test_empty_filter_aggregate(self):
self.assertEqual(
Author.objects.filter(id__in=[])
.annotate(Count("friends"))
.aggregate(Count("pk")),
{"pk__count": 0},
)
def test_none_call_before_aggregate(self):
# Regression for #11789
self.assertEqual(
Author.objects.none().aggregate(Avg("age")), {"age__avg": None}
)
def test_annotate_and_join(self):
self.assertEqual(
Author.objects.annotate(c=Count("friends__name"))
.exclude(friends__name="Joe")
.count(),
Author.objects.count(),
)
def test_f_expression_annotation(self):
# Books with less than 200 pages per author.
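        # n_authors is an aggregate, so the pages__lt condition lands in
        # HAVING rather than WHERE.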
qs = (
Book.objects.values("name")
.annotate(n_authors=Count("authors"))
.filter(pages__lt=F("n_authors") * 200)
.values_list("pk")
)
self.assertQuerysetEqual(
Book.objects.filter(pk__in=qs),
["Python Web Development with Django"],
attrgetter("name"),
)
def test_values_annotate_values(self):
qs = (
Book.objects.values("name")
.annotate(n_authors=Count("authors"))
.values_list("pk", flat=True)
.order_by("name")
)
self.assertEqual(list(qs), list(Book.objects.values_list("pk", flat=True)))
def test_having_group_by(self):
        # When a field occurs on the LHS of a HAVING clause, it must also
        # appear correctly in the GROUP BY clause.
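        # Roughly: GROUP BY name, pages HAVING pages > COUNT(authors.id)
        # (names approximate).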
qs = (
Book.objects.values_list("name")
.annotate(n_authors=Count("authors"))
.filter(pages__gt=F("n_authors"))
.values_list("name", flat=True)
.order_by("name")
)
        # Results should be the same; all Books have more pages than authors.
self.assertEqual(list(qs), list(Book.objects.values_list("name", flat=True)))
def test_values_list_annotation_args_ordering(self):
"""
Annotate *args ordering should be preserved in values_list results.
**kwargs comes after *args.
Regression test for #23659.
"""
books = (
Book.objects.values_list("publisher__name")
.annotate(
Count("id"), Avg("price"), Avg("authors__age"), avg_pgs=Avg("pages")
)
.order_by("-publisher__name")
)
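        # Tuple layout: values_list() fields first, then the *args aggregates
        # in call order, then the **kwargs aggregates.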
self.assertEqual(books[0], ("Sams", 1, Decimal("23.09"), 45.0, 528.0))
def test_annotation_disjunction(self):
qs = (
Book.objects.annotate(n_authors=Count("authors"))
.filter(Q(n_authors=2) | Q(name="Python Web Development with Django"))
.order_by("name")
)
self.assertQuerysetEqual(
qs,
[
"Artificial Intelligence: A Modern Approach",
"Python Web Development with Django",
"The Definitive Guide to Django: Web Development Done Right",
],
attrgetter("name"),
)
qs = (
Book.objects.annotate(n_authors=Count("authors")).filter(
Q(name="The Definitive Guide to Django: Web Development Done Right")
| (
Q(name="Artificial Intelligence: A Modern Approach")
& Q(n_authors=3)
)
)
).order_by("name")
self.assertQuerysetEqual(
qs,
[
"The Definitive Guide to Django: Web Development Done Right",
],
attrgetter("name"),
)
qs = (
Publisher.objects.annotate(
rating_sum=Sum("book__rating"), book_count=Count("book")
)
.filter(Q(rating_sum__gt=5.5) | Q(rating_sum__isnull=True))
.order_by("pk")
)
self.assertQuerysetEqual(
qs,
[
"Apress",
"Prentice Hall",
"Jonno's House of Books",
],
attrgetter("name"),
)
qs = (
Publisher.objects.annotate(
rating_sum=Sum("book__rating"), book_count=Count("book")
)
.filter(Q(rating_sum__gt=F("book_count")) | Q(rating_sum=None))
.order_by("num_awards")
)
self.assertQuerysetEqual(
qs,
[
"Jonno's House of Books",
"Sams",
"Apress",
"Prentice Hall",
"Morgan Kaufmann",
],
attrgetter("name"),
)
def test_quoting_aggregate_order_by(self):
qs = (
Book.objects.filter(name="Python Web Development with Django")
.annotate(authorCount=Count("authors"))
.order_by("authorCount")
)
self.assertQuerysetEqual(
qs,
[
("Python Web Development with Django", 3),
],
lambda b: (b.name, b.authorCount),
)
def test_stddev(self):
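        # StdDev/Variance default to the population formula; sample=True
        # switches to the sample (n - 1 denominator) variant, hence the
        # larger values in the sample assertions below.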
self.assertEqual(
Book.objects.aggregate(StdDev("pages")),
{"pages__stddev": Approximate(311.46, 1)},
)
self.assertEqual(
Book.objects.aggregate(StdDev("rating")),
{"rating__stddev": Approximate(0.60, 1)},
)
self.assertEqual(
Book.objects.aggregate(StdDev("price")),
{"price__stddev": Approximate(Decimal("24.16"), 2)},
)
self.assertEqual(
Book.objects.aggregate(StdDev("pages", sample=True)),
{"pages__stddev": Approximate(341.19, 2)},
)
self.assertEqual(
Book.objects.aggregate(StdDev("rating", sample=True)),
{"rating__stddev": Approximate(0.66, 2)},
)
self.assertEqual(
Book.objects.aggregate(StdDev("price", sample=True)),
{"price__stddev": Approximate(Decimal("26.46"), 1)},
)
self.assertEqual(
Book.objects.aggregate(Variance("pages")),
{"pages__variance": Approximate(97010.80, 1)},
)
self.assertEqual(
Book.objects.aggregate(Variance("rating")),
{"rating__variance": Approximate(0.36, 1)},
)
self.assertEqual(
Book.objects.aggregate(Variance("price")),
{"price__variance": Approximate(Decimal("583.77"), 1)},
)
self.assertEqual(
Book.objects.aggregate(Variance("pages", sample=True)),
{"pages__variance": Approximate(116412.96, 1)},
)
self.assertEqual(
Book.objects.aggregate(Variance("rating", sample=True)),
{"rating__variance": Approximate(0.44, 2)},
)
self.assertEqual(
Book.objects.aggregate(Variance("price", sample=True)),
{"price__variance": Approximate(Decimal("700.53"), 2)},
)
def test_filtering_by_annotation_name(self):
# Regression test for #14476
# The name of the explicitly provided annotation name in this case
# poses no problem
qs = (
Author.objects.annotate(book_cnt=Count("book"))
.filter(book_cnt=2)
.order_by("name")
)
self.assertQuerysetEqual(qs, ["Peter Norvig"], lambda b: b.name)
# Neither in this case
qs = (
Author.objects.annotate(book_count=Count("book"))
.filter(book_count=2)
.order_by("name")
)
self.assertQuerysetEqual(qs, ["Peter Norvig"], lambda b: b.name)
# This case used to fail because the ORM couldn't resolve the
# automatically generated annotation name `book__count`
qs = (
Author.objects.annotate(Count("book"))
.filter(book__count=2)
.order_by("name")
)
self.assertQuerysetEqual(qs, ["Peter Norvig"], lambda b: b.name)
# Referencing the auto-generated name in an aggregate() also works.
self.assertEqual(
Author.objects.annotate(Count("book")).aggregate(Max("book__count")),
{"book__count__max": 2},
)
def test_annotate_joins(self):
"""
        The base table's join isn't promoted to LOUTER. This could cause the
        query generation to fail if the query also contains an exclude() on
        an FK field. Refs #19087.
"""
qs = Book.objects.annotate(n=Count("pk"))
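        # A join_type of None marks the base table; a promoted join would
        # show LOUTER here instead.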
self.assertIs(qs.query.alias_map["aggregation_regress_book"].join_type, None)
# The query executes without problems.
self.assertEqual(len(qs.exclude(publisher=-1)), 6)
@skipUnlessAnyDBFeature("allows_group_by_pk", "allows_group_by_selected_pks")
def test_aggregate_duplicate_columns(self):
# Regression test for #17144
results = Author.objects.annotate(num_contacts=Count("book_contact_set"))
# There should only be one GROUP BY clause, for the `id` column.
# `name` and `age` should not be grouped on.
_, _, group_by = results.query.get_compiler(using="default").pre_sql_setup()
self.assertEqual(len(group_by), 1)
self.assertIn("id", group_by[0][0])
self.assertNotIn("name", group_by[0][0])
self.assertNotIn("age", group_by[0][0])
self.assertEqual(
[(a.name, a.num_contacts) for a in results.order_by("name")],
[
("Adrian Holovaty", 1),
("Brad Dayley", 1),
("Jacob Kaplan-Moss", 0),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 0),
("Peter Norvig", 2),
("Stuart Russell", 0),
("Wesley J. Chun", 0),
],
)
@skipUnlessAnyDBFeature("allows_group_by_pk", "allows_group_by_selected_pks")
def test_aggregate_duplicate_columns_only(self):
# Works with only() too.
results = Author.objects.only("id", "name").annotate(
num_contacts=Count("book_contact_set")
)
_, _, grouping = results.query.get_compiler(using="default").pre_sql_setup()
self.assertEqual(len(grouping), 1)
self.assertIn("id", grouping[0][0])
self.assertNotIn("name", grouping[0][0])
self.assertNotIn("age", grouping[0][0])
self.assertEqual(
[(a.name, a.num_contacts) for a in results.order_by("name")],
[
("Adrian Holovaty", 1),
("Brad Dayley", 1),
("Jacob Kaplan-Moss", 0),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 0),
("Peter Norvig", 2),
("Stuart Russell", 0),
("Wesley J. Chun", 0),
],
)
@skipUnlessAnyDBFeature("allows_group_by_pk", "allows_group_by_selected_pks")
def test_aggregate_duplicate_columns_select_related(self):
# And select_related()
results = Book.objects.select_related("contact").annotate(
num_authors=Count("authors")
)
_, _, grouping = results.query.get_compiler(using="default").pre_sql_setup()
# In the case of `group_by_selected_pks` we also group by contact.id
# because of the select_related.
self.assertEqual(
len(grouping), 1 if connection.features.allows_group_by_pk else 2
)
self.assertIn("id", grouping[0][0])
self.assertNotIn("name", grouping[0][0])
self.assertNotIn("contact", grouping[0][0])
self.assertEqual(
[(b.name, b.num_authors) for b in results.order_by("name")],
[
("Artificial Intelligence: A Modern Approach", 2),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
1,
),
("Practical Django Projects", 1),
("Python Web Development with Django", 3),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 2),
],
)
@skipUnlessDBFeature("allows_group_by_selected_pks")
def test_aggregate_unmanaged_model_columns(self):
"""
Unmanaged models are sometimes used to represent database views which
may not allow grouping by selected primary key.
"""
def assertQuerysetResults(queryset):
self.assertEqual(
[(b.name, b.num_authors) for b in queryset.order_by("name")],
[
("Artificial Intelligence: A Modern Approach", 2),
(
"Paradigms of Artificial Intelligence Programming: Case "
"Studies in Common Lisp",
1,
),
("Practical Django Projects", 1),
("Python Web Development with Django", 3),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 2),
],
)
queryset = Book.objects.select_related("contact").annotate(
num_authors=Count("authors")
)
# Unmanaged origin model.
with mock.patch.object(Book._meta, "managed", False):
_, _, grouping = queryset.query.get_compiler(
using="default"
).pre_sql_setup()
self.assertEqual(len(grouping), len(Book._meta.fields) + 1)
for index, field in enumerate(Book._meta.fields):
self.assertIn(field.name, grouping[index][0])
self.assertIn(Author._meta.pk.name, grouping[-1][0])
assertQuerysetResults(queryset)
# Unmanaged related model.
with mock.patch.object(Author._meta, "managed", False):
_, _, grouping = queryset.query.get_compiler(
using="default"
).pre_sql_setup()
self.assertEqual(len(grouping), len(Author._meta.fields) + 1)
self.assertIn(Book._meta.pk.name, grouping[0][0])
for index, field in enumerate(Author._meta.fields):
self.assertIn(field.name, grouping[index + 1][0])
assertQuerysetResults(queryset)
@skipUnlessDBFeature("allows_group_by_selected_pks")
def test_aggregate_unmanaged_model_as_tables(self):
qs = Book.objects.select_related("contact").annotate(
num_authors=Count("authors")
)
# Force treating unmanaged models as tables.
with mock.patch(
"django.db.connection.features.allows_group_by_selected_pks_on_model",
return_value=True,
):
with mock.patch.object(Book._meta, "managed", False), mock.patch.object(
Author._meta, "managed", False
):
_, _, grouping = qs.query.get_compiler(using="default").pre_sql_setup()
self.assertEqual(len(grouping), 2)
self.assertIn("id", grouping[0][0])
self.assertIn("id", grouping[1][0])
self.assertQuerysetEqual(
qs.order_by("name"),
[
("Artificial Intelligence: A Modern Approach", 2),
(
"Paradigms of Artificial Intelligence Programming: Case "
"Studies in Common Lisp",
1,
),
("Practical Django Projects", 1),
("Python Web Development with Django", 3),
("Sams Teach Yourself Django in 24 Hours", 1),
(
"The Definitive Guide to Django: Web Development Done "
"Right",
2,
),
],
attrgetter("name", "num_authors"),
)
def test_reverse_join_trimming(self):
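        # The reverse FK join must be kept (not trimmed to the local column),
        # since the aggregate is computed over the joined rows.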
qs = Author.objects.annotate(Count("book_contact_set__contact"))
self.assertIn(" JOIN ", str(qs.query))
def test_aggregation_with_generic_reverse_relation(self):
"""
Regression test for #10870: Aggregates with joins ignore extra
        filters provided by setup_joins.
        Tests aggregations with generic reverse relations.
"""
django_book = Book.objects.get(name="Practical Django Projects")
ItemTag.objects.create(
object_id=django_book.id,
tag="intermediate",
content_type=ContentType.objects.get_for_model(django_book),
)
ItemTag.objects.create(
object_id=django_book.id,
tag="django",
content_type=ContentType.objects.get_for_model(django_book),
)
        # Assign a tag to a model with the same PK as the book above. If the
        # JOIN used in aggregation doesn't include the content type in its
        # condition, the annotation will also count the 'hi mom' tag for
        # django_book.
wmpk = WithManualPK.objects.create(id=django_book.pk)
ItemTag.objects.create(
object_id=wmpk.id,
tag="hi mom",
content_type=ContentType.objects.get_for_model(wmpk),
)
ai_book = Book.objects.get(
name__startswith="Paradigms of Artificial Intelligence"
)
ItemTag.objects.create(
object_id=ai_book.id,
tag="intermediate",
content_type=ContentType.objects.get_for_model(ai_book),
)
self.assertEqual(Book.objects.aggregate(Count("tags")), {"tags__count": 3})
results = Book.objects.annotate(Count("tags")).order_by("-tags__count", "name")
self.assertEqual(
[(b.name, b.tags__count) for b in results],
[
("Practical Django Projects", 2),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
1,
),
("Artificial Intelligence: A Modern Approach", 0),
("Python Web Development with Django", 0),
("Sams Teach Yourself Django in 24 Hours", 0),
("The Definitive Guide to Django: Web Development Done Right", 0),
],
)
def test_negated_aggregation(self):
expected_results = Author.objects.exclude(
pk__in=Author.objects.annotate(book_cnt=Count("book")).filter(book_cnt=2)
).order_by("name")
expected_results = [a.name for a in expected_results]
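        # exclude() with two identical Q conditions negates their
        # conjunction, which must match the explicit pk__in exclusion
        # computed above.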
qs = (
Author.objects.annotate(book_cnt=Count("book"))
.exclude(Q(book_cnt=2), Q(book_cnt=2))
.order_by("name")
)
self.assertQuerysetEqual(qs, expected_results, lambda b: b.name)
expected_results = Author.objects.exclude(
pk__in=Author.objects.annotate(book_cnt=Count("book")).filter(book_cnt=2)
).order_by("name")
expected_results = [a.name for a in expected_results]
qs = (
Author.objects.annotate(book_cnt=Count("book"))
.exclude(Q(book_cnt=2) | Q(book_cnt=2))
.order_by("name")
)
self.assertQuerysetEqual(qs, expected_results, lambda b: b.name)
def test_name_filters(self):
qs = (
Author.objects.annotate(Count("book"))
.filter(Q(book__count__exact=2) | Q(name="Adrian Holovaty"))
.order_by("name")
)
self.assertQuerysetEqual(
qs, ["Adrian Holovaty", "Peter Norvig"], lambda b: b.name
)
def test_name_expressions(self):
# Aggregates are spotted correctly from F objects.
        # Note that Adrian's age is 34 in the fixtures and he has one book,
        # so each condition matches exactly one author.
qs = (
Author.objects.annotate(Count("book"))
.filter(Q(name="Peter Norvig") | Q(age=F("book__count") + 33))
.order_by("name")
)
self.assertQuerysetEqual(
qs, ["Adrian Holovaty", "Peter Norvig"], lambda b: b.name
)
def test_filter_aggregates_or_connector(self):
q1 = Q(price__gt=50)
q2 = Q(authors__count__gt=1)
query = Book.objects.annotate(Count("authors")).filter(q1 | q2).order_by("pk")
self.assertQuerysetEqual(
query,
[self.b1.pk, self.b4.pk, self.b5.pk, self.b6.pk],
attrgetter("pk"),
)
def test_filter_aggregates_negated_and_connector(self):
q1 = Q(price__gt=50)
q2 = Q(authors__count__gt=1)
query = (
Book.objects.annotate(Count("authors")).filter(~(q1 & q2)).order_by("pk")
)
self.assertQuerysetEqual(
query,
[self.b1.pk, self.b2.pk, self.b3.pk, self.b4.pk, self.b6.pk],
attrgetter("pk"),
)
def test_filter_aggregates_xor_connector(self):
q1 = Q(price__gt=50)
q2 = Q(authors__count__gt=1)
query = Book.objects.annotate(Count("authors")).filter(q1 ^ q2).order_by("pk")
self.assertQuerysetEqual(
query,
[self.b1.pk, self.b4.pk, self.b6.pk],
attrgetter("pk"),
)
def test_filter_aggregates_negated_xor_connector(self):
q1 = Q(price__gt=50)
q2 = Q(authors__count__gt=1)
query = (
Book.objects.annotate(Count("authors")).filter(~(q1 ^ q2)).order_by("pk")
)
self.assertQuerysetEqual(
query,
[self.b2.pk, self.b3.pk, self.b5.pk],
attrgetter("pk"),
)
def test_ticket_11293_q_immutable(self):
"""
        Splitting a Q object into WHERE and HAVING parts doesn't alter the
        original Q object.
"""
q1 = Q(isbn="")
q2 = Q(authors__count__gt=1)
query = Book.objects.annotate(Count("authors"))
query.filter(q1 | q2)
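        # If filter() had mutated q2 while splitting it between WHERE and
        # HAVING, q2.children would have grown beyond its single condition.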
self.assertEqual(len(q2.children), 1)
def test_fobj_group_by(self):
"""
        An F() object referring to a related column works correctly in GROUP
        BY.
"""
qs = Book.objects.annotate(account=Count("authors")).filter(
account=F("publisher__num_awards")
)
self.assertQuerysetEqual(
qs, ["Sams Teach Yourself Django in 24 Hours"], lambda b: b.name
)
def test_annotate_reserved_word(self):
"""
Regression #18333 - Ensure annotated column name is properly quoted.
"""
vals = Book.objects.annotate(select=Count("authors__id")).aggregate(
Sum("select"), Avg("select")
)
self.assertEqual(
vals,
{
"select__sum": 10,
"select__avg": Approximate(1.666, places=2),
},
)
def test_annotate_on_relation(self):
book = Book.objects.annotate(
avg_price=Avg("price"), publisher_name=F("publisher__name")
).get(pk=self.b1.pk)
self.assertEqual(book.avg_price, 30.00)
self.assertEqual(book.publisher_name, "Apress")
def test_aggregate_on_relation(self):
# A query with an existing annotation aggregation on a relation should
# succeed.
qs = Book.objects.annotate(avg_price=Avg("price")).aggregate(
publisher_awards=Sum("publisher__num_awards")
)
self.assertEqual(qs["publisher_awards"], 30)
def test_annotate_distinct_aggregate(self):
# There are three books with rating of 4.0 and two of the books have
# the same price. Hence, the distinct removes one rating of 4.0
# from the results.
vals1 = (
Book.objects.values("rating", "price")
.distinct()
.aggregate(result=Sum("rating"))
)
vals2 = Book.objects.aggregate(result=Sum("rating") - Value(4.0))
self.assertEqual(vals1, vals2)
def test_annotate_values_list_flat(self):
"""Find ages that are shared by at least two authors."""
qs = (
Author.objects.values_list("age", flat=True)
.annotate(age_count=Count("age"))
.filter(age_count__gt=1)
)
self.assertSequenceEqual(qs, [29])
def test_allow_distinct(self):
class MyAggregate(Aggregate):
pass
with self.assertRaisesMessage(TypeError, "MyAggregate does not allow distinct"):
MyAggregate("foo", distinct=True)
class DistinctAggregate(Aggregate):
allow_distinct = True
DistinctAggregate("foo", distinct=True)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_having_subquery_select(self):
authors = Author.objects.filter(pk=self.a1.pk)
books = Book.objects.annotate(Count("authors")).filter(
Q(authors__in=authors) | Q(authors__count__gt=2)
)
self.assertEqual(set(books), {self.b1, self.b4})
class JoinPromotionTests(TestCase):
def test_ticket_21150(self):
b = Bravo.objects.create()
c = Charlie.objects.create(bravo=b)
qs = Charlie.objects.select_related("alfa").annotate(Count("bravo__charlie"))
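        # The select_related() join to alfa must stay LEFT OUTER so the
        # Charlie row with alfa=None survives alongside the aggregation.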
self.assertSequenceEqual(qs, [c])
self.assertIs(qs[0].alfa, None)
a = Alfa.objects.create()
c.alfa = a
c.save()
# Force re-evaluation
qs = qs.all()
self.assertSequenceEqual(qs, [c])
self.assertEqual(qs[0].alfa, a)
def test_existing_join_not_promoted(self):
# No promotion for existing joins
qs = Charlie.objects.filter(alfa__name__isnull=False).annotate(
Count("alfa__name")
)
self.assertIn(" INNER JOIN ", str(qs.query))
        # Also, the existing join is demoted back to INNER when filtering on
        # an already promoted join.
qs = Charlie.objects.annotate(Count("alfa__name")).filter(
alfa__name__isnull=False
)
self.assertIn(" INNER JOIN ", str(qs.query))
        # But, as the join is nullable, its first use by annotate() will be
        # LOUTER.
qs = Charlie.objects.annotate(Count("alfa__name"))
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
def test_non_nullable_fk_not_promoted(self):
qs = Book.objects.annotate(Count("contact__name"))
self.assertIn(" INNER JOIN ", str(qs.query))
class SelfReferentialFKTests(TestCase):
def test_ticket_24748(self):
t1 = SelfRefFK.objects.create(name="t1")
SelfRefFK.objects.create(name="t2", parent=t1)
SelfRefFK.objects.create(name="t3", parent=t1)
self.assertQuerysetEqual(
SelfRefFK.objects.annotate(num_children=Count("children")).order_by("name"),
[("t1", 2), ("t2", 0), ("t3", 0)],
lambda x: (x.name, x.num_children),
)
|
c0d07da6b668fa2b21b372629e040cfd46765f1a8dabebde6ef8a2a8dc7e6c71 | from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldError
from django.db.models import Q
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from .models import (
AllowsNullGFK,
Animal,
Carrot,
Comparison,
ConcreteRelatedModel,
ForConcreteModelModel,
ForProxyModelModel,
Gecko,
ManualPK,
Mineral,
ProxyRelatedModel,
Rock,
TaggedItem,
ValuableRock,
ValuableTaggedItem,
Vegetable,
)
class GenericRelationsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.lion = Animal.objects.create(common_name="Lion", latin_name="Panthera leo")
cls.platypus = Animal.objects.create(
common_name="Platypus",
latin_name="Ornithorhynchus anatinus",
)
Vegetable.objects.create(name="Eggplant", is_yucky=True)
cls.bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
cls.quartz = Mineral.objects.create(name="Quartz", hardness=7)
# Tagging stuff.
cls.fatty = cls.bacon.tags.create(tag="fatty")
cls.salty = cls.bacon.tags.create(tag="salty")
cls.yellow = cls.lion.tags.create(tag="yellow")
cls.hairy = cls.lion.tags.create(tag="hairy")
def comp_func(self, obj):
# Original list of tags:
return obj.tag, obj.content_type.model_class(), obj.object_id
def test_generic_update_or_create_when_created(self):
"""
Should be able to use update_or_create from the generic related manager
to create a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag, created = self.bacon.tags.update_or_create(tag="stinky")
self.assertTrue(created)
self.assertEqual(count + 1, self.bacon.tags.count())
def test_generic_update_or_create_when_updated(self):
"""
Should be able to use update_or_create from the generic related manager
to update a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag = self.bacon.tags.create(tag="stinky")
self.assertEqual(count + 1, self.bacon.tags.count())
tag, created = self.bacon.tags.update_or_create(
defaults={"tag": "juicy"}, id=tag.id
)
self.assertFalse(created)
self.assertEqual(count + 1, self.bacon.tags.count())
self.assertEqual(tag.tag, "juicy")
def test_generic_get_or_create_when_created(self):
"""
Should be able to use get_or_create from the generic related manager
to create a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag, created = self.bacon.tags.get_or_create(tag="stinky")
self.assertTrue(created)
self.assertEqual(count + 1, self.bacon.tags.count())
def test_generic_get_or_create_when_exists(self):
"""
Should be able to use get_or_create from the generic related manager
to get a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag = self.bacon.tags.create(tag="stinky")
self.assertEqual(count + 1, self.bacon.tags.count())
tag, created = self.bacon.tags.get_or_create(
id=tag.id, defaults={"tag": "juicy"}
)
self.assertFalse(created)
self.assertEqual(count + 1, self.bacon.tags.count())
        # The tag shouldn't have changed.
self.assertEqual(tag.tag, "stinky")
def test_generic_relations_m2m_mimic(self):
"""
Objects with declared GenericRelations can be tagged directly -- the
API mimics the many-to-many API.
"""
self.assertSequenceEqual(self.lion.tags.all(), [self.hairy, self.yellow])
self.assertSequenceEqual(self.bacon.tags.all(), [self.fatty, self.salty])
def test_access_content_object(self):
"""
Test accessing the content object like a foreign key.
"""
tagged_item = TaggedItem.objects.get(tag="salty")
self.assertEqual(tagged_item.content_object, self.bacon)
def test_query_content_object(self):
qs = TaggedItem.objects.filter(animal__isnull=False).order_by(
"animal__common_name", "tag"
)
self.assertSequenceEqual(qs, [self.hairy, self.yellow])
mpk = ManualPK.objects.create(id=1)
mpk.tags.create(tag="mpk")
qs = TaggedItem.objects.filter(
Q(animal__isnull=False) | Q(manualpk__id=1)
).order_by("tag")
self.assertQuerysetEqual(qs, ["hairy", "mpk", "yellow"], lambda x: x.tag)
def test_exclude_generic_relations(self):
"""
Test lookups over an object without GenericRelations.
"""
        # Recall that the Mineral class doesn't have an explicit GenericRelation
        # defined. That's OK, because you can create TaggedItems explicitly.
        # However, omitting the GenericRelation means your lookups have to be a
        # bit more explicit.
shiny = TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
clearish = TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
ctype = ContentType.objects.get_for_model(self.quartz)
q = TaggedItem.objects.filter(
content_type__pk=ctype.id, object_id=self.quartz.id
)
self.assertSequenceEqual(q, [clearish, shiny])
def test_access_via_content_type(self):
"""
Test lookups through content type.
"""
self.lion.delete()
self.platypus.tags.create(tag="fatty")
ctype = ContentType.objects.get_for_model(self.platypus)
self.assertSequenceEqual(
Animal.objects.filter(tags__content_type=ctype),
[self.platypus],
)
def test_set_foreign_key(self):
"""
You can set a generic foreign key in the way you'd expect.
"""
tag1 = TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
tag1.content_object = self.platypus
tag1.save()
self.assertSequenceEqual(self.platypus.tags.all(), [tag1])
def test_queries_across_generic_relations(self):
"""
Queries across generic relations respect the content types. Even though
there are two TaggedItems with a tag of "fatty", this query only pulls
out the one with the content type related to Animals.
"""
self.assertSequenceEqual(
Animal.objects.order_by("common_name"),
[self.lion, self.platypus],
)
def test_queries_content_type_restriction(self):
"""
Create another fatty tagged instance with different PK to ensure there
is a content type restriction in the generated queries below.
"""
mpk = ManualPK.objects.create(id=self.lion.pk)
mpk.tags.create(tag="fatty")
self.platypus.tags.create(tag="fatty")
self.assertSequenceEqual(
Animal.objects.filter(tags__tag="fatty"),
[self.platypus],
)
self.assertSequenceEqual(
Animal.objects.exclude(tags__tag="fatty"),
[self.lion],
)
def test_object_deletion_with_generic_relation(self):
"""
        If you delete an object with an explicit GenericRelation, the related
        objects are deleted along with it.
"""
self.assertQuerysetEqual(
TaggedItem.objects.all(),
[
("fatty", Vegetable, self.bacon.pk),
("hairy", Animal, self.lion.pk),
("salty", Vegetable, self.bacon.pk),
("yellow", Animal, self.lion.pk),
],
self.comp_func,
)
self.lion.delete()
self.assertQuerysetEqual(
TaggedItem.objects.all(),
[
("fatty", Vegetable, self.bacon.pk),
("salty", Vegetable, self.bacon.pk),
],
self.comp_func,
)
def test_object_deletion_without_generic_relation(self):
"""
        If a GenericRelation is not explicitly defined, related objects
        remain after deletion of the source object.
"""
TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
quartz_pk = self.quartz.pk
self.quartz.delete()
self.assertQuerysetEqual(
TaggedItem.objects.all(),
[
("clearish", Mineral, quartz_pk),
("fatty", Vegetable, self.bacon.pk),
("hairy", Animal, self.lion.pk),
("salty", Vegetable, self.bacon.pk),
("yellow", Animal, self.lion.pk),
],
self.comp_func,
)
def test_tag_deletion_related_objects_unaffected(self):
"""
If you delete a tag, the objects using the tag are unaffected (other
than losing a tag).
"""
ctype = ContentType.objects.get_for_model(self.lion)
tag = TaggedItem.objects.get(
content_type__pk=ctype.id, object_id=self.lion.id, tag="hairy"
)
tag.delete()
self.assertSequenceEqual(self.lion.tags.all(), [self.yellow])
self.assertQuerysetEqual(
TaggedItem.objects.all(),
[
("fatty", Vegetable, self.bacon.pk),
("salty", Vegetable, self.bacon.pk),
("yellow", Animal, self.lion.pk),
],
self.comp_func,
)
def test_add_bulk(self):
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
t1 = TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
t2 = TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
# One update() query.
with self.assertNumQueries(1):
bacon.tags.add(t1, t2)
self.assertEqual(t1.content_object, bacon)
self.assertEqual(t2.content_object, bacon)
def test_add_bulk_false(self):
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
t1 = TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
t2 = TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
# One save() for each object.
with self.assertNumQueries(2):
bacon.tags.add(t1, t2, bulk=False)
self.assertEqual(t1.content_object, bacon)
self.assertEqual(t2.content_object, bacon)
def test_add_rejects_unsaved_objects(self):
t1 = TaggedItem(content_object=self.quartz, tag="shiny")
msg = (
"<TaggedItem: shiny> instance isn't saved. Use bulk=False or save the "
"object first."
)
with self.assertRaisesMessage(ValueError, msg):
self.bacon.tags.add(t1)
def test_add_rejects_wrong_instances(self):
msg = "'TaggedItem' instance expected, got <Animal: Lion>"
with self.assertRaisesMessage(TypeError, msg):
self.bacon.tags.add(self.lion)
def test_set(self):
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
fatty = bacon.tags.create(tag="fatty")
salty = bacon.tags.create(tag="salty")
bacon.tags.set([fatty, salty])
self.assertSequenceEqual(bacon.tags.all(), [fatty, salty])
bacon.tags.set([fatty])
self.assertSequenceEqual(bacon.tags.all(), [fatty])
bacon.tags.set([])
self.assertSequenceEqual(bacon.tags.all(), [])
bacon.tags.set([fatty, salty], bulk=False, clear=True)
self.assertSequenceEqual(bacon.tags.all(), [fatty, salty])
bacon.tags.set([fatty], bulk=False, clear=True)
self.assertSequenceEqual(bacon.tags.all(), [fatty])
bacon.tags.set([], clear=True)
self.assertSequenceEqual(bacon.tags.all(), [])
def test_assign(self):
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
fatty = bacon.tags.create(tag="fatty")
salty = bacon.tags.create(tag="salty")
bacon.tags.set([fatty, salty])
self.assertSequenceEqual(bacon.tags.all(), [fatty, salty])
bacon.tags.set([fatty])
self.assertSequenceEqual(bacon.tags.all(), [fatty])
bacon.tags.set([])
self.assertSequenceEqual(bacon.tags.all(), [])
def test_assign_with_queryset(self):
# Querysets used in reverse GFK assignments are pre-evaluated so their
# value isn't affected by the clearing operation
# in ManyRelatedManager.set() (#19816).
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
bacon.tags.create(tag="fatty")
bacon.tags.create(tag="salty")
self.assertEqual(2, bacon.tags.count())
qs = bacon.tags.filter(tag="fatty")
bacon.tags.set(qs)
self.assertEqual(1, bacon.tags.count())
self.assertEqual(1, qs.count())
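        # set() evaluates the given queryset up front (roughly
        # objs = tuple(objs) internally), which is why qs above still yields
        # its original row after the clearing phase.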
def test_clear(self):
self.assertSequenceEqual(
TaggedItem.objects.order_by("tag"),
[self.fatty, self.hairy, self.salty, self.yellow],
)
self.bacon.tags.clear()
self.assertSequenceEqual(self.bacon.tags.all(), [])
self.assertSequenceEqual(
TaggedItem.objects.order_by("tag"),
[self.hairy, self.yellow],
)
def test_remove(self):
self.assertSequenceEqual(
TaggedItem.objects.order_by("tag"),
[self.fatty, self.hairy, self.salty, self.yellow],
)
self.bacon.tags.remove(self.fatty)
self.assertSequenceEqual(self.bacon.tags.all(), [self.salty])
self.assertSequenceEqual(
TaggedItem.objects.order_by("tag"),
[self.hairy, self.salty, self.yellow],
)
def test_generic_relation_related_name_default(self):
# GenericRelation isn't usable from the reverse side by default.
msg = (
"Cannot resolve keyword 'vegetable' into field. Choices are: "
"animal, content_object, content_type, content_type_id, id, "
"manualpk, object_id, tag, valuabletaggeditem"
)
with self.assertRaisesMessage(FieldError, msg):
TaggedItem.objects.filter(vegetable__isnull=True)
def test_multiple_gfk(self):
        # Simple tests for multiple GenericForeignKeys. Only one model is
        # used, since the above tests should be sufficient.
tiger = Animal.objects.create(common_name="tiger")
cheetah = Animal.objects.create(common_name="cheetah")
bear = Animal.objects.create(common_name="bear")
# Create directly
c1 = Comparison.objects.create(
first_obj=cheetah, other_obj=tiger, comparative="faster"
)
c2 = Comparison.objects.create(
first_obj=tiger, other_obj=cheetah, comparative="cooler"
)
# Create using GenericRelation
c3 = tiger.comparisons.create(other_obj=bear, comparative="cooler")
c4 = tiger.comparisons.create(other_obj=cheetah, comparative="stronger")
self.assertSequenceEqual(cheetah.comparisons.all(), [c1])
# Filtering works
self.assertCountEqual(
tiger.comparisons.filter(comparative="cooler"),
[c2, c3],
)
# Filtering and deleting works
subjective = ["cooler"]
tiger.comparisons.filter(comparative__in=subjective).delete()
self.assertCountEqual(Comparison.objects.all(), [c1, c4])
# If we delete cheetah, Comparisons with cheetah as 'first_obj' will be
# deleted since Animal has an explicit GenericRelation to Comparison
# through first_obj. Comparisons with cheetah as 'other_obj' will not
# be deleted.
cheetah.delete()
self.assertSequenceEqual(Comparison.objects.all(), [c4])
def test_gfk_subclasses(self):
# GenericForeignKey should work with subclasses (see #8309)
quartz = Mineral.objects.create(name="Quartz", hardness=7)
valuedtag = ValuableTaggedItem.objects.create(
content_object=quartz, tag="shiny", value=10
)
self.assertEqual(valuedtag.content_object, quartz)
def test_generic_relation_to_inherited_child(self):
# GenericRelations to models that use multi-table inheritance work.
granite = ValuableRock.objects.create(name="granite", hardness=5)
ValuableTaggedItem.objects.create(
content_object=granite, tag="countertop", value=1
)
self.assertEqual(ValuableRock.objects.filter(tags__value=1).count(), 1)
# We're generating a slightly inefficient query for tags__tag - we
# first join ValuableRock -> TaggedItem -> ValuableTaggedItem, and then
# we fetch tag by joining TaggedItem from ValuableTaggedItem. The last
# join isn't necessary, as TaggedItem <-> ValuableTaggedItem is a
# one-to-one join.
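        # Approximately, the generated joins look like:
        #   ValuableRock -> TaggedItem (generic join on content type/object id)
        #     -> ValuableTaggedItem (inheritance join)
        #       -> TaggedItem again (redundant parent-link join to read "tag")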
self.assertEqual(ValuableRock.objects.filter(tags__tag="countertop").count(), 1)
granite.delete() # deleting the rock should delete the related tag.
self.assertEqual(ValuableTaggedItem.objects.count(), 0)
def test_gfk_manager(self):
# GenericForeignKey should not use the default manager (which may
# filter objects).
tailless = Gecko.objects.create(has_tail=False)
tag = TaggedItem.objects.create(content_object=tailless, tag="lizard")
self.assertEqual(tag.content_object, tailless)
def test_subclasses_with_gen_rel(self):
"""
Concrete model subclasses with generic relations work
correctly (ticket 11263).
"""
granite = Rock.objects.create(name="granite", hardness=5)
TaggedItem.objects.create(content_object=granite, tag="countertop")
self.assertEqual(Rock.objects.get(tags__tag="countertop"), granite)
def test_subclasses_with_parent_gen_rel(self):
"""
Generic relations on a base class (Vegetable) work correctly in
subclasses (Carrot).
"""
        carrot = Carrot.objects.create(name="carrot")
        TaggedItem.objects.create(content_object=carrot, tag="orange")
        self.assertEqual(Carrot.objects.get(tags__tag="orange"), carrot)
def test_get_or_create(self):
# get_or_create should work with virtual fields (content_object)
quartz = Mineral.objects.create(name="Quartz", hardness=7)
tag, created = TaggedItem.objects.get_or_create(
tag="shiny", defaults={"content_object": quartz}
)
self.assertTrue(created)
self.assertEqual(tag.tag, "shiny")
self.assertEqual(tag.content_object.id, quartz.id)
def test_update_or_create_defaults(self):
# update_or_create should work with virtual fields (content_object)
quartz = Mineral.objects.create(name="Quartz", hardness=7)
diamond = Mineral.objects.create(name="Diamond", hardness=7)
tag, created = TaggedItem.objects.update_or_create(
tag="shiny", defaults={"content_object": quartz}
)
self.assertTrue(created)
self.assertEqual(tag.content_object.id, quartz.id)
tag, created = TaggedItem.objects.update_or_create(
tag="shiny", defaults={"content_object": diamond}
)
self.assertFalse(created)
self.assertEqual(tag.content_object.id, diamond.id)
def test_query_content_type(self):
msg = "Field 'content_object' does not generate an automatic reverse relation"
with self.assertRaisesMessage(FieldError, msg):
TaggedItem.objects.get(content_object="")
def test_unsaved_generic_foreign_key_parent_save(self):
quartz = Mineral(name="Quartz", hardness=7)
tagged_item = TaggedItem(tag="shiny", content_object=quartz)
msg = (
"save() prohibited to prevent data loss due to unsaved related object "
"'content_object'."
)
with self.assertRaisesMessage(ValueError, msg):
tagged_item.save()
@skipUnlessDBFeature("has_bulk_insert")
def test_unsaved_generic_foreign_key_parent_bulk_create(self):
quartz = Mineral(name="Quartz", hardness=7)
tagged_item = TaggedItem(tag="shiny", content_object=quartz)
msg = (
"bulk_create() prohibited to prevent data loss due to unsaved related "
"object 'content_object'."
)
with self.assertRaisesMessage(ValueError, msg):
TaggedItem.objects.bulk_create([tagged_item])
def test_cache_invalidation_for_content_type_id(self):
# Create a Vegetable and Mineral with the same id.
new_id = (
max(
Vegetable.objects.order_by("-id")[0].id,
Mineral.objects.order_by("-id")[0].id,
)
+ 1
)
broccoli = Vegetable.objects.create(id=new_id, name="Broccoli")
diamond = Mineral.objects.create(id=new_id, name="Diamond", hardness=7)
tag = TaggedItem.objects.create(content_object=broccoli, tag="yummy")
tag.content_type = ContentType.objects.get_for_model(diamond)
self.assertEqual(tag.content_object, diamond)
def test_cache_invalidation_for_object_id(self):
broccoli = Vegetable.objects.create(name="Broccoli")
cauliflower = Vegetable.objects.create(name="Cauliflower")
tag = TaggedItem.objects.create(content_object=broccoli, tag="yummy")
tag.object_id = cauliflower.id
self.assertEqual(tag.content_object, cauliflower)
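        # A rough sketch of the mechanism: the GenericForeignKey descriptor
        # caches the fetched object along with the content type and pk it was
        # loaded for, so assigning a new content_type or object_id makes the
        # cached value stale and the next access refetches.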
def test_assign_content_object_in_init(self):
spinach = Vegetable(name="spinach")
tag = TaggedItem(content_object=spinach)
self.assertEqual(tag.content_object, spinach)
def test_create_after_prefetch(self):
platypus = Animal.objects.prefetch_related("tags").get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [])
weird_tag = platypus.tags.create(tag="weird")
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
def test_add_after_prefetch(self):
platypus = Animal.objects.prefetch_related("tags").get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [])
weird_tag = TaggedItem.objects.create(tag="weird", content_object=platypus)
platypus.tags.add(weird_tag)
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
def test_remove_after_prefetch(self):
weird_tag = self.platypus.tags.create(tag="weird")
platypus = Animal.objects.prefetch_related("tags").get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
platypus.tags.remove(weird_tag)
self.assertSequenceEqual(platypus.tags.all(), [])
def test_clear_after_prefetch(self):
weird_tag = self.platypus.tags.create(tag="weird")
platypus = Animal.objects.prefetch_related("tags").get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
platypus.tags.clear()
self.assertSequenceEqual(platypus.tags.all(), [])
def test_set_after_prefetch(self):
platypus = Animal.objects.prefetch_related("tags").get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [])
furry_tag = TaggedItem.objects.create(tag="furry", content_object=platypus)
platypus.tags.set([furry_tag])
self.assertSequenceEqual(platypus.tags.all(), [furry_tag])
weird_tag = TaggedItem.objects.create(tag="weird", content_object=platypus)
platypus.tags.set([weird_tag])
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
def test_add_then_remove_after_prefetch(self):
furry_tag = self.platypus.tags.create(tag="furry")
platypus = Animal.objects.prefetch_related("tags").get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [furry_tag])
weird_tag = self.platypus.tags.create(tag="weird")
platypus.tags.add(weird_tag)
self.assertSequenceEqual(platypus.tags.all(), [furry_tag, weird_tag])
platypus.tags.remove(weird_tag)
self.assertSequenceEqual(platypus.tags.all(), [furry_tag])
def test_prefetch_related_different_content_types(self):
TaggedItem.objects.create(content_object=self.platypus, tag="prefetch_tag_1")
TaggedItem.objects.create(
content_object=Vegetable.objects.create(name="Broccoli"),
tag="prefetch_tag_2",
)
TaggedItem.objects.create(
content_object=Animal.objects.create(common_name="Bear"),
tag="prefetch_tag_3",
)
qs = TaggedItem.objects.filter(
tag__startswith="prefetch_tag_",
).prefetch_related("content_object", "content_object__tags")
with self.assertNumQueries(4):
tags = list(qs)
for tag in tags:
self.assertSequenceEqual(tag.content_object.tags.all(), [tag])
def test_prefetch_related_custom_object_id(self):
tiger = Animal.objects.create(common_name="tiger")
cheetah = Animal.objects.create(common_name="cheetah")
Comparison.objects.create(
first_obj=cheetah,
other_obj=tiger,
comparative="faster",
)
Comparison.objects.create(
first_obj=tiger,
other_obj=cheetah,
comparative="cooler",
)
qs = Comparison.objects.prefetch_related("first_obj__comparisons")
for comparison in qs:
self.assertSequenceEqual(
comparison.first_obj.comparisons.all(), [comparison]
)
class ProxyRelatedModelTest(TestCase):
def test_default_behavior(self):
"""
The default for for_concrete_model should be True
"""
base = ForConcreteModelModel()
base.obj = rel = ProxyRelatedModel.objects.create()
base.save()
base = ForConcreteModelModel.objects.get(pk=base.pk)
rel = ConcreteRelatedModel.objects.get(pk=rel.pk)
self.assertEqual(base.obj, rel)
def test_works_normally(self):
"""
When for_concrete_model is False, we should still be able to get
an instance of the concrete class.
"""
base = ForProxyModelModel()
base.obj = rel = ConcreteRelatedModel.objects.create()
base.save()
base = ForProxyModelModel.objects.get(pk=base.pk)
self.assertEqual(base.obj, rel)
def test_proxy_is_returned(self):
"""
Instances of the proxy should be returned when
for_concrete_model is False.
"""
base = ForProxyModelModel()
base.obj = ProxyRelatedModel.objects.create()
base.save()
base = ForProxyModelModel.objects.get(pk=base.pk)
self.assertIsInstance(base.obj, ProxyRelatedModel)
def test_query(self):
base = ForProxyModelModel()
base.obj = rel = ConcreteRelatedModel.objects.create()
base.save()
self.assertEqual(rel, ConcreteRelatedModel.objects.get(bases__id=base.id))
def test_query_proxy(self):
base = ForProxyModelModel()
base.obj = rel = ProxyRelatedModel.objects.create()
base.save()
self.assertEqual(rel, ProxyRelatedModel.objects.get(bases__id=base.id))
def test_generic_relation(self):
base = ForProxyModelModel()
base.obj = ProxyRelatedModel.objects.create()
base.save()
base = ForProxyModelModel.objects.get(pk=base.pk)
rel = ProxyRelatedModel.objects.get(pk=base.obj.pk)
self.assertEqual(base, rel.bases.get())
def test_generic_relation_set(self):
base = ForProxyModelModel()
base.obj = ConcreteRelatedModel.objects.create()
base.save()
newrel = ConcreteRelatedModel.objects.create()
newrel.bases.set([base])
newrel = ConcreteRelatedModel.objects.get(pk=newrel.pk)
self.assertEqual(base, newrel.bases.get())
class TestInitWithNoneArgument(SimpleTestCase):
def test_none_allowed(self):
# AllowsNullGFK doesn't require a content_type, so None argument should
# also be allowed.
AllowsNullGFK(content_object=None)
# TaggedItem requires a content_type but initializing with None should
# be allowed.
TaggedItem(content_object=None)
|
d5753d64c50671f2b59bb7ee3ca485d772e508459c6b0a407048bbcab371022c | from django.db.models import ProtectedError, Q, Sum
from django.forms.models import modelform_factory
from django.test import TestCase, skipIfDBFeature
from .models import (
A,
Address,
B,
Board,
C,
Cafe,
CharLink,
Company,
Contact,
Content,
D,
Guild,
HasLinkThing,
Link,
Node,
Note,
OddRelation1,
OddRelation2,
Organization,
Person,
Place,
Related,
Restaurant,
Tag,
Team,
TextLink,
)
class GenericRelationTests(TestCase):
def test_inherited_models_content_type(self):
"""
GenericRelations on inherited classes use the correct content type.
"""
p = Place.objects.create(name="South Park")
r = Restaurant.objects.create(name="Chubby's")
l1 = Link.objects.create(content_object=p)
l2 = Link.objects.create(content_object=r)
self.assertEqual(list(p.links.all()), [l1])
self.assertEqual(list(r.links.all()), [l2])
def test_reverse_relation_pk(self):
"""
The correct column name is used for the primary key on the
originating model of a query. See #12664.
"""
p = Person.objects.create(account=23, name="Chef")
Address.objects.create(
street="123 Anywhere Place",
city="Conifer",
state="CO",
zipcode="80433",
content_object=p,
)
qs = Person.objects.filter(addresses__zipcode="80433")
self.assertEqual(1, qs.count())
self.assertEqual("Chef", qs[0].name)
def test_charlink_delete(self):
oddrel = OddRelation1.objects.create(name="clink")
CharLink.objects.create(content_object=oddrel)
oddrel.delete()
def test_textlink_delete(self):
oddrel = OddRelation2.objects.create(name="tlink")
TextLink.objects.create(content_object=oddrel)
oddrel.delete()
def test_coerce_object_id_remote_field_cache_persistence(self):
restaurant = Restaurant.objects.create()
CharLink.objects.create(content_object=restaurant)
charlink = CharLink.objects.latest("pk")
self.assertIs(charlink.content_object, charlink.content_object)
        # The cache should persist even if the model (Cafe) uses more than one
        # level of multi-table inheritance.
cafe = Cafe.objects.create()
CharLink.objects.create(content_object=cafe)
charlink = CharLink.objects.latest("pk")
self.assertIs(charlink.content_object, charlink.content_object)
def test_q_object_or(self):
"""
SQL query parameters for generic relations are properly
grouped when OR is used (#11535).
        In the reported bug, the first query (below) worked while the second,
        with the same query parameters in reverse order, did not. The issue
        was that the generic relation conditions did not get properly grouped
        in parentheses.
"""
note_contact = Contact.objects.create()
org_contact = Contact.objects.create()
Note.objects.create(note="note", content_object=note_contact)
org = Organization.objects.create(name="org name")
org.contacts.add(org_contact)
# search with a non-matching note and a matching org name
qs = Contact.objects.filter(
Q(notes__note__icontains=r"other note")
| Q(organizations__name__icontains=r"org name")
)
self.assertIn(org_contact, qs)
# search again, with the same query parameters, in reverse order
qs = Contact.objects.filter(
Q(organizations__name__icontains=r"org name")
| Q(notes__note__icontains=r"other note")
)
self.assertIn(org_contact, qs)
def test_join_reuse(self):
qs = Person.objects.filter(addresses__street="foo").filter(
addresses__street="bar"
)
self.assertEqual(str(qs.query).count("JOIN"), 2)
def test_generic_relation_ordering(self):
"""
        Ordering over a generic relation does not include extraneous
        duplicate results, nor does it exclude rows not participating in the
        relation.
"""
p1 = Place.objects.create(name="South Park")
p2 = Place.objects.create(name="The City")
c = Company.objects.create(name="Chubby's Intl.")
Link.objects.create(content_object=p1)
Link.objects.create(content_object=c)
places = list(Place.objects.order_by("links__id"))
def count_places(place):
return len([p for p in places if p.id == place.id])
self.assertEqual(len(places), 2)
self.assertEqual(count_places(p1), 1)
self.assertEqual(count_places(p2), 1)
def test_target_model_len_zero(self):
"""
Saving a model with a GenericForeignKey to a model instance whose
__len__ method returns 0 (Team.__len__() here) shouldn't fail (#13085).
"""
team1 = Team.objects.create(name="Backend devs")
note = Note(note="Deserve a bonus", content_object=team1)
note.save()
def test_target_model_bool_false(self):
"""
Saving a model with a GenericForeignKey to a model instance whose
__bool__ method returns False (Guild.__bool__() here) shouldn't fail
(#13085).
"""
g1 = Guild.objects.create(name="First guild")
note = Note(note="Note for guild", content_object=g1)
note.save()
@skipIfDBFeature("interprets_empty_strings_as_nulls")
def test_gfk_to_model_with_empty_pk(self):
"""Test related to #13085"""
# Saving model with GenericForeignKey to model instance with an
# empty CharField PK
b1 = Board.objects.create(name="")
tag = Tag(label="VP", content_object=b1)
tag.save()
def test_ticket_20378(self):
# Create a couple of extra HasLinkThing so that the autopk value
# isn't the same for Link and HasLinkThing.
hs1 = HasLinkThing.objects.create()
hs2 = HasLinkThing.objects.create()
hs3 = HasLinkThing.objects.create()
hs4 = HasLinkThing.objects.create()
l1 = Link.objects.create(content_object=hs3)
l2 = Link.objects.create(content_object=hs4)
self.assertSequenceEqual(HasLinkThing.objects.filter(links=l1), [hs3])
self.assertSequenceEqual(HasLinkThing.objects.filter(links=l2), [hs4])
self.assertSequenceEqual(
HasLinkThing.objects.exclude(links=l2), [hs1, hs2, hs3]
)
self.assertSequenceEqual(
HasLinkThing.objects.exclude(links=l1), [hs1, hs2, hs4]
)
def test_ticket_20564(self):
b1 = B.objects.create()
b2 = B.objects.create()
b3 = B.objects.create()
c1 = C.objects.create(b=b1)
c2 = C.objects.create(b=b2)
c3 = C.objects.create(b=b3)
A.objects.create(flag=None, content_object=b1)
A.objects.create(flag=True, content_object=b2)
self.assertSequenceEqual(C.objects.filter(b__a__flag=None), [c1, c3])
self.assertSequenceEqual(C.objects.exclude(b__a__flag=None), [c2])
def test_ticket_20564_nullable_fk(self):
b1 = B.objects.create()
b2 = B.objects.create()
b3 = B.objects.create()
d1 = D.objects.create(b=b1)
d2 = D.objects.create(b=b2)
d3 = D.objects.create(b=b3)
d4 = D.objects.create()
A.objects.create(flag=None, content_object=b1)
A.objects.create(flag=True, content_object=b1)
A.objects.create(flag=True, content_object=b2)
self.assertSequenceEqual(D.objects.exclude(b__a__flag=None), [d2])
self.assertSequenceEqual(D.objects.filter(b__a__flag=None), [d1, d3, d4])
self.assertSequenceEqual(B.objects.filter(a__flag=None), [b1, b3])
self.assertSequenceEqual(B.objects.exclude(a__flag=None), [b2])
def test_extra_join_condition(self):
        # A crude check that content_type_id is taken into account in the
        # join/subquery condition.
self.assertIn(
"content_type_id", str(B.objects.exclude(a__flag=None).query).lower()
)
        # No joins are needed here - the join from the inner query can be
        # trimmed in this case (but not in the case above, where trimming
        # would mishandle Bs that have no related A objects at all).
self.assertNotIn(" join ", str(B.objects.exclude(a__flag=True).query).lower())
self.assertIn(
"content_type_id", str(B.objects.exclude(a__flag=True).query).lower()
)
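        # Roughly, exclude(a__flag=True) can be compiled as a subquery over
        # the A table alone (checking content_type_id inside it), so no join
        # against B is required in the outer query.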
def test_annotate(self):
hs1 = HasLinkThing.objects.create()
hs2 = HasLinkThing.objects.create()
HasLinkThing.objects.create()
b = Board.objects.create(name=str(hs1.pk))
Link.objects.create(content_object=hs2)
link = Link.objects.create(content_object=hs1)
Link.objects.create(content_object=b)
qs = HasLinkThing.objects.annotate(Sum("links")).filter(pk=hs1.pk)
        # If the content_type restriction isn't in the query's join condition,
        # then wrong results are produced here, as the link to b will also
        # match (b and hs1 have equal pks).
self.assertEqual(qs.count(), 1)
self.assertEqual(qs[0].links__sum, link.id)
link.delete()
        # Without a proper LEFT JOIN, no results at all would be produced
        # here.
        # Clear cached results.
qs = qs.all()
self.assertEqual(qs.count(), 1)
# Note - 0 here would be a nicer result...
self.assertIs(qs[0].links__sum, None)
# Finally test that filtering works.
self.assertEqual(qs.filter(links__sum__isnull=True).count(), 1)
self.assertEqual(qs.filter(links__sum__isnull=False).count(), 0)
def test_filter_targets_related_pk(self):
# Use hardcoded PKs to ensure different PKs for "link" and "hs2"
# objects.
HasLinkThing.objects.create(pk=1)
hs2 = HasLinkThing.objects.create(pk=2)
link = Link.objects.create(content_object=hs2, pk=1)
self.assertNotEqual(link.object_id, link.pk)
self.assertSequenceEqual(HasLinkThing.objects.filter(links=link.pk), [hs2])
def test_editable_generic_rel(self):
GenericRelationForm = modelform_factory(HasLinkThing, fields="__all__")
form = GenericRelationForm()
self.assertIn("links", form.fields)
form = GenericRelationForm({"links": None})
self.assertTrue(form.is_valid())
form.save()
links = HasLinkThing._meta.get_field("links")
self.assertEqual(links.save_form_data_calls, 1)
def test_ticket_22998(self):
related = Related.objects.create()
content = Content.objects.create(related_obj=related)
Node.objects.create(content=content)
        # Deleting the Related cascades to the Content, which cascades to the
        # Node, where the pre_delete signal should fire and prevent deletion.
with self.assertRaises(ProtectedError):
related.delete()
def test_ticket_22982(self):
place = Place.objects.create(name="My Place")
self.assertIn("GenericRelatedObjectManager", str(place.links))
def test_filter_on_related_proxy_model(self):
place = Place.objects.create()
Link.objects.create(content_object=place)
self.assertEqual(Place.objects.get(link_proxy__object_id=place.id), place)
def test_generic_reverse_relation_with_mti(self):
"""
Filtering with a reverse generic relation, where the GenericRelation
comes from multi-table inheritance.
"""
place = Place.objects.create(name="Test Place")
link = Link.objects.create(content_object=place)
result = Link.objects.filter(places=place)
self.assertCountEqual(result, [link])
def test_generic_reverse_relation_with_abc(self):
"""
The reverse generic relation accessor (targets) is created if the
GenericRelation comes from an abstract base model (HasLinks).
"""
thing = HasLinkThing.objects.create()
link = Link.objects.create(content_object=thing)
self.assertCountEqual(link.targets.all(), [thing])
|
e5dd4e729164116be71f929b6a20276eaa4d2ce3b69bc5347afdf2501f703cb3 | from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models
__all__ = (
"Link",
"Place",
"Restaurant",
"Person",
"Address",
"CharLink",
"TextLink",
"OddRelation1",
"OddRelation2",
"Contact",
"Organization",
"Note",
"Company",
)
class Link(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
class LinkProxy(Link):
class Meta:
proxy = True
class Place(models.Model):
name = models.CharField(max_length=100)
links = GenericRelation(Link, related_query_name="places")
link_proxy = GenericRelation(LinkProxy)
class Restaurant(Place):
pass
class Cafe(Restaurant):
pass
class Address(models.Model):
street = models.CharField(max_length=80)
city = models.CharField(max_length=50)
state = models.CharField(max_length=2)
zipcode = models.CharField(max_length=5)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
class Person(models.Model):
account = models.IntegerField(primary_key=True)
name = models.CharField(max_length=128)
addresses = GenericRelation(Address)
class CharLink(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.CharField(max_length=100)
content_object = GenericForeignKey()
class TextLink(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.TextField()
content_object = GenericForeignKey()
class OddRelation1(models.Model):
name = models.CharField(max_length=100)
clinks = GenericRelation(CharLink)
class OddRelation2(models.Model):
name = models.CharField(max_length=100)
tlinks = GenericRelation(TextLink)
# models for test_q_object_or:
class Note(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
note = models.TextField()
class Contact(models.Model):
notes = GenericRelation(Note)
class Organization(models.Model):
name = models.CharField(max_length=255)
contacts = models.ManyToManyField(Contact, related_name="organizations")
class Company(models.Model):
name = models.CharField(max_length=100)
links = GenericRelation(Link)
class Team(models.Model):
name = models.CharField(max_length=15)
def __len__(self):
return 0
class Guild(models.Model):
name = models.CharField(max_length=15)
def __bool__(self):
return False
class Tag(models.Model):
content_type = models.ForeignKey(
ContentType, models.CASCADE, related_name="g_r_r_tags"
)
object_id = models.CharField(max_length=15)
content_object = GenericForeignKey()
label = models.CharField(max_length=15)
class Board(models.Model):
name = models.CharField(primary_key=True, max_length=25)
class SpecialGenericRelation(GenericRelation):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.editable = True
self.save_form_data_calls = 0
def save_form_data(self, *args, **kwargs):
self.save_form_data_calls += 1
class HasLinks(models.Model):
links = SpecialGenericRelation(Link, related_query_name="targets")
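    # related_query_name="targets" also adds a reverse relation on Link, so
    # Link instances expose link.targets and Link querysets can filter on
    # targets=... (see the reverse-relation tests).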
class Meta:
abstract = True
class HasLinkThing(HasLinks):
pass
class A(models.Model):
flag = models.BooleanField(null=True)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
class B(models.Model):
a = GenericRelation(A)
class Meta:
ordering = ("id",)
class C(models.Model):
b = models.ForeignKey(B, models.CASCADE)
class Meta:
ordering = ("id",)
class D(models.Model):
b = models.ForeignKey(B, models.SET_NULL, null=True)
class Meta:
ordering = ("id",)
# Ticket #22998
class Node(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content = GenericForeignKey("content_type", "object_id")
class Content(models.Model):
nodes = GenericRelation(Node)
related_obj = models.ForeignKey("Related", models.CASCADE)
class Related(models.Model):
pass
def prevent_deletes(sender, instance, **kwargs):
raise models.ProtectedError("Not allowed to delete.", [instance])
models.signals.pre_delete.connect(prevent_deletes, sender=Node)
|
67c0cd06eb851f3f502a0b08ee3dda7c0495df7f8e2dfacfbbd76e48ee958151 | import datetime
import json
from decimal import Decimal
from django import forms
from django.core import exceptions, serializers
from django.db.models import DateField, DateTimeField, F, Func, Value
from django.http import QueryDict
from django.test import override_settings
from django.test.utils import isolate_apps
from django.utils import timezone
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import (
BigAutoFieldModel,
PostgreSQLModel,
RangeLookupsModel,
RangesModel,
SmallAutoFieldModel,
)
try:
from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange
from django.contrib.postgres import fields as pg_fields
from django.contrib.postgres import forms as pg_forms
from django.contrib.postgres.validators import (
RangeMaxValueValidator,
RangeMinValueValidator,
)
except ImportError:
pass
@isolate_apps("postgres_tests")
class BasicTests(PostgreSQLSimpleTestCase):
def test_get_field_display(self):
class Model(PostgreSQLModel):
field = pg_fields.IntegerRangeField(
choices=[
["1-50", [((1, 25), "1-25"), ([26, 50], "26-50")]],
((51, 100), "51-100"),
],
)
tests = (
((1, 25), "1-25"),
([26, 50], "26-50"),
((51, 100), "51-100"),
((1, 2), "(1, 2)"),
([1, 2], "[1, 2]"),
)
for value, display in tests:
with self.subTest(value=value, display=display):
instance = Model(field=value)
self.assertEqual(instance.get_field_display(), display)
def test_discrete_range_fields_unsupported_default_bounds(self):
discrete_range_types = [
pg_fields.BigIntegerRangeField,
pg_fields.IntegerRangeField,
pg_fields.DateRangeField,
]
for field_type in discrete_range_types:
msg = f"Cannot use 'default_bounds' with {field_type.__name__}."
with self.assertRaisesMessage(TypeError, msg):
field_type(choices=[((51, 100), "51-100")], default_bounds="[]")
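    # Background for the check above: PostgreSQL canonicalizes discrete range
    # types (int4range, int8range, daterange) to the '[)' form, so a custom
    # default_bounds would be misleading there; continuous ranges (numrange,
    # tstzrange) preserve whatever bounds they are given.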
def test_continuous_range_fields_default_bounds(self):
continuous_range_types = [
pg_fields.DecimalRangeField,
pg_fields.DateTimeRangeField,
]
for field_type in continuous_range_types:
field = field_type(choices=[((51, 100), "51-100")], default_bounds="[]")
self.assertEqual(field.default_bounds, "[]")
def test_invalid_default_bounds(self):
tests = [")]", ")[", "](", "])", "([", "[(", "x", "", None]
msg = "default_bounds must be one of '[)', '(]', '()', or '[]'."
for invalid_bounds in tests:
with self.assertRaisesMessage(ValueError, msg):
pg_fields.DecimalRangeField(default_bounds=invalid_bounds)
def test_deconstruct(self):
field = pg_fields.DecimalRangeField()
*_, kwargs = field.deconstruct()
self.assertEqual(kwargs, {})
field = pg_fields.DecimalRangeField(default_bounds="[]")
*_, kwargs = field.deconstruct()
self.assertEqual(kwargs, {"default_bounds": "[]"})
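        # deconstruct() only emits non-default kwargs, which keeps generated
        # migrations minimal.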
class TestSaveLoad(PostgreSQLTestCase):
def test_all_fields(self):
now = timezone.now()
instance = RangesModel(
ints=NumericRange(0, 10),
bigints=NumericRange(10, 20),
decimals=NumericRange(20, 30),
timestamps=DateTimeTZRange(now - datetime.timedelta(hours=1), now),
dates=DateRange(now.date() - datetime.timedelta(days=1), now.date()),
)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(instance.ints, loaded.ints)
self.assertEqual(instance.bigints, loaded.bigints)
self.assertEqual(instance.decimals, loaded.decimals)
self.assertEqual(instance.timestamps, loaded.timestamps)
self.assertEqual(instance.dates, loaded.dates)
def test_range_object(self):
r = NumericRange(0, 10)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_tuple(self):
instance = RangesModel(ints=(0, 10))
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(NumericRange(0, 10), loaded.ints)
def test_tuple_range_with_default_bounds(self):
range_ = (timezone.now(), timezone.now() + datetime.timedelta(hours=1))
RangesModel.objects.create(timestamps_closed_bounds=range_, timestamps=range_)
loaded = RangesModel.objects.get()
self.assertEqual(
loaded.timestamps_closed_bounds,
DateTimeTZRange(range_[0], range_[1], "[]"),
)
self.assertEqual(
loaded.timestamps,
DateTimeTZRange(range_[0], range_[1], "[)"),
)
def test_range_object_boundaries(self):
r = NumericRange(0, 10, "[]")
instance = RangesModel(decimals=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.decimals)
self.assertIn(10, loaded.decimals)
def test_range_object_boundaries_range_with_default_bounds(self):
range_ = DateTimeTZRange(
timezone.now(),
timezone.now() + datetime.timedelta(hours=1),
bounds="()",
)
RangesModel.objects.create(timestamps_closed_bounds=range_)
loaded = RangesModel.objects.get()
self.assertEqual(loaded.timestamps_closed_bounds, range_)
def test_unbounded(self):
r = NumericRange(None, None, "()")
instance = RangesModel(decimals=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.decimals)
def test_empty(self):
r = NumericRange(empty=True)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_null(self):
instance = RangesModel(ints=None)
instance.save()
loaded = RangesModel.objects.get()
self.assertIsNone(loaded.ints)
def test_model_set_on_base_field(self):
instance = RangesModel()
field = instance._meta.get_field("ints")
self.assertEqual(field.model, RangesModel)
self.assertEqual(field.base_field.model, RangesModel)
class TestRangeContainsLookup(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.timestamps = [
datetime.datetime(year=2016, month=1, day=1),
datetime.datetime(year=2016, month=1, day=2, hour=1),
datetime.datetime(year=2016, month=1, day=2, hour=12),
datetime.datetime(year=2016, month=1, day=3),
datetime.datetime(year=2016, month=1, day=3, hour=1),
datetime.datetime(year=2016, month=2, day=2),
]
cls.aware_timestamps = [
timezone.make_aware(timestamp) for timestamp in cls.timestamps
]
cls.dates = [
datetime.date(year=2016, month=1, day=1),
datetime.date(year=2016, month=1, day=2),
datetime.date(year=2016, month=1, day=3),
datetime.date(year=2016, month=1, day=4),
datetime.date(year=2016, month=2, day=2),
datetime.date(year=2016, month=2, day=3),
]
cls.obj = RangesModel.objects.create(
dates=(cls.dates[0], cls.dates[3]),
dates_inner=(cls.dates[1], cls.dates[2]),
timestamps=(cls.timestamps[0], cls.timestamps[3]),
timestamps_inner=(cls.timestamps[1], cls.timestamps[2]),
)
cls.aware_obj = RangesModel.objects.create(
dates=(cls.dates[0], cls.dates[3]),
dates_inner=(cls.dates[1], cls.dates[2]),
timestamps=(cls.aware_timestamps[0], cls.aware_timestamps[3]),
timestamps_inner=(cls.timestamps[1], cls.timestamps[2]),
)
# Objects that don't match any queries.
for i in range(3, 4):
RangesModel.objects.create(
dates=(cls.dates[i], cls.dates[i + 1]),
timestamps=(cls.timestamps[i], cls.timestamps[i + 1]),
)
RangesModel.objects.create(
dates=(cls.dates[i], cls.dates[i + 1]),
timestamps=(cls.aware_timestamps[i], cls.aware_timestamps[i + 1]),
)
def test_datetime_range_contains(self):
filter_args = (
self.timestamps[1],
self.aware_timestamps[1],
(self.timestamps[1], self.timestamps[2]),
(self.aware_timestamps[1], self.aware_timestamps[2]),
Value(self.dates[0]),
Func(F("dates"), function="lower", output_field=DateTimeField()),
F("timestamps_inner"),
)
for filter_arg in filter_args:
with self.subTest(filter_arg=filter_arg):
self.assertCountEqual(
RangesModel.objects.filter(**{"timestamps__contains": filter_arg}),
[self.obj, self.aware_obj],
)
def test_date_range_contains(self):
filter_args = (
self.timestamps[1],
(self.dates[1], self.dates[2]),
Value(self.dates[0], output_field=DateField()),
Func(F("timestamps"), function="lower", output_field=DateField()),
F("dates_inner"),
)
for filter_arg in filter_args:
with self.subTest(filter_arg=filter_arg):
self.assertCountEqual(
RangesModel.objects.filter(**{"dates__contains": filter_arg}),
[self.obj, self.aware_obj],
)
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = RangesModel.objects.bulk_create(
[
RangesModel(ints=NumericRange(0, 10)),
RangesModel(ints=NumericRange(5, 15)),
RangesModel(ints=NumericRange(None, 0)),
RangesModel(ints=NumericRange(empty=True)),
RangesModel(ints=None),
]
)
def test_exact(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__exact=NumericRange(0, 10)),
[self.objs[0]],
)
def test_isnull(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isnull=True),
[self.objs[4]],
)
def test_isempty(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isempty=True),
[self.objs[3]],
)
def test_contains(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=8),
[self.objs[0], self.objs[1]],
)
def test_contains_range(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=NumericRange(3, 8)),
[self.objs[0]],
)
def test_contained_by(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contained_by=NumericRange(0, 20)),
[self.objs[0], self.objs[1], self.objs[3]],
)
def test_overlap(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__overlap=NumericRange(3, 8)),
[self.objs[0], self.objs[1]],
)
def test_fully_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_lt=NumericRange(5, 10)),
[self.objs[2]],
)
def test_fully_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_gt=NumericRange(5, 10)),
[],
)
def test_not_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_lt=NumericRange(5, 10)),
[self.objs[1]],
)
def test_not_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_gt=NumericRange(5, 10)),
[self.objs[0], self.objs[2]],
)
def test_adjacent_to(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__adjacent_to=NumericRange(0, 5)),
[self.objs[1], self.objs[2]],
)
def test_startswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith=0),
[self.objs[0]],
)
def test_endswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__endswith=0),
[self.objs[2]],
)
def test_startswith_chaining(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith__gte=0),
[self.objs[0], self.objs[1]],
)
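        # "startswith" and "endswith" on a range field map to PostgreSQL's
        # lower() and upper() functions, which is why they can be chained
        # with ordinary lookups such as __gte above.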
def test_bound_type(self):
decimals = RangesModel.objects.bulk_create(
[
RangesModel(decimals=NumericRange(None, 10)),
RangesModel(decimals=NumericRange(10, None)),
RangesModel(decimals=NumericRange(5, 15)),
RangesModel(decimals=NumericRange(5, 15, "(]")),
]
)
tests = [
("lower_inc", True, [decimals[1], decimals[2]]),
("lower_inc", False, [decimals[0], decimals[3]]),
("lower_inf", True, [decimals[0]]),
("lower_inf", False, [decimals[1], decimals[2], decimals[3]]),
("upper_inc", True, [decimals[3]]),
("upper_inc", False, [decimals[0], decimals[1], decimals[2]]),
("upper_inf", True, [decimals[1]]),
("upper_inf", False, [decimals[0], decimals[2], decimals[3]]),
]
        for lookup, filter_arg, expected_result in tests:
            with self.subTest(lookup=lookup, filter_arg=filter_arg):
                self.assertSequenceEqual(
                    RangesModel.objects.filter(**{"decimals__%s" % lookup: filter_arg}),
                    expected_result,
                )
class TestQueryingWithRanges(PostgreSQLTestCase):
def test_date_range(self):
objs = [
RangeLookupsModel.objects.create(date="2015-01-01"),
RangeLookupsModel.objects.create(date="2015-05-05"),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
date__contained_by=DateRange("2015-01-01", "2015-05-04")
),
[objs[0]],
)
def test_date_range_datetime_field(self):
objs = [
RangeLookupsModel.objects.create(timestamp="2015-01-01"),
RangeLookupsModel.objects.create(timestamp="2015-05-05"),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
timestamp__date__contained_by=DateRange("2015-01-01", "2015-05-04")
),
[objs[0]],
)
def test_datetime_range(self):
objs = [
RangeLookupsModel.objects.create(timestamp="2015-01-01T09:00:00"),
RangeLookupsModel.objects.create(timestamp="2015-05-05T17:00:00"),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
timestamp__contained_by=DateTimeTZRange(
"2015-01-01T09:00", "2015-05-04T23:55"
)
),
[objs[0]],
)
def test_small_integer_field_contained_by(self):
objs = [
RangeLookupsModel.objects.create(small_integer=8),
RangeLookupsModel.objects.create(small_integer=4),
RangeLookupsModel.objects.create(small_integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
small_integer__contained_by=NumericRange(4, 6)
),
[objs[1]],
)
def test_integer_range(self):
objs = [
RangeLookupsModel.objects.create(integer=5),
RangeLookupsModel.objects.create(integer=99),
RangeLookupsModel.objects.create(integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(integer__contained_by=NumericRange(1, 98)),
[objs[0]],
)
def test_biginteger_range(self):
objs = [
RangeLookupsModel.objects.create(big_integer=5),
RangeLookupsModel.objects.create(big_integer=99),
RangeLookupsModel.objects.create(big_integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
big_integer__contained_by=NumericRange(1, 98)
),
[objs[0]],
)
def test_decimal_field_contained_by(self):
objs = [
RangeLookupsModel.objects.create(decimal_field=Decimal("1.33")),
RangeLookupsModel.objects.create(decimal_field=Decimal("2.88")),
RangeLookupsModel.objects.create(decimal_field=Decimal("99.17")),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
decimal_field__contained_by=NumericRange(
Decimal("1.89"), Decimal("7.91")
),
),
[objs[1]],
)
def test_float_range(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=NumericRange(1, 98)),
[objs[0]],
)
def test_small_auto_field_contained_by(self):
objs = SmallAutoFieldModel.objects.bulk_create(
[SmallAutoFieldModel() for i in range(1, 5)]
)
self.assertSequenceEqual(
SmallAutoFieldModel.objects.filter(
id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
),
objs[1:3],
)
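        # NumericRange defaults to '[)' bounds, so objs[3] itself falls
        # outside the range and only objs[1:3] match.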
def test_auto_field_contained_by(self):
objs = RangeLookupsModel.objects.bulk_create(
[RangeLookupsModel() for i in range(1, 5)]
)
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
),
objs[1:3],
)
def test_big_auto_field_contained_by(self):
objs = BigAutoFieldModel.objects.bulk_create(
[BigAutoFieldModel() for i in range(1, 5)]
)
self.assertSequenceEqual(
BigAutoFieldModel.objects.filter(
id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
),
objs[1:3],
)
def test_f_ranges(self):
parent = RangesModel.objects.create(decimals=NumericRange(0, 10))
objs = [
RangeLookupsModel.objects.create(float=5, parent=parent),
RangeLookupsModel.objects.create(float=99, parent=parent),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=F("parent__decimals")),
[objs[0]],
)
def test_exclude(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.exclude(float__contained_by=NumericRange(0, 100)),
[objs[2]],
)
class TestSerialization(PostgreSQLSimpleTestCase):
test_data = (
'[{"fields": {"ints": "{\\"upper\\": \\"10\\", \\"lower\\": \\"0\\", '
'\\"bounds\\": \\"[)\\"}", "decimals": "{\\"empty\\": true}", '
'"bigints": null, "timestamps": '
'"{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
'\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"[)\\"}", '
'"timestamps_inner": null, '
'"timestamps_closed_bounds": "{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
'\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"()\\"}", '
'"dates": "{\\"upper\\": \\"2014-02-02\\", \\"lower\\": \\"2014-01-01\\", '
'\\"bounds\\": \\"[)\\"}", "dates_inner": null }, '
'"model": "postgres_tests.rangesmodel", "pk": null}]'
)
lower_date = datetime.date(2014, 1, 1)
upper_date = datetime.date(2014, 2, 2)
lower_dt = datetime.datetime(2014, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc)
upper_dt = datetime.datetime(2014, 2, 2, 12, 12, 12, tzinfo=datetime.timezone.utc)
def test_dumping(self):
instance = RangesModel(
ints=NumericRange(0, 10),
decimals=NumericRange(empty=True),
timestamps=DateTimeTZRange(self.lower_dt, self.upper_dt),
timestamps_closed_bounds=DateTimeTZRange(
self.lower_dt,
self.upper_dt,
bounds="()",
),
dates=DateRange(self.lower_date, self.upper_date),
)
data = serializers.serialize("json", [instance])
dumped = json.loads(data)
for field in ("ints", "dates", "timestamps", "timestamps_closed_bounds"):
dumped[0]["fields"][field] = json.loads(dumped[0]["fields"][field])
check = json.loads(self.test_data)
for field in ("ints", "dates", "timestamps", "timestamps_closed_bounds"):
check[0]["fields"][field] = json.loads(check[0]["fields"][field])
self.assertEqual(dumped, check)
def test_loading(self):
instance = list(serializers.deserialize("json", self.test_data))[0].object
self.assertEqual(instance.ints, NumericRange(0, 10))
self.assertEqual(instance.decimals, NumericRange(empty=True))
self.assertIsNone(instance.bigints)
self.assertEqual(instance.dates, DateRange(self.lower_date, self.upper_date))
self.assertEqual(
instance.timestamps, DateTimeTZRange(self.lower_dt, self.upper_dt)
)
self.assertEqual(
instance.timestamps_closed_bounds,
DateTimeTZRange(self.lower_dt, self.upper_dt, bounds="()"),
)
def test_serialize_range_with_null(self):
instance = RangesModel(ints=NumericRange(None, 10))
data = serializers.serialize("json", [instance])
new_instance = list(serializers.deserialize("json", data))[0].object
self.assertEqual(new_instance.ints, NumericRange(None, 10))
instance = RangesModel(ints=NumericRange(10, None))
data = serializers.serialize("json", [instance])
new_instance = list(serializers.deserialize("json", data))[0].object
self.assertEqual(new_instance.ints, NumericRange(10, None))
class TestChecks(PostgreSQLSimpleTestCase):
def test_choices_tuple_list(self):
class Model(PostgreSQLModel):
field = pg_fields.IntegerRangeField(
choices=[
["1-50", [((1, 25), "1-25"), ([26, 50], "26-50")]],
((51, 100), "51-100"),
],
)
self.assertEqual(Model._meta.get_field("field").check(), [])
class TestValidators(PostgreSQLSimpleTestCase):
def test_max(self):
validator = RangeMaxValueValidator(5)
validator(NumericRange(0, 5))
msg = "Ensure that this range is completely less than or equal to 5."
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], msg)
self.assertEqual(cm.exception.code, "max_value")
with self.assertRaisesMessage(exceptions.ValidationError, msg):
            validator(NumericRange(0, None))  # an unbounded range
def test_min(self):
validator = RangeMinValueValidator(5)
validator(NumericRange(10, 15))
msg = "Ensure that this range is completely greater than or equal to 5."
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], msg)
self.assertEqual(cm.exception.code, "min_value")
with self.assertRaisesMessage(exceptions.ValidationError, msg):
            validator(NumericRange(None, 10))  # an unbounded range
class TestFormField(PostgreSQLSimpleTestCase):
def test_valid_integer(self):
field = pg_forms.IntegerRangeField()
value = field.clean(["1", "2"])
self.assertEqual(value, NumericRange(1, 2))
def test_valid_decimal(self):
field = pg_forms.DecimalRangeField()
value = field.clean(["1.12345", "2.001"])
self.assertEqual(value, NumericRange(Decimal("1.12345"), Decimal("2.001")))
def test_valid_timestamps(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(["01/01/2014 00:00:00", "02/02/2014 12:12:12"])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(value, DateTimeTZRange(lower, upper))
def test_valid_dates(self):
field = pg_forms.DateRangeField()
value = field.clean(["01/01/2014", "02/02/2014"])
lower = datetime.date(2014, 1, 1)
upper = datetime.date(2014, 2, 2)
self.assertEqual(value, DateRange(lower, upper))
def test_using_split_datetime_widget(self):
class SplitDateTimeRangeField(pg_forms.DateTimeRangeField):
base_field = forms.SplitDateTimeField
class SplitForm(forms.Form):
field = SplitDateTimeRangeField()
form = SplitForm()
self.assertHTMLEqual(
str(form),
"""
<tr>
<th>
<label>Field:</label>
</th>
<td>
<input id="id_field_0_0" name="field_0_0" type="text">
<input id="id_field_0_1" name="field_0_1" type="text">
<input id="id_field_1_0" name="field_1_0" type="text">
<input id="id_field_1_1" name="field_1_1" type="text">
</td>
</tr>
""",
)
form = SplitForm(
{
"field_0_0": "01/01/2014",
"field_0_1": "00:00:00",
"field_1_0": "02/02/2014",
"field_1_1": "12:12:12",
}
)
self.assertTrue(form.is_valid())
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(form.cleaned_data["field"], DateTimeTZRange(lower, upper))
def test_none(self):
field = pg_forms.IntegerRangeField(required=False)
value = field.clean(["", ""])
self.assertIsNone(value)
def test_datetime_form_as_table(self):
class DateTimeRangeForm(forms.Form):
datetime_field = pg_forms.DateTimeRangeField(show_hidden_initial=True)
form = DateTimeRangeForm()
self.assertHTMLEqual(
form.as_table(),
"""
<tr><th>
<label>Datetime field:</label>
</th><td>
<input type="text" name="datetime_field_0" id="id_datetime_field_0">
<input type="text" name="datetime_field_1" id="id_datetime_field_1">
<input type="hidden" name="initial-datetime_field_0"
id="initial-id_datetime_field_0">
<input type="hidden" name="initial-datetime_field_1"
id="initial-id_datetime_field_1">
</td></tr>
""",
)
form = DateTimeRangeForm(
{
"datetime_field_0": "2010-01-01 11:13:00",
"datetime_field_1": "2020-12-12 16:59:00",
}
)
self.assertHTMLEqual(
form.as_table(),
"""
<tr><th>
<label>Datetime field:</label>
</th><td>
<input type="text" name="datetime_field_0"
value="2010-01-01 11:13:00" id="id_datetime_field_0">
<input type="text" name="datetime_field_1"
value="2020-12-12 16:59:00" id="id_datetime_field_1">
<input type="hidden" name="initial-datetime_field_0"
value="2010-01-01 11:13:00" id="initial-id_datetime_field_0">
<input type="hidden" name="initial-datetime_field_1"
value="2020-12-12 16:59:00" id="initial-id_datetime_field_1"></td></tr>
""",
)
def test_datetime_form_initial_data(self):
class DateTimeRangeForm(forms.Form):
datetime_field = pg_forms.DateTimeRangeField(show_hidden_initial=True)
data = QueryDict(mutable=True)
data.update(
{
"datetime_field_0": "2010-01-01 11:13:00",
"datetime_field_1": "",
"initial-datetime_field_0": "2010-01-01 10:12:00",
"initial-datetime_field_1": "",
}
)
form = DateTimeRangeForm(data=data)
self.assertTrue(form.has_changed())
data["initial-datetime_field_0"] = "2010-01-01 11:13:00"
form = DateTimeRangeForm(data=data)
self.assertFalse(form.has_changed())
def test_rendering(self):
class RangeForm(forms.Form):
ints = pg_forms.IntegerRangeField()
self.assertHTMLEqual(
str(RangeForm()),
"""
<tr>
<th><label>Ints:</label></th>
<td>
<input id="id_ints_0" name="ints_0" type="number">
<input id="id_ints_1" name="ints_1" type="number">
</td>
</tr>
""",
)
def test_integer_lower_bound_higher(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["10", "2"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_integer_open(self):
field = pg_forms.IntegerRangeField()
value = field.clean(["", "0"])
self.assertEqual(value, NumericRange(None, 0))
def test_integer_incorrect_data_type(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1")
self.assertEqual(cm.exception.messages[0], "Enter two whole numbers.")
self.assertEqual(cm.exception.code, "invalid")
def test_integer_invalid_lower(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["a", "2"])
self.assertEqual(cm.exception.messages[0], "Enter a whole number.")
def test_integer_invalid_upper(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["1", "b"])
self.assertEqual(cm.exception.messages[0], "Enter a whole number.")
def test_integer_required(self):
field = pg_forms.IntegerRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean([1, ""])
self.assertEqual(value, NumericRange(1, None))
def test_decimal_lower_bound_higher(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["1.8", "1.6"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_decimal_open(self):
field = pg_forms.DecimalRangeField()
value = field.clean(["", "3.1415926"])
self.assertEqual(value, NumericRange(None, Decimal("3.1415926")))
def test_decimal_incorrect_data_type(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1.6")
self.assertEqual(cm.exception.messages[0], "Enter two numbers.")
self.assertEqual(cm.exception.code, "invalid")
def test_decimal_invalid_lower(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["a", "3.1415926"])
self.assertEqual(cm.exception.messages[0], "Enter a number.")
def test_decimal_invalid_upper(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["1.61803399", "b"])
self.assertEqual(cm.exception.messages[0], "Enter a number.")
def test_decimal_required(self):
field = pg_forms.DecimalRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean(["1.61803399", ""])
self.assertEqual(value, NumericRange(Decimal("1.61803399"), None))
def test_date_lower_bound_higher(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2013-04-09", "1976-04-16"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_date_open(self):
field = pg_forms.DateRangeField()
value = field.clean(["", "2013-04-09"])
self.assertEqual(value, DateRange(None, datetime.date(2013, 4, 9)))
def test_date_incorrect_data_type(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1")
self.assertEqual(cm.exception.messages[0], "Enter two valid dates.")
self.assertEqual(cm.exception.code, "invalid")
def test_date_invalid_lower(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["a", "2013-04-09"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date.")
def test_date_invalid_upper(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2013-04-09", "b"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date.")
def test_date_required(self):
field = pg_forms.DateRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean(["1976-04-16", ""])
self.assertEqual(value, DateRange(datetime.date(1976, 4, 16), None))
def test_date_has_changed_first(self):
self.assertTrue(
pg_forms.DateRangeField().has_changed(
["2010-01-01", "2020-12-12"],
["2010-01-31", "2020-12-12"],
)
)
def test_date_has_changed_last(self):
self.assertTrue(
pg_forms.DateRangeField().has_changed(
["2010-01-01", "2020-12-12"],
["2010-01-01", "2020-12-31"],
)
)
def test_datetime_lower_bound_higher(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2006-10-25 14:59", "2006-10-25 14:58"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_datetime_open(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(["", "2013-04-09 11:45"])
self.assertEqual(
value, DateTimeTZRange(None, datetime.datetime(2013, 4, 9, 11, 45))
)
def test_datetime_incorrect_data_type(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("2013-04-09 11:45")
self.assertEqual(cm.exception.messages[0], "Enter two valid date/times.")
self.assertEqual(cm.exception.code, "invalid")
def test_datetime_invalid_lower(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["45", "2013-04-09 11:45"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date/time.")
def test_datetime_invalid_upper(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2013-04-09 11:45", "sweet pickles"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date/time.")
def test_datetime_required(self):
field = pg_forms.DateTimeRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean(["2013-04-09 11:45", ""])
self.assertEqual(
value, DateTimeTZRange(datetime.datetime(2013, 4, 9, 11, 45), None)
)
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Johannesburg")
def test_datetime_prepare_value(self):
field = pg_forms.DateTimeRangeField()
value = field.prepare_value(
DateTimeTZRange(
datetime.datetime(2015, 5, 22, 16, 6, 33, tzinfo=datetime.timezone.utc),
None,
)
)
self.assertEqual(value, [datetime.datetime(2015, 5, 22, 18, 6, 33), None])
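    # Africa/Johannesburg is UTC+2, so the aware 16:06:33 UTC value is
    # presented to the widget as the naive local time 18:06:33.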
def test_datetime_has_changed_first(self):
self.assertTrue(
pg_forms.DateTimeRangeField().has_changed(
["2010-01-01 00:00", "2020-12-12 00:00"],
["2010-01-31 23:00", "2020-12-12 00:00"],
)
)
def test_datetime_has_changed_last(self):
self.assertTrue(
pg_forms.DateTimeRangeField().has_changed(
["2010-01-01 00:00", "2020-12-12 00:00"],
["2010-01-01 00:00", "2020-12-31 23:00"],
)
)
def test_model_field_formfield_integer(self):
model_field = pg_fields.IntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
self.assertEqual(form_field.range_kwargs, {})
def test_model_field_formfield_biginteger(self):
model_field = pg_fields.BigIntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
self.assertEqual(form_field.range_kwargs, {})
    def test_model_field_formfield_decimal(self):
model_field = pg_fields.DecimalRangeField(default_bounds="()")
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DecimalRangeField)
self.assertEqual(form_field.range_kwargs, {"bounds": "()"})
def test_model_field_formfield_date(self):
model_field = pg_fields.DateRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateRangeField)
self.assertEqual(form_field.range_kwargs, {})
def test_model_field_formfield_datetime(self):
model_field = pg_fields.DateTimeRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateTimeRangeField)
self.assertEqual(
form_field.range_kwargs,
{"bounds": pg_fields.ranges.CANONICAL_RANGE_BOUNDS},
)
def test_model_field_formfield_datetime_default_bounds(self):
model_field = pg_fields.DateTimeRangeField(default_bounds="[]")
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateTimeRangeField)
self.assertEqual(form_field.range_kwargs, {"bounds": "[]"})
    def test_form_field_with_default_bounds(self):
field = pg_forms.DateTimeRangeField(default_bounds="[]")
value = field.clean(["2014-01-01 00:00:00", "2014-02-03 12:13:14"])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 3, 12, 13, 14)
self.assertEqual(value, DateTimeTZRange(lower, upper, "[]"))
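    # default_bounds="[]" makes the cleaned range inclusive at both ends
    # instead of the usual half-open "[)" default.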
def test_has_changed(self):
for field, value in (
(pg_forms.DateRangeField(), ["2010-01-01", "2020-12-12"]),
(pg_forms.DateTimeRangeField(), ["2010-01-01 11:13", "2020-12-12 14:52"]),
(pg_forms.IntegerRangeField(), [1, 2]),
(pg_forms.DecimalRangeField(), ["1.12345", "2.001"]),
):
with self.subTest(field=field.__class__.__name__):
self.assertTrue(field.has_changed(None, value))
self.assertTrue(field.has_changed([value[0], ""], value))
self.assertTrue(field.has_changed(["", value[1]], value))
self.assertFalse(field.has_changed(value, value))
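    # A blank bound counts as a change, so partially filled initial data
    # never compares equal to fully filled submitted data.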
class TestWidget(PostgreSQLSimpleTestCase):
def test_range_widget(self):
f = pg_forms.ranges.DateTimeRangeField()
self.assertHTMLEqual(
f.widget.render("datetimerange", ""),
'<input type="text" name="datetimerange_0">'
'<input type="text" name="datetimerange_1">',
)
self.assertHTMLEqual(
f.widget.render("datetimerange", None),
'<input type="text" name="datetimerange_0">'
'<input type="text" name="datetimerange_1">',
)
dt_range = DateTimeTZRange(
datetime.datetime(2006, 1, 10, 7, 30), datetime.datetime(2006, 2, 12, 9, 50)
)
self.assertHTMLEqual(
f.widget.render("datetimerange", dt_range),
'<input type="text" name="datetimerange_0" value="2006-01-10 07:30:00">'
'<input type="text" name="datetimerange_1" value="2006-02-12 09:50:00">',
)
def test_range_widget_render_tuple_value(self):
field = pg_forms.ranges.DateTimeRangeField()
dt_range_tuple = (
datetime.datetime(2022, 4, 22, 10, 24),
datetime.datetime(2022, 5, 12, 9, 25),
)
self.assertHTMLEqual(
field.widget.render("datetimerange", dt_range_tuple),
'<input type="text" name="datetimerange_0" value="2022-04-22 10:24:00">'
'<input type="text" name="datetimerange_1" value="2022-05-12 09:25:00">',
)
|
f1f15c0e61f0e788125ecd5793d37496833d56cd3cd9e68d125b13b1192ddab0 | from unittest import mock
from django.contrib.postgres.indexes import (
BloomIndex,
BrinIndex,
BTreeIndex,
GinIndex,
GistIndex,
HashIndex,
OpClass,
PostgresIndex,
SpGistIndex,
)
from django.db import NotSupportedError, connection
from django.db.models import CharField, F, Index, Q
from django.db.models.functions import Cast, Collate, Length, Lower
from django.test import skipUnlessDBFeature
from django.test.utils import modify_settings, register_lookup
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .fields import SearchVector, SearchVectorField
from .models import CharFieldModel, IntegerArrayModel, Scene, TextFieldModel
class IndexTestMixin:
def test_name_auto_generation(self):
index = self.index_class(fields=["field"])
index.set_name_with_model(CharFieldModel)
self.assertRegex(
index.name, r"postgres_te_field_[0-9a-f]{6}_%s" % self.index_class.suffix
)
def test_deconstruction_no_customization(self):
index = self.index_class(
fields=["title"], name="test_title_%s" % self.index_class.suffix
)
path, args, kwargs = index.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.indexes.%s" % self.index_class.__name__
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{"fields": ["title"], "name": "test_title_%s" % self.index_class.suffix},
)
def test_deconstruction_with_expressions_no_customization(self):
name = f"test_title_{self.index_class.suffix}"
index = self.index_class(Lower("title"), name=name)
path, args, kwargs = index.deconstruct()
self.assertEqual(
path,
f"django.contrib.postgres.indexes.{self.index_class.__name__}",
)
self.assertEqual(args, (Lower("title"),))
self.assertEqual(kwargs, {"name": name})
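# deconstruct() returns an importable path plus args/kwargs so that an index,
# including any customized parameters, can be serialized into migrations; the
# classes below repeat the exercise for each index type's custom kwargs.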
class BloomIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BloomIndex
def test_suffix(self):
self.assertEqual(BloomIndex.suffix, "bloom")
def test_deconstruction(self):
index = BloomIndex(fields=["title"], name="test_bloom", length=80, columns=[4])
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.BloomIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_bloom",
"length": 80,
"columns": [4],
},
)
def test_invalid_fields(self):
msg = "Bloom indexes support a maximum of 32 fields."
with self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"] * 33, name="test_bloom")
def test_invalid_columns(self):
msg = "BloomIndex.columns must be a list or tuple."
with self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", columns="x")
msg = "BloomIndex.columns cannot have more values than fields."
with self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", columns=[4, 3])
def test_invalid_columns_value(self):
msg = "BloomIndex.columns must contain integers from 1 to 4095."
for length in (0, 4096):
with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", columns=[length])
def test_invalid_length(self):
msg = "BloomIndex.length must be None or an integer from 1 to 4096."
for length in (0, 4097):
with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", length=length)
class BrinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BrinIndex
def test_suffix(self):
self.assertEqual(BrinIndex.suffix, "brin")
def test_deconstruction(self):
index = BrinIndex(
fields=["title"],
name="test_title_brin",
autosummarize=True,
pages_per_range=16,
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.BrinIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_title_brin",
"autosummarize": True,
"pages_per_range": 16,
},
)
def test_invalid_pages_per_range(self):
with self.assertRaisesMessage(
ValueError, "pages_per_range must be None or a positive integer"
):
BrinIndex(fields=["title"], name="test_title_brin", pages_per_range=0)
class BTreeIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BTreeIndex
def test_suffix(self):
self.assertEqual(BTreeIndex.suffix, "btree")
def test_deconstruction(self):
index = BTreeIndex(fields=["title"], name="test_title_btree", fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.BTreeIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"fields": ["title"], "name": "test_title_btree", "fillfactor": 80}
)
class GinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = GinIndex
def test_suffix(self):
self.assertEqual(GinIndex.suffix, "gin")
def test_deconstruction(self):
index = GinIndex(
fields=["title"],
name="test_title_gin",
fastupdate=True,
gin_pending_list_limit=128,
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.GinIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_title_gin",
"fastupdate": True,
"gin_pending_list_limit": 128,
},
)
class GistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = GistIndex
def test_suffix(self):
self.assertEqual(GistIndex.suffix, "gist")
def test_deconstruction(self):
index = GistIndex(
fields=["title"], name="test_title_gist", buffering=False, fillfactor=80
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.GistIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_title_gist",
"buffering": False,
"fillfactor": 80,
},
)
class HashIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = HashIndex
def test_suffix(self):
self.assertEqual(HashIndex.suffix, "hash")
def test_deconstruction(self):
index = HashIndex(fields=["title"], name="test_title_hash", fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.HashIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"fields": ["title"], "name": "test_title_hash", "fillfactor": 80}
)
class SpGistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = SpGistIndex
def test_suffix(self):
self.assertEqual(SpGistIndex.suffix, "spgist")
def test_deconstruction(self):
index = SpGistIndex(fields=["title"], name="test_title_spgist", fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.SpGistIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"fields": ["title"], "name": "test_title_spgist", "fillfactor": 80}
)
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class SchemaTests(PostgreSQLTestCase):
get_opclass_query = """
        SELECT opcname, c.relname FROM pg_opclass AS oc
        JOIN pg_index AS i ON oc.oid = ANY(i.indclass)
        JOIN pg_class AS c ON c.oid = i.indexrelid
        WHERE c.relname = %s
"""
def get_constraints(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_gin_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn(
"field", self.get_constraints(IntegerArrayModel._meta.db_table)
)
# Add the index
index_name = "integer_array_model_field_gin"
index = GinIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
# Check gin index was added
self.assertEqual(constraints[index_name]["type"], GinIndex.suffix)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(
index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
)
def test_gin_fastupdate(self):
index_name = "integer_array_gin_fastupdate"
index = GinIndex(fields=["field"], name=index_name, fastupdate=False)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
self.assertEqual(constraints[index_name]["options"], ["fastupdate=off"])
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(
index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
)
def test_partial_gin_index(self):
with register_lookup(CharField, Length):
index_name = "char_field_gin_partial_idx"
index = GinIndex(
fields=["field"], name=index_name, condition=Q(field__length=40)
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_partial_gin_index_with_tablespace(self):
with register_lookup(CharField, Length):
index_name = "char_field_gin_partial_idx"
index = GinIndex(
fields=["field"],
name=index_name,
condition=Q(field__length=40),
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
self.assertIn(
'TABLESPACE "pg_default" ',
str(index.create_sql(CharFieldModel, editor)),
)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gin_parameters(self):
index_name = "integer_array_gin_params"
index = GinIndex(
fields=["field"],
name=index_name,
fastupdate=True,
gin_pending_list_limit=64,
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
self.assertIn(
") WITH (gin_pending_list_limit = 64, fastupdate = on) TABLESPACE",
str(index.create_sql(IntegerArrayModel, editor)),
)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
self.assertEqual(
constraints[index_name]["options"],
["gin_pending_list_limit=64", "fastupdate=on"],
)
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(
index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
)
def test_trigram_op_class_gin_index(self):
index_name = "trigram_op_class_gin"
index = GinIndex(OpClass(F("scene"), name="gin_trgm_ops"), name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("gin_trgm_ops", index_name)])
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]["type"], GinIndex.suffix)
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_cast_search_vector_gin_index(self):
index_name = "cast_search_vector_gin"
index = GinIndex(Cast("field", SearchVectorField()), name=index_name)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
sql = index.create_sql(TextFieldModel, editor)
table = TextFieldModel._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]["type"], GinIndex.suffix)
self.assertIs(sql.references_column(table, "field"), True)
self.assertIn("::tsvector", str(sql))
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(table))
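    # Casting the text column to SearchVectorField emits a "::tsvector" cast
    # in the SQL, letting a plain text field back a full-text GIN index.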
def test_bloom_index(self):
index_name = "char_field_model_field_bloom"
index = BloomIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BloomIndex.suffix)
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_bloom_parameters(self):
index_name = "char_field_model_field_bloom_params"
index = BloomIndex(fields=["field"], name=index_name, length=512, columns=[3])
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BloomIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["length=512", "col1=3"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_brin_index(self):
index_name = "char_field_model_field_brin"
index = BrinIndex(fields=["field"], name=index_name, pages_per_range=4)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BrinIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["pages_per_range=4"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_brin_parameters(self):
index_name = "char_field_brin_params"
index = BrinIndex(fields=["field"], name=index_name, autosummarize=True)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BrinIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["autosummarize=on"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_btree_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = "char_field_model_field_btree"
index = BTreeIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], BTreeIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_btree_parameters(self):
index_name = "integer_array_btree_fillfactor"
index = BTreeIndex(fields=["field"], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BTreeIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gist_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = "char_field_model_field_gist"
index = GistIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gist_parameters(self):
index_name = "integer_array_gist_buffering"
index = GistIndex(
fields=["field"], name=index_name, buffering=True, fillfactor=80
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
self.assertEqual(
constraints[index_name]["options"], ["buffering=on", "fillfactor=80"]
)
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
@skipUnlessDBFeature("supports_covering_gist_indexes")
def test_gist_include(self):
index_name = "scene_gist_include_setting"
index = GistIndex(name=index_name, fields=["scene"], include=["setting"])
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
self.assertEqual(constraints[index_name]["columns"], ["scene", "setting"])
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_gist_include_not_supported(self):
index_name = "gist_include_exception"
index = GistIndex(fields=["scene"], name=index_name, include=["setting"])
msg = "Covering GiST indexes require PostgreSQL 12+."
with self.assertRaisesMessage(NotSupportedError, msg):
with mock.patch(
"django.db.backends.postgresql.features.DatabaseFeatures."
"supports_covering_gist_indexes",
False,
):
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_tsvector_op_class_gist_index(self):
index_name = "tsvector_op_class_gist"
index = GistIndex(
OpClass(
SearchVector("scene", "setting", config="english"),
name="tsvector_ops",
),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
sql = index.create_sql(Scene, editor)
table = Scene._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
        self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
self.assertIs(sql.references_column(table, "scene"), True)
self.assertIs(sql.references_column(table, "setting"), True)
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(table))
def test_hash_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = "char_field_model_field_hash"
index = HashIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], HashIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_hash_parameters(self):
index_name = "integer_array_hash_fillfactor"
index = HashIndex(fields=["field"], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], HashIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_spgist_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(TextFieldModel._meta.db_table))
# Add the index.
index_name = "text_field_model_field_spgist"
index = SpGistIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
constraints = self.get_constraints(TextFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(TextFieldModel._meta.db_table)
)
def test_spgist_parameters(self):
index_name = "text_field_model_spgist_fillfactor"
index = SpGistIndex(fields=["field"], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(TextFieldModel._meta.db_table)
)
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_spgist_include(self):
index_name = "scene_spgist_include_setting"
index = SpGistIndex(name=index_name, fields=["scene"], include=["setting"])
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
self.assertEqual(constraints[index_name]["columns"], ["scene", "setting"])
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_spgist_include_not_supported(self):
index_name = "spgist_include_exception"
index = SpGistIndex(fields=["scene"], name=index_name, include=["setting"])
msg = "Covering SP-GiST indexes require PostgreSQL 14+."
with self.assertRaisesMessage(NotSupportedError, msg):
with mock.patch(
"django.db.backends.postgresql.features.DatabaseFeatures."
"supports_covering_spgist_indexes",
False,
):
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_custom_suffix(self):
class CustomSuffixIndex(PostgresIndex):
suffix = "sfx"
def create_sql(self, model, schema_editor, using="gin", **kwargs):
return super().create_sql(model, schema_editor, using=using, **kwargs)
index = CustomSuffixIndex(fields=["field"], name="custom_suffix_idx")
self.assertEqual(index.suffix, "sfx")
with connection.schema_editor() as editor:
self.assertIn(
" USING gin ",
str(index.create_sql(CharFieldModel, editor)),
)
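    # PostgresIndex subclasses select their access method via the "using"
    # argument to create_sql(); the suffix only affects generated index names.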
def test_op_class(self):
index_name = "test_op_class"
index = Index(
OpClass(Lower("field"), name="text_pattern_ops"),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
def test_op_class_descending_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
index_name = "test_op_class_descending_collation"
index = Index(
Collate(
OpClass(Lower("field"), name="text_pattern_ops").desc(nulls_last=True),
collation=collation,
),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
self.assertIn(
"COLLATE %s" % editor.quote_name(collation),
str(index.create_sql(TextFieldModel, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
table = TextFieldModel._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(table))
def test_op_class_descending_partial(self):
index_name = "test_op_class_descending_partial"
index = Index(
OpClass(Lower("field"), name="text_pattern_ops").desc(),
name=index_name,
condition=Q(field__contains="China"),
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
def test_op_class_descending_partial_tablespace(self):
index_name = "test_op_class_descending_partial_tablespace"
index = Index(
OpClass(Lower("field").desc(), name="text_pattern_ops"),
name=index_name,
condition=Q(field__contains="China"),
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
self.assertIn(
'TABLESPACE "pg_default" ',
str(index.create_sql(TextFieldModel, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
|
c46d92c766667005ea96c4c0dd5dd30f78acf95116b51877d037835d9d57e25e | import datetime
from decimal import Decimal
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db.models import (
BooleanField,
Case,
CharField,
Count,
DateTimeField,
DecimalField,
Exists,
ExpressionWrapper,
F,
FloatField,
Func,
IntegerField,
Max,
OuterRef,
Q,
Subquery,
Sum,
Value,
When,
)
from django.db.models.expressions import RawSQL
from django.db.models.functions import (
    Coalesce,
    ExtractYear,
    Floor,
    Length,
    Lower,
    Trim,
)
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import register_lookup
from .models import (
Author,
Book,
Company,
DepartmentStore,
Employee,
Publisher,
Store,
Ticket,
)
class NonAggregateAnnotationTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="Brad Dayley", age=45)
cls.a4 = Author.objects.create(name="James Bennett", age=29)
cls.a5 = Author.objects.create(name="Jeffrey Forcier", age=37)
cls.a6 = Author.objects.create(name="Paul Bissex", age=29)
cls.a7 = Author.objects.create(name="Wesley J. Chun", age=25)
cls.a8 = Author.objects.create(name="Peter Norvig", age=57)
cls.a9 = Author.objects.create(name="Stuart Russell", age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name="Apress", num_awards=3)
cls.p2 = Publisher.objects.create(name="Sams", num_awards=1)
cls.p3 = Publisher.objects.create(name="Prentice Hall", num_awards=7)
cls.p4 = Publisher.objects.create(name="Morgan Kaufmann", num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn="159059725",
name="The Definitive Guide to Django: Web Development Done Right",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn="067232959",
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
rating=3.0,
price=Decimal("23.09"),
contact=cls.a3,
publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn="159059996",
name="Practical Django Projects",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a4,
publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.b4 = Book.objects.create(
isbn="013235613",
name="Python Web Development with Django",
pages=350,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a5,
publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3),
)
cls.b5 = Book.objects.create(
isbn="013790395",
name="Artificial Intelligence: A Modern Approach",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a8,
publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15),
)
cls.b6 = Book.objects.create(
isbn="155860191",
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a8,
publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
cls.s1 = Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
cls.s2 = Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
cls.s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30),
)
cls.s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
cls.s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
cls.s3.books.add(cls.b3, cls.b4, cls.b6)
def test_basic_annotation(self):
books = Book.objects.annotate(is_book=Value(1))
for book in books:
self.assertEqual(book.is_book, 1)
def test_basic_f_annotation(self):
books = Book.objects.annotate(another_rating=F("rating"))
for book in books:
self.assertEqual(book.another_rating, book.rating)
def test_joined_annotation(self):
books = Book.objects.select_related("publisher").annotate(
num_awards=F("publisher__num_awards")
)
for book in books:
self.assertEqual(book.num_awards, book.publisher.num_awards)
def test_joined_transformed_annotation(self):
Employee.objects.bulk_create(
[
Employee(
first_name="John",
last_name="Doe",
age=18,
store=self.s1,
salary=15000,
),
Employee(
first_name="Jane",
last_name="Jones",
age=30,
store=self.s2,
salary=30000,
),
Employee(
first_name="Jo",
last_name="Smith",
age=55,
store=self.s3,
salary=50000,
),
]
)
employees = Employee.objects.annotate(
store_opened_year=F("store__original_opening__year"),
)
for employee in employees:
self.assertEqual(
employee.store_opened_year,
employee.store.original_opening.year,
)
def test_custom_transform_annotation(self):
with register_lookup(DecimalField, Floor):
books = Book.objects.annotate(floor_price=F("price__floor"))
self.assertCountEqual(
books.values_list("pk", "floor_price"),
[
(self.b1.pk, 30),
(self.b2.pk, 23),
(self.b3.pk, 29),
(self.b4.pk, 29),
(self.b5.pk, 82),
(self.b6.pk, 75),
],
)
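    # Registering Floor as a DecimalField lookup is what makes the transform
    # reachable through the double-underscore syntax inside F("price__floor").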
def test_chaining_transforms(self):
Company.objects.create(name=" Django Software Foundation ")
Company.objects.create(name="Yahoo")
with register_lookup(CharField, Trim), register_lookup(CharField, Length):
for expr in [Length("name__trim"), F("name__trim__length")]:
with self.subTest(expr=expr):
self.assertCountEqual(
Company.objects.annotate(length=expr).values("name", "length"),
[
{"name": " Django Software Foundation ", "length": 26},
{"name": "Yahoo", "length": 5},
],
)
def test_mixed_type_annotation_date_interval(self):
active = datetime.datetime(2015, 3, 20, 14, 0, 0)
duration = datetime.timedelta(hours=1)
expires = datetime.datetime(2015, 3, 20, 14, 0, 0) + duration
Ticket.objects.create(active_at=active, duration=duration)
t = Ticket.objects.annotate(
expires=ExpressionWrapper(
F("active_at") + F("duration"), output_field=DateTimeField()
)
).first()
self.assertEqual(t.expires, expires)
def test_mixed_type_annotation_numbers(self):
test = self.b1
b = Book.objects.annotate(
combined=ExpressionWrapper(
F("pages") + F("rating"), output_field=IntegerField()
)
).get(isbn=test.isbn)
combined = int(test.pages + test.rating)
self.assertEqual(b.combined, combined)
def test_empty_expression_annotation(self):
books = Book.objects.annotate(
selected=ExpressionWrapper(Q(pk__in=[]), output_field=BooleanField())
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
books = Book.objects.annotate(
selected=ExpressionWrapper(
Q(pk__in=Book.objects.none()), output_field=BooleanField()
)
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
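    # An empty pk__in (or a .none() queryset) can never match, but it still
    # annotates cleanly: every row comes back with the expression false.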
def test_full_expression_annotation(self):
books = Book.objects.annotate(
selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()),
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(book.selected for book in books))
def test_full_expression_annotation_with_aggregation(self):
qs = Book.objects.filter(isbn="159059725").annotate(
selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()),
rating_count=Count("rating"),
)
self.assertEqual([book.rating_count for book in qs], [1])
def test_aggregate_over_full_expression_annotation(self):
qs = Book.objects.annotate(
selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()),
).aggregate(Sum("selected"))
self.assertEqual(qs["selected__sum"], Book.objects.count())
def test_empty_queryset_annotation(self):
        qs = Author.objects.annotate(
            empty=Subquery(Author.objects.values("id").none())
        )
self.assertIsNone(qs.first().empty)
def test_annotate_with_aggregation(self):
books = Book.objects.annotate(is_book=Value(1), rating_count=Count("rating"))
for book in books:
self.assertEqual(book.is_book, 1)
self.assertEqual(book.rating_count, 1)
def test_combined_expression_annotation_with_aggregation(self):
book = Book.objects.annotate(
combined=ExpressionWrapper(
Value(3) * Value(4), output_field=IntegerField()
),
rating_count=Count("rating"),
).first()
self.assertEqual(book.combined, 12)
self.assertEqual(book.rating_count, 1)
def test_combined_f_expression_annotation_with_aggregation(self):
book = (
Book.objects.filter(isbn="159059725")
.annotate(
combined=ExpressionWrapper(
F("price") * F("pages"), output_field=FloatField()
),
rating_count=Count("rating"),
)
.first()
)
self.assertEqual(book.combined, 13410.0)
self.assertEqual(book.rating_count, 1)
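    # Sanity check on the fixture: price (30.00) * pages (447) = 13410.0,
    # coerced to float by the explicit FloatField output_field.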
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_q_expression_annotation_with_aggregation(self):
book = (
Book.objects.filter(isbn="159059725")
.annotate(
isnull_pubdate=ExpressionWrapper(
Q(pubdate__isnull=True),
output_field=BooleanField(),
),
rating_count=Count("rating"),
)
.first()
)
self.assertIs(book.isnull_pubdate, False)
self.assertEqual(book.rating_count, 1)
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_grouping_by_q_expression_annotation(self):
authors = (
Author.objects.annotate(
under_40=ExpressionWrapper(Q(age__lt=40), output_field=BooleanField()),
)
.values("under_40")
.annotate(
count_id=Count("id"),
)
.values("under_40", "count_id")
)
self.assertCountEqual(
authors,
[
{"under_40": False, "count_id": 3},
{"under_40": True, "count_id": 6},
],
)
def test_aggregate_over_annotation(self):
agg = Author.objects.annotate(other_age=F("age")).aggregate(
otherage_sum=Sum("other_age")
)
other_agg = Author.objects.aggregate(age_sum=Sum("age"))
self.assertEqual(agg["otherage_sum"], other_agg["age_sum"])
@skipUnlessDBFeature("can_distinct_on_fields")
def test_distinct_on_with_annotation(self):
store = Store.objects.create(
name="test store",
original_opening=datetime.datetime.now(),
friday_night_closing=datetime.time(21, 00, 00),
)
names = [
"Theodore Roosevelt",
"Eleanor Roosevelt",
"Franklin Roosevelt",
"Ned Stark",
"Catelyn Stark",
]
for name in names:
Employee.objects.create(
store=store,
first_name=name.split()[0],
last_name=name.split()[1],
age=30,
salary=2000,
)
people = Employee.objects.annotate(
name_lower=Lower("last_name"),
).distinct("name_lower")
self.assertEqual({p.last_name for p in people}, {"Stark", "Roosevelt"})
self.assertEqual(len(people), 2)
people2 = Employee.objects.annotate(
test_alias=F("store__name"),
).distinct("test_alias")
self.assertEqual(len(people2), 1)
lengths = (
Employee.objects.annotate(
name_len=Length("first_name"),
)
.distinct("name_len")
.values_list("name_len", flat=True)
)
self.assertCountEqual(lengths, [3, 7, 8])
def test_filter_annotation(self):
books = Book.objects.annotate(is_book=Value(1)).filter(is_book=1)
for book in books:
self.assertEqual(book.is_book, 1)
def test_filter_annotation_with_f(self):
        books = Book.objects.annotate(other_rating=F("rating")).filter(
            other_rating=3.5
        )
for book in books:
self.assertEqual(book.other_rating, 3.5)
def test_filter_annotation_with_double_f(self):
books = Book.objects.annotate(other_rating=F("rating")).filter(
other_rating=F("rating")
)
for book in books:
self.assertEqual(book.other_rating, book.rating)
def test_filter_agg_with_double_f(self):
books = Book.objects.annotate(sum_rating=Sum("rating")).filter(
sum_rating=F("sum_rating")
)
for book in books:
self.assertEqual(book.sum_rating, book.rating)
def test_filter_wrong_annotation(self):
with self.assertRaisesMessage(
FieldError, "Cannot resolve keyword 'nope' into field."
):
list(
Book.objects.annotate(sum_rating=Sum("rating")).filter(
sum_rating=F("nope")
)
)
def test_decimal_annotation(self):
salary = Decimal(10) ** -Employee._meta.get_field("salary").decimal_places
Employee.objects.create(
first_name="Max",
last_name="Paine",
store=Store.objects.first(),
age=23,
salary=salary,
)
self.assertEqual(
Employee.objects.annotate(new_salary=F("salary") / 10).get().new_salary,
salary / 10,
)
def test_filter_decimal_annotation(self):
qs = (
Book.objects.annotate(new_price=F("price") + 1)
.filter(new_price=Decimal(31))
.values_list("new_price")
)
self.assertEqual(qs.get(), (Decimal(31),))
def test_combined_annotation_commutative(self):
book1 = Book.objects.annotate(adjusted_rating=F("rating") + 2).get(
pk=self.b1.pk
)
book2 = Book.objects.annotate(adjusted_rating=2 + F("rating")).get(
pk=self.b1.pk
)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
book1 = Book.objects.annotate(adjusted_rating=F("rating") + None).get(
pk=self.b1.pk
)
book2 = Book.objects.annotate(adjusted_rating=None + F("rating")).get(
pk=self.b1.pk
)
self.assertIs(book1.adjusted_rating, None)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
def test_update_with_annotation(self):
book_preupdate = Book.objects.get(pk=self.b2.pk)
Book.objects.annotate(other_rating=F("rating") - 1).update(
rating=F("other_rating")
)
book_postupdate = Book.objects.get(pk=self.b2.pk)
self.assertEqual(book_preupdate.rating - 1, book_postupdate.rating)
def test_annotation_with_m2m(self):
books = (
Book.objects.annotate(author_age=F("authors__age"))
.filter(pk=self.b1.pk)
.order_by("author_age")
)
self.assertEqual(books[0].author_age, 34)
self.assertEqual(books[1].author_age, 35)
def test_annotation_reverse_m2m(self):
books = (
Book.objects.annotate(
store_name=F("store__name"),
)
.filter(
name="Practical Django Projects",
)
.order_by("store_name")
)
self.assertQuerysetEqual(
books,
["Amazon.com", "Books.com", "Mamma and Pappa's Books"],
lambda b: b.store_name,
)
def test_values_annotation(self):
"""
Annotations can reference fields in a values clause,
and contribute to an existing values clause.
"""
# annotate references a field in values()
qs = Book.objects.values("rating").annotate(other_rating=F("rating") - 1)
book = qs.get(pk=self.b1.pk)
self.assertEqual(book["rating"] - 1, book["other_rating"])
# filter refs the annotated value
book = qs.get(other_rating=4)
self.assertEqual(book["other_rating"], 4)
# can annotate an existing values with a new field
book = qs.annotate(other_isbn=F("isbn")).get(other_rating=4)
self.assertEqual(book["other_rating"], 4)
self.assertEqual(book["other_isbn"], "155860191")
def test_values_with_pk_annotation(self):
# annotate references a field in values() with pk
publishers = Publisher.objects.values("id", "book__rating").annotate(
total=Sum("book__rating")
)
for publisher in publishers.filter(pk=self.p1.pk):
self.assertEqual(publisher["book__rating"], publisher["total"])
@skipUnlessDBFeature("allows_group_by_pk")
def test_rawsql_group_by_collapse(self):
raw = RawSQL("SELECT MIN(id) FROM annotations_book", [])
qs = (
Author.objects.values("id")
.annotate(
min_book_id=raw,
count_friends=Count("friends"),
)
.order_by()
)
_, _, group_by = qs.query.get_compiler(using="default").pre_sql_setup()
self.assertEqual(len(group_by), 1)
self.assertNotEqual(raw, group_by[0])
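    # With allows_group_by_pk, the GROUP BY collapses to the single pk
    # expression; the RawSQL annotation must not leak into the grouping.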
def test_defer_annotation(self):
"""
Deferred attributes can be referenced by an annotation,
but they are not themselves deferred, and cannot be deferred.
"""
qs = Book.objects.defer("rating").annotate(other_rating=F("rating") - 1)
with self.assertNumQueries(2):
book = qs.get(other_rating=4)
self.assertEqual(book.rating, 5)
self.assertEqual(book.other_rating, 4)
with self.assertRaisesMessage(
FieldDoesNotExist, "Book has no field named 'other_rating'"
):
book = qs.defer("other_rating").get(other_rating=4)
def test_mti_annotations(self):
"""
Fields on an inherited model can be referenced by an
annotated field.
"""
d = DepartmentStore.objects.create(
name="Angus & Robinson",
original_opening=datetime.date(2014, 3, 8),
friday_night_closing=datetime.time(21, 00, 00),
chain="Westfield",
)
books = Book.objects.filter(rating__gt=4)
for b in books:
d.books.add(b)
qs = (
DepartmentStore.objects.annotate(
other_name=F("name"),
other_chain=F("chain"),
is_open=Value(True, BooleanField()),
book_isbn=F("books__isbn"),
)
.order_by("book_isbn")
.filter(chain="Westfield")
)
self.assertQuerysetEqual(
qs,
[
("Angus & Robinson", "Westfield", True, "155860191"),
("Angus & Robinson", "Westfield", True, "159059725"),
],
lambda d: (d.other_name, d.other_chain, d.is_open, d.book_isbn),
)
def test_null_annotation(self):
"""
Annotating None onto a model round-trips
"""
book = Book.objects.annotate(
no_value=Value(None, output_field=IntegerField())
).first()
self.assertIsNone(book.no_value)
def test_order_by_annotation(self):
authors = Author.objects.annotate(other_age=F("age")).order_by("other_age")
self.assertQuerysetEqual(
authors,
[
25,
29,
29,
34,
35,
37,
45,
46,
57,
],
lambda a: a.other_age,
)
def test_order_by_aggregate(self):
authors = (
Author.objects.values("age")
.annotate(age_count=Count("age"))
.order_by("age_count", "age")
)
self.assertQuerysetEqual(
authors,
[
(25, 1),
(34, 1),
(35, 1),
(37, 1),
(45, 1),
(46, 1),
(57, 1),
(29, 2),
],
lambda a: (a["age"], a["age_count"]),
)
def test_raw_sql_with_inherited_field(self):
DepartmentStore.objects.create(
name="Angus & Robinson",
original_opening=datetime.date(2014, 3, 8),
friday_night_closing=datetime.time(21),
chain="Westfield",
area=123,
)
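        # Raw SQL sees database columns, not model fields: saving area=123 and
        # reading it back via "surface" below shows that "surface" is the
        # column backing the inherited "area" field.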
tests = (
("name", "Angus & Robinson"),
("surface", 123),
("case when name='Angus & Robinson' then chain else name end", "Westfield"),
)
for sql, expected_result in tests:
with self.subTest(sql=sql):
self.assertSequenceEqual(
DepartmentStore.objects.annotate(
annotation=RawSQL(sql, ()),
).values_list("annotation", flat=True),
[expected_result],
)
def test_annotate_exists(self):
authors = Author.objects.annotate(c=Count("id")).filter(c__gt=1)
self.assertFalse(authors.exists())
def test_column_field_ordering(self):
"""
        Columns are aligned in the correct order for resolve_columns. This test
        will fail on MySQL if the column ordering is wrong. Column fields should be
aligned as:
1. extra_select
2. model_fields
3. annotation_fields
4. model_related_fields
"""
store = Store.objects.first()
Employee.objects.create(
id=1,
first_name="Max",
manager=True,
last_name="Paine",
store=store,
age=23,
salary=Decimal(50000.00),
)
Employee.objects.create(
id=2,
first_name="Buffy",
manager=False,
last_name="Summers",
store=store,
age=18,
salary=Decimal(40000.00),
)
qs = (
Employee.objects.extra(select={"random_value": "42"})
.select_related("store")
.annotate(
annotated_value=Value(17),
)
)
rows = [
(1, "Max", True, 42, "Paine", 23, Decimal(50000.00), store.name, 17),
(2, "Buffy", False, 42, "Summers", 18, Decimal(40000.00), store.name, 17),
]
self.assertQuerysetEqual(
qs.order_by("id"),
rows,
lambda e: (
e.id,
e.first_name,
e.manager,
e.random_value,
e.last_name,
e.age,
e.salary,
e.store.name,
e.annotated_value,
),
)
def test_column_field_ordering_with_deferred(self):
store = Store.objects.first()
Employee.objects.create(
id=1,
first_name="Max",
manager=True,
last_name="Paine",
store=store,
age=23,
salary=Decimal(50000.00),
)
Employee.objects.create(
id=2,
first_name="Buffy",
manager=False,
last_name="Summers",
store=store,
age=18,
salary=Decimal(40000.00),
)
qs = (
Employee.objects.extra(select={"random_value": "42"})
.select_related("store")
.annotate(
annotated_value=Value(17),
)
)
rows = [
(1, "Max", True, 42, "Paine", 23, Decimal(50000.00), store.name, 17),
(2, "Buffy", False, 42, "Summers", 18, Decimal(40000.00), store.name, 17),
]
        # Deferred columns are respected as well.
self.assertQuerysetEqual(
qs.defer("age").order_by("id"),
rows,
lambda e: (
e.id,
e.first_name,
e.manager,
e.random_value,
e.last_name,
e.age,
e.salary,
e.store.name,
e.annotated_value,
),
)
def test_custom_functions(self):
Company(
name="Apple",
motto=None,
ticker_name="APPL",
description="Beautiful Devices",
).save()
Company(
name="Django Software Foundation",
motto=None,
ticker_name=None,
description=None,
).save()
Company(
name="Google",
motto="Do No Evil",
ticker_name="GOOG",
description="Internet Company",
).save()
Company(
name="Yahoo", motto=None, ticker_name=None, description="Internet Company"
).save()
qs = Company.objects.annotate(
tagline=Func(
F("motto"),
F("ticker_name"),
F("description"),
Value("No Tag"),
function="COALESCE",
)
).order_by("name")
self.assertQuerysetEqual(
qs,
[
("Apple", "APPL"),
("Django Software Foundation", "No Tag"),
("Google", "Do No Evil"),
("Yahoo", "Internet Company"),
],
lambda c: (c.name, c.tagline),
)
def test_custom_functions_can_ref_other_functions(self):
Company(
name="Apple",
motto=None,
ticker_name="APPL",
description="Beautiful Devices",
).save()
Company(
name="Django Software Foundation",
motto=None,
ticker_name=None,
description=None,
).save()
Company(
name="Google",
motto="Do No Evil",
ticker_name="GOOG",
description="Internet Company",
).save()
Company(
name="Yahoo", motto=None, ticker_name=None, description="Internet Company"
).save()
class Lower(Func):
function = "LOWER"
qs = (
Company.objects.annotate(
tagline=Func(
F("motto"),
F("ticker_name"),
F("description"),
Value("No Tag"),
function="COALESCE",
)
)
.annotate(
tagline_lower=Lower(F("tagline")),
)
.order_by("name")
)
        # LOWER is supported by Oracle, PostgreSQL, MySQL, SQLite, and SQL Server.
self.assertQuerysetEqual(
qs,
[
("Apple", "APPL".lower()),
("Django Software Foundation", "No Tag".lower()),
("Google", "Do No Evil".lower()),
("Yahoo", "Internet Company".lower()),
],
lambda c: (c.name, c.tagline_lower),
)
def test_boolean_value_annotation(self):
books = Book.objects.annotate(
is_book=Value(True, output_field=BooleanField()),
is_pony=Value(False, output_field=BooleanField()),
is_none=Value(None, output_field=BooleanField(null=True)),
)
self.assertGreater(len(books), 0)
for book in books:
self.assertIs(book.is_book, True)
self.assertIs(book.is_pony, False)
self.assertIsNone(book.is_none)
def test_annotation_in_f_grouped_by_annotation(self):
qs = (
Publisher.objects.annotate(multiplier=Value(3))
            # Grouping by name: each publisher's sum is multiplier * num_awards.
.values("name")
.annotate(multiplied_value_sum=Sum(F("multiplier") * F("num_awards")))
.order_by()
)
self.assertCountEqual(
qs,
[
{"multiplied_value_sum": 9, "name": "Apress"},
{"multiplied_value_sum": 0, "name": "Jonno's House of Books"},
{"multiplied_value_sum": 27, "name": "Morgan Kaufmann"},
{"multiplied_value_sum": 21, "name": "Prentice Hall"},
{"multiplied_value_sum": 3, "name": "Sams"},
],
)
def test_arguments_must_be_expressions(self):
msg = "QuerySet.annotate() received non-expression(s): %s."
with self.assertRaisesMessage(TypeError, msg % BooleanField()):
Book.objects.annotate(BooleanField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.annotate(is_book=True)
with self.assertRaisesMessage(
TypeError, msg % ", ".join([str(BooleanField()), "True"])
):
Book.objects.annotate(BooleanField(), Value(False), is_book=True)
def test_chaining_annotation_filter_with_m2m(self):
qs = (
Author.objects.filter(
name="Adrian Holovaty",
friends__age=35,
)
.annotate(
jacob_name=F("friends__name"),
)
.filter(
friends__age=29,
)
.annotate(
james_name=F("friends__name"),
)
.values("jacob_name", "james_name")
)
self.assertCountEqual(
qs,
[{"jacob_name": "Jacob Kaplan-Moss", "james_name": "James Bennett"}],
)
def test_annotation_filter_with_subquery(self):
long_books_qs = (
Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=400,
)
.values("publisher")
.annotate(count=Count("pk"))
.values("count")
)
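        # OuterRef("pk") is resolved only when this queryset is embedded as a
        # Subquery in the outer Publisher query below.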
publisher_books_qs = (
Publisher.objects.annotate(
total_books=Count("book"),
)
.filter(
total_books=Subquery(long_books_qs, output_field=IntegerField()),
)
.values("name")
)
self.assertCountEqual(
publisher_books_qs, [{"name": "Sams"}, {"name": "Morgan Kaufmann"}]
)
def test_annotation_exists_aggregate_values_chaining(self):
qs = (
Book.objects.values("publisher")
.annotate(
has_authors=Exists(
Book.authors.through.objects.filter(book=OuterRef("pk"))
),
max_pubdate=Max("pubdate"),
)
.values_list("max_pubdate", flat=True)
.order_by("max_pubdate")
)
self.assertCountEqual(
qs,
[
datetime.date(1991, 10, 15),
datetime.date(2008, 3, 3),
datetime.date(2008, 6, 23),
datetime.date(2008, 11, 3),
],
)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_annotation_subquery_and_aggregate_values_chaining(self):
qs = (
Book.objects.annotate(pub_year=ExtractYear("pubdate"))
.values("pub_year")
.annotate(
top_rating=Subquery(
Book.objects.filter(pubdate__year=OuterRef("pub_year"))
.order_by("-rating")
.values("rating")[:1]
),
total_pages=Sum("pages"),
)
.values("pub_year", "total_pages", "top_rating")
)
self.assertCountEqual(
qs,
[
{"pub_year": 1991, "top_rating": 5.0, "total_pages": 946},
{"pub_year": 1995, "top_rating": 4.0, "total_pages": 1132},
{"pub_year": 2007, "top_rating": 4.5, "total_pages": 447},
{"pub_year": 2008, "top_rating": 4.0, "total_pages": 1178},
],
)
def test_annotation_subquery_outerref_transform(self):
qs = Book.objects.annotate(
top_rating_year=Subquery(
Book.objects.filter(pubdate__year=OuterRef("pubdate__year"))
.order_by("-rating")
.values("rating")[:1]
),
).values("pubdate__year", "top_rating_year")
self.assertCountEqual(
qs,
[
{"pubdate__year": 1991, "top_rating_year": 5.0},
{"pubdate__year": 1995, "top_rating_year": 4.0},
{"pubdate__year": 2007, "top_rating_year": 4.5},
{"pubdate__year": 2008, "top_rating_year": 4.0},
{"pubdate__year": 2008, "top_rating_year": 4.0},
{"pubdate__year": 2008, "top_rating_year": 4.0},
],
)
def test_annotation_aggregate_with_m2o(self):
qs = (
Author.objects.filter(age__lt=30)
.annotate(
max_pages=Case(
When(book_contact_set__isnull=True, then=Value(0)),
default=Max(F("book__pages")),
),
)
.values("name", "max_pages")
)
self.assertCountEqual(
qs,
[
{"name": "James Bennett", "max_pages": 300},
{"name": "Paul Bissex", "max_pages": 0},
{"name": "Wesley J. Chun", "max_pages": 0},
],
)
def test_alias_sql_injection(self):
crafted_alias = """injected_name" from "annotations_book"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Book.objects.annotate(**{crafted_alias: Value(1)})
def test_alias_forbidden_chars(self):
tests = [
'al"ias',
"a'lias",
"ali`as",
"alia s",
"alias\t",
"ali\nas",
"alias--",
"ali/*as",
"alias*/",
"alias;",
# [] are used by MSSQL.
"alias[",
"alias]",
]
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
for crafted_alias in tests:
with self.subTest(crafted_alias):
with self.assertRaisesMessage(ValueError, msg):
Book.objects.annotate(**{crafted_alias: Value(1)})
class AliasTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="James Bennett", age=34)
cls.a4 = Author.objects.create(name="Peter Norvig", age=57)
cls.a5 = Author.objects.create(name="Stuart Russell", age=46)
p1 = Publisher.objects.create(name="Apress", num_awards=3)
cls.b1 = Book.objects.create(
isbn="159059725",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=p1,
pubdate=datetime.date(2007, 12, 6),
name="The Definitive Guide to Django: Web Development Done Right",
)
cls.b2 = Book.objects.create(
isbn="159059996",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a3,
publisher=p1,
pubdate=datetime.date(2008, 6, 23),
name="Practical Django Projects",
)
cls.b3 = Book.objects.create(
isbn="013790395",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a4,
publisher=p1,
pubdate=datetime.date(1995, 1, 15),
name="Artificial Intelligence: A Modern Approach",
)
cls.b4 = Book.objects.create(
isbn="155860191",
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a4,
publisher=p1,
pubdate=datetime.date(1991, 10, 15),
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4, cls.a5)
cls.b4.authors.add(cls.a4)
Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
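    # Unlike annotate(), alias() makes an expression available to later
    # filter(), order_by(), or annotate() calls without selecting it, so
    # aliased names never appear as attributes on returned instances.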
def test_basic_alias(self):
qs = Book.objects.alias(is_book=Value(1))
self.assertIs(hasattr(qs.first(), "is_book"), False)
def test_basic_alias_annotation(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
).annotate(is_book=F("is_book_alias"))
self.assertIs(hasattr(qs.first(), "is_book_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_basic_alias_f_annotation(self):
qs = Book.objects.alias(another_rating_alias=F("rating")).annotate(
another_rating=F("another_rating_alias")
)
self.assertIs(hasattr(qs.first(), "another_rating_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.another_rating, book.rating)
def test_basic_alias_f_transform_annotation(self):
qs = Book.objects.alias(
pubdate_alias=F("pubdate"),
).annotate(pubdate_year=F("pubdate_alias__year"))
self.assertIs(hasattr(qs.first(), "pubdate_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.pubdate_year, book.pubdate.year)
def test_alias_after_annotation(self):
qs = Book.objects.annotate(
is_book=Value(1),
).alias(is_book_alias=F("is_book"))
book = qs.first()
self.assertIs(hasattr(book, "is_book"), True)
self.assertIs(hasattr(book, "is_book_alias"), False)
def test_overwrite_annotation_with_alias(self):
qs = Book.objects.annotate(is_book=Value(1)).alias(is_book=F("is_book"))
self.assertIs(hasattr(qs.first(), "is_book"), False)
def test_overwrite_alias_with_annotation(self):
qs = Book.objects.alias(is_book=Value(1)).annotate(is_book=F("is_book"))
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_alias_annotation_expression(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
).annotate(is_book=Coalesce("is_book_alias", 0))
self.assertIs(hasattr(qs.first(), "is_book_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_alias_default_alias_expression(self):
qs = Author.objects.alias(
Sum("book__pages"),
).filter(book__pages__sum__gt=2000)
self.assertIs(hasattr(qs.first(), "book__pages__sum"), False)
self.assertSequenceEqual(qs, [self.a4])
def test_joined_alias_annotation(self):
qs = (
Book.objects.select_related("publisher")
.alias(
num_awards_alias=F("publisher__num_awards"),
)
.annotate(num_awards=F("num_awards_alias"))
)
self.assertIs(hasattr(qs.first(), "num_awards_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.num_awards, book.publisher.num_awards)
def test_alias_annotate_with_aggregation(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
rating_count_alias=Count("rating"),
).annotate(
is_book=F("is_book_alias"),
rating_count=F("rating_count_alias"),
)
book = qs.first()
self.assertIs(hasattr(book, "is_book_alias"), False)
self.assertIs(hasattr(book, "rating_count_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
self.assertEqual(book.rating_count, 1)
def test_filter_alias_with_f(self):
qs = Book.objects.alias(
other_rating=F("rating"),
).filter(other_rating=4.5)
self.assertIs(hasattr(qs.first(), "other_rating"), False)
self.assertSequenceEqual(qs, [self.b1])
def test_filter_alias_with_double_f(self):
qs = Book.objects.alias(
other_rating=F("rating"),
).filter(other_rating=F("rating"))
self.assertIs(hasattr(qs.first(), "other_rating"), False)
self.assertEqual(qs.count(), Book.objects.count())
def test_filter_alias_agg_with_double_f(self):
qs = Book.objects.alias(
sum_rating=Sum("rating"),
).filter(sum_rating=F("sum_rating"))
self.assertIs(hasattr(qs.first(), "sum_rating"), False)
self.assertEqual(qs.count(), Book.objects.count())
def test_update_with_alias(self):
Book.objects.alias(
other_rating=F("rating") - 1,
).update(rating=F("other_rating"))
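        # b1 was created with rating=4.5, so the update lowers it to 3.5.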
self.b1.refresh_from_db()
self.assertEqual(self.b1.rating, 3.5)
def test_order_by_alias(self):
qs = Author.objects.alias(other_age=F("age")).order_by("other_age")
self.assertIs(hasattr(qs.first(), "other_age"), False)
self.assertQuerysetEqual(qs, [34, 34, 35, 46, 57], lambda a: a.age)
def test_order_by_alias_aggregate(self):
qs = (
Author.objects.values("age")
.alias(age_count=Count("age"))
.order_by("age_count", "age")
)
self.assertIs(hasattr(qs.first(), "age_count"), False)
self.assertQuerysetEqual(qs, [35, 46, 57, 34], lambda a: a["age"])
def test_dates_alias(self):
qs = Book.objects.alias(
pubdate_alias=F("pubdate"),
).dates("pubdate_alias", "month")
self.assertCountEqual(
qs,
[
datetime.date(1991, 10, 1),
datetime.date(1995, 1, 1),
datetime.date(2007, 12, 1),
datetime.date(2008, 6, 1),
],
)
def test_datetimes_alias(self):
qs = Store.objects.alias(
original_opening_alias=F("original_opening"),
).datetimes("original_opening_alias", "year")
self.assertCountEqual(
qs,
[
datetime.datetime(1994, 1, 1),
datetime.datetime(2001, 1, 1),
],
)
def test_aggregate_alias(self):
msg = (
"Cannot aggregate over the 'other_age' alias. Use annotate() to promote it."
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.alias(
other_age=F("age"),
).aggregate(otherage_sum=Sum("other_age"))
def test_defer_only_alias(self):
qs = Book.objects.alias(rating_alias=F("rating") - 1)
msg = "Book has no field named 'rating_alias'"
for operation in ["defer", "only"]:
with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldDoesNotExist, msg):
getattr(qs, operation)("rating_alias").first()
@skipUnlessDBFeature("can_distinct_on_fields")
def test_distinct_on_alias(self):
qs = Book.objects.alias(rating_alias=F("rating") - 1)
msg = "Cannot resolve keyword 'rating_alias' into field."
with self.assertRaisesMessage(FieldError, msg):
qs.distinct("rating_alias").first()
def test_values_alias(self):
qs = Book.objects.alias(rating_alias=F("rating") - 1)
msg = "Cannot select the 'rating_alias' alias. Use annotate() to promote it."
for operation in ["values", "values_list"]:
with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldError, msg):
getattr(qs, operation)("rating_alias")
def test_alias_sql_injection(self):
crafted_alias = """injected_name" from "annotations_book"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Book.objects.alias(**{crafted_alias: Value(1)})
|
5453f7fec261b4eccba6571bc8b47345d1d65df4c5ce71211722b8f93ea89509 | import os
import re
from io import StringIO
from unittest import mock, skipUnless
from django.core.management import call_command
from django.db import connection
from django.db.backends.base.introspection import TableInfo
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import PeopleMoreData, test_collation
def inspectdb_tables_only(table_name):
"""
Limit introspection to tables created for models of this app.
Some databases such as Oracle are extremely slow at introspection.
"""
return table_name.startswith("inspectdb_")
def inspectdb_views_only(table_name):
return table_name.startswith("inspectdb_") and table_name.endswith(
("_materialized", "_view")
)
def special_table_only(table_name):
return table_name.startswith("inspectdb_special")
class InspectDBTestCase(TestCase):
unique_re = re.compile(r".*unique_together = \((.+),\).*")
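    # Captures the field tuples inside a generated "unique_together = (...)"
    # line so individual tuples can be asserted on below.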
def test_stealth_table_name_filter_option(self):
out = StringIO()
call_command("inspectdb", table_name_filter=inspectdb_tables_only, stdout=out)
error_message = (
"inspectdb has examined a table that should have been filtered out."
)
        # contrib.contenttypes is one of the apps always installed when running
        # the Django test suite; check that none of its tables has been
        # inspected.
self.assertNotIn(
"class DjangoContentType(models.Model):", out.getvalue(), msg=error_message
)
def test_table_option(self):
"""
inspectdb can inspect a subset of tables by passing the table names as
arguments.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_people", stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbPeople(models.Model):", output)
self.assertNotIn("InspectdbPeopledata", output)
def make_field_type_asserter(self):
"""
Call inspectdb and return a function to validate a field type in its
output.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
def assertFieldType(name, definition):
out_def = re.search(r"^\s*%s = (models.*)$" % name, output, re.MULTILINE)[1]
self.assertEqual(definition, out_def)
return assertFieldType
def test_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
char_field_type = introspected_field_types["CharField"]
# Inspecting Oracle DB doesn't produce correct results (#19884):
# - it reports fields as blank=True when they aren't.
if (
not connection.features.interprets_empty_strings_as_nulls
and char_field_type == "CharField"
):
assertFieldType("char_field", "models.CharField(max_length=10)")
assertFieldType(
"null_char_field",
"models.CharField(max_length=10, blank=True, null=True)",
)
assertFieldType("email_field", "models.CharField(max_length=254)")
assertFieldType("file_field", "models.CharField(max_length=100)")
assertFieldType("file_path_field", "models.CharField(max_length=100)")
assertFieldType("slug_field", "models.CharField(max_length=50)")
assertFieldType("text_field", "models.TextField()")
assertFieldType("url_field", "models.CharField(max_length=200)")
if char_field_type == "TextField":
assertFieldType("char_field", "models.TextField()")
assertFieldType(
"null_char_field", "models.TextField(blank=True, null=True)"
)
assertFieldType("email_field", "models.TextField()")
assertFieldType("file_field", "models.TextField()")
assertFieldType("file_path_field", "models.TextField()")
assertFieldType("slug_field", "models.TextField()")
assertFieldType("text_field", "models.TextField()")
assertFieldType("url_field", "models.TextField()")
assertFieldType("date_field", "models.DateField()")
assertFieldType("date_time_field", "models.DateTimeField()")
if introspected_field_types["GenericIPAddressField"] == "GenericIPAddressField":
assertFieldType("gen_ip_address_field", "models.GenericIPAddressField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType("gen_ip_address_field", "models.CharField(max_length=39)")
assertFieldType(
"time_field", "models.%s()" % introspected_field_types["TimeField"]
)
if connection.features.has_native_uuid_field:
assertFieldType("uuid_field", "models.UUIDField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType("uuid_field", "models.CharField(max_length=32)")
@skipUnlessDBFeature("can_introspect_json_field", "supports_json_field")
def test_json_field(self):
out = StringIO()
call_command("inspectdb", "inspectdb_jsonfieldcolumntype", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn("json_field = models.JSONField()", output)
self.assertIn(
"null_json_field = models.JSONField(blank=True, null=True)", output
)
@skipUnlessDBFeature("supports_collation_on_charfield")
@skipUnless(test_collation, "Language collations are not supported.")
def test_char_field_db_collation(self):
out = StringIO()
call_command("inspectdb", "inspectdb_charfielddbcollation", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s')" % test_collation,
output,
)
else:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s', blank=True, null=True)" % test_collation,
output,
)
@skipUnlessDBFeature("supports_collation_on_textfield")
@skipUnless(test_collation, "Language collations are not supported.")
def test_text_field_db_collation(self):
out = StringIO()
call_command("inspectdb", "inspectdb_textfielddbcollation", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"text_field = models.TextField(db_collation='%s')" % test_collation,
output,
)
else:
self.assertIn(
"text_field = models.TextField(db_collation='%s, blank=True, "
"null=True)" % test_collation,
output,
)
def test_number_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
        auto_field_type = introspected_field_types["AutoField"]
if auto_field_type != "AutoField":
assertFieldType(
"id", "models.%s(primary_key=True) # AutoField?" % auto_field_type
)
assertFieldType(
"big_int_field", "models.%s()" % introspected_field_types["BigIntegerField"]
)
bool_field_type = introspected_field_types["BooleanField"]
assertFieldType("bool_field", "models.{}()".format(bool_field_type))
assertFieldType(
"null_bool_field",
"models.{}(blank=True, null=True)".format(bool_field_type),
)
if connection.vendor != "sqlite":
assertFieldType(
"decimal_field", "models.DecimalField(max_digits=6, decimal_places=1)"
)
else: # Guessed arguments on SQLite, see #5014
assertFieldType(
"decimal_field",
"models.DecimalField(max_digits=10, decimal_places=5) "
"# max_digits and decimal_places have been guessed, "
"as this database handles decimal fields as float",
)
assertFieldType("float_field", "models.FloatField()")
assertFieldType(
"int_field", "models.%s()" % introspected_field_types["IntegerField"]
)
assertFieldType(
"pos_int_field",
"models.%s()" % introspected_field_types["PositiveIntegerField"],
)
assertFieldType(
"pos_big_int_field",
"models.%s()" % introspected_field_types["PositiveBigIntegerField"],
)
assertFieldType(
"pos_small_int_field",
"models.%s()" % introspected_field_types["PositiveSmallIntegerField"],
)
assertFieldType(
"small_int_field",
"models.%s()" % introspected_field_types["SmallIntegerField"],
)
@skipUnlessDBFeature("can_introspect_foreign_keys")
def test_attribute_name_not_python_keyword(self):
out = StringIO()
call_command("inspectdb", table_name_filter=inspectdb_tables_only, stdout=out)
output = out.getvalue()
error_message = (
"inspectdb generated an attribute name which is a Python keyword"
)
# Recursive foreign keys should be set to 'self'
self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output)
self.assertNotIn(
"from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)",
output,
msg=error_message,
)
        # As the InspectdbPeople model is defined after InspectdbMessage, the
        # reference to it should be quoted.
self.assertIn(
"from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, "
"db_column='from_id')",
output,
)
self.assertIn(
"people_pk = models.OneToOneField(InspectdbPeople, models.DO_NOTHING, "
"primary_key=True)",
output,
)
self.assertIn(
"people_unique = models.OneToOneField(InspectdbPeople, models.DO_NOTHING)",
output,
)
@skipUnlessDBFeature("can_introspect_foreign_keys")
def test_foreign_key_to_field(self):
out = StringIO()
call_command("inspectdb", "inspectdb_foreignkeytofield", stdout=out)
self.assertIn(
"to_field_fk = models.ForeignKey('InspectdbPeoplemoredata', "
"models.DO_NOTHING, to_field='people_unique_id')",
out.getvalue(),
)
def test_digits_column_name_introspection(self):
"""Introspection of column names consist/start with digits (#16536/#17676)"""
char_field_type = connection.features.introspected_field_types["CharField"]
out = StringIO()
call_command("inspectdb", "inspectdb_digitsincolumnname", stdout=out)
output = out.getvalue()
error_message = "inspectdb generated a model field name which is a number"
self.assertNotIn(
" 123 = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_123 = models.%s" % char_field_type, output)
error_message = (
"inspectdb generated a model field name which starts with a digit"
)
self.assertNotIn(
" 4extra = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_4extra = models.%s" % char_field_type, output)
self.assertNotIn(
" 45extra = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_45extra = models.%s" % char_field_type, output)
def test_special_column_name_introspection(self):
"""
        Introspection of column names containing special characters that are
        unsuitable for Python identifiers.
"""
out = StringIO()
call_command("inspectdb", table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
base_name = connection.introspection.identifier_converter("Field")
integer_field_type = connection.features.introspected_field_types[
"IntegerField"
]
self.assertIn("field = models.%s()" % integer_field_type, output)
self.assertIn(
"field_field = models.%s(db_column='%s_')"
% (integer_field_type, base_name),
output,
)
self.assertIn(
"field_field_0 = models.%s(db_column='%s__')"
% (integer_field_type, base_name),
output,
)
self.assertIn(
"field_field_1 = models.%s(db_column='__field')" % integer_field_type,
output,
)
self.assertIn(
"prc_x = models.{}(db_column='prc(%) x')".format(integer_field_type), output
)
self.assertIn("tamaño = models.%s()" % integer_field_type, output)
def test_table_name_introspection(self):
"""
        Introspection of table names containing special characters that are
        unsuitable for Python identifiers.
"""
out = StringIO()
call_command("inspectdb", table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbSpecialTableName(models.Model):", output)
@skipUnlessDBFeature("supports_expression_indexes")
def test_table_with_func_unique_constraint(self):
out = StringIO()
call_command("inspectdb", "inspectdb_funcuniqueconstraint", stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbFuncuniqueconstraint(models.Model):", output)
def test_managed_models(self):
"""
By default the command generates models with `Meta.managed = False`.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
self.longMessage = False
self.assertIn(
" managed = False",
output,
msg="inspectdb should generate unmanaged models.",
)
def test_unique_together_meta(self):
out = StringIO()
call_command("inspectdb", "inspectdb_uniquetogether", stdout=out)
output = out.getvalue()
self.assertIn(" unique_together = (('", output)
unique_together_match = self.unique_re.findall(output)
# There should be one unique_together tuple.
self.assertEqual(len(unique_together_match), 1)
fields = unique_together_match[0]
# Fields with db_column = field name.
self.assertIn("('field1', 'field2')", fields)
# Fields from columns whose names are Python keywords.
self.assertIn("('field1', 'field2')", fields)
# Fields whose names normalize to the same Python field name and hence
# are given an integer suffix.
self.assertIn("('non_unique_column', 'non_unique_column_0')", fields)
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_unsupported_unique_together(self):
"""Unsupported index types (COALESCE here) are skipped."""
with connection.cursor() as c:
c.execute(
"CREATE UNIQUE INDEX Findex ON %s "
"(id, people_unique_id, COALESCE(message_id, -1))"
% PeopleMoreData._meta.db_table
)
try:
out = StringIO()
call_command(
"inspectdb",
table_name_filter=lambda tn: tn.startswith(
PeopleMoreData._meta.db_table
),
stdout=out,
)
output = out.getvalue()
self.assertIn("# A unique constraint could not be introspected.", output)
self.assertEqual(
self.unique_re.findall(output), ["('id', 'people_unique')"]
)
finally:
with connection.cursor() as c:
c.execute("DROP INDEX Findex")
@skipUnless(
connection.vendor == "sqlite",
"Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test",
)
def test_custom_fields(self):
"""
Introspection of columns with a custom field (#21090)
"""
out = StringIO()
with mock.patch(
"django.db.connection.introspection.data_types_reverse."
"base_data_types_reverse",
{
"text": "myfields.TextField",
"bigint": "BigIntegerField",
},
):
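            # With base_data_types_reverse patched, "text" columns resolve to
            # the custom field path and "bigint" to models.BigIntegerField.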
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
self.assertIn("text_field = myfields.TextField()", output)
self.assertIn("big_int_field = models.BigIntegerField()", output)
def test_introspection_errors(self):
"""
Introspection errors should not crash the command, and the error should
be visible in the output.
"""
out = StringIO()
with mock.patch(
"django.db.connection.introspection.get_table_list",
return_value=[TableInfo(name="nonexistent", type="t")],
):
call_command("inspectdb", stdout=out)
output = out.getvalue()
self.assertIn("# Unable to inspect table 'nonexistent'", output)
# The error message depends on the backend
self.assertIn("# The error was:", output)
class InspectDBTransactionalTests(TransactionTestCase):
available_apps = ["inspectdb"]
def test_include_views(self):
"""inspectdb --include-views creates models for database views."""
with connection.cursor() as cursor:
cursor.execute(
"CREATE VIEW inspectdb_people_view AS "
"SELECT id, name FROM inspectdb_people"
)
out = StringIO()
view_model = "class InspectdbPeopleView(models.Model):"
view_managed = "managed = False # Created from a view."
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP VIEW inspectdb_people_view")
@skipUnlessDBFeature("can_introspect_materialized_views")
def test_include_materialized_views(self):
"""inspectdb --include-views creates models for materialized views."""
with connection.cursor() as cursor:
cursor.execute(
"CREATE MATERIALIZED VIEW inspectdb_people_materialized AS "
"SELECT id, name FROM inspectdb_people"
)
out = StringIO()
view_model = "class InspectdbPeopleMaterialized(models.Model):"
view_managed = "managed = False # Created from a view."
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP MATERIALIZED VIEW inspectdb_people_materialized")
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_include_partitions(self):
"""inspectdb --include-partitions creates models for partitions."""
with connection.cursor() as cursor:
cursor.execute(
"""\
CREATE TABLE inspectdb_partition_parent (name text not null)
PARTITION BY LIST (left(upper(name), 1))
"""
)
cursor.execute(
"""\
CREATE TABLE inspectdb_partition_child
PARTITION OF inspectdb_partition_parent
FOR VALUES IN ('A', 'B', 'C')
"""
)
out = StringIO()
partition_model_parent = "class InspectdbPartitionParent(models.Model):"
partition_model_child = "class InspectdbPartitionChild(models.Model):"
partition_managed = "managed = False # Created from a partition."
try:
call_command(
"inspectdb", table_name_filter=inspectdb_tables_only, stdout=out
)
no_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, no_partitions_output)
self.assertNotIn(partition_model_child, no_partitions_output)
self.assertNotIn(partition_managed, no_partitions_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_tables_only,
include_partitions=True,
stdout=out,
)
with_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, with_partitions_output)
self.assertIn(partition_model_child, with_partitions_output)
self.assertIn(partition_managed, with_partitions_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP TABLE IF EXISTS inspectdb_partition_child")
cursor.execute("DROP TABLE IF EXISTS inspectdb_partition_parent")
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_foreign_data_wrapper(self):
with connection.cursor() as cursor:
cursor.execute("CREATE EXTENSION IF NOT EXISTS file_fdw")
cursor.execute(
"CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw"
)
cursor.execute(
"""\
CREATE FOREIGN TABLE inspectdb_iris_foreign_table (
petal_length real,
petal_width real,
sepal_length real,
sepal_width real
) SERVER inspectdb_server OPTIONS (
filename %s
)
""",
[os.devnull],
)
out = StringIO()
foreign_table_model = "class InspectdbIrisForeignTable(models.Model):"
foreign_table_managed = "managed = False"
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_tables_only,
stdout=out,
)
output = out.getvalue()
self.assertIn(foreign_table_model, output)
self.assertIn(foreign_table_managed, output)
finally:
with connection.cursor() as cursor:
cursor.execute(
"DROP FOREIGN TABLE IF EXISTS inspectdb_iris_foreign_table"
)
cursor.execute("DROP SERVER IF EXISTS inspectdb_server")
cursor.execute("DROP EXTENSION IF EXISTS file_fdw")
|
e6d824cfd9f259ac0d3b28e0195b498d06d13b8c1d5640550eb18aaed9a01161 | from django.db import connection, models
from django.db.models.functions import Lower
class People(models.Model):
name = models.CharField(max_length=255)
parent = models.ForeignKey("self", models.CASCADE)
class Message(models.Model):
from_field = models.ForeignKey(People, models.CASCADE, db_column="from_id")
class PeopleData(models.Model):
people_pk = models.ForeignKey(People, models.CASCADE, primary_key=True)
ssn = models.CharField(max_length=11)
class PeopleMoreData(models.Model):
people_unique = models.ForeignKey(People, models.CASCADE, unique=True)
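    # A unique ForeignKey is used deliberately so that inspectdb introspects
    # this column as a OneToOneField (see
    # test_attribute_name_not_python_keyword).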
message = models.ForeignKey(Message, models.CASCADE, blank=True, null=True)
license = models.CharField(max_length=255)
class ForeignKeyToField(models.Model):
to_field_fk = models.ForeignKey(
PeopleMoreData,
models.CASCADE,
to_field="people_unique",
)
class DigitsInColumnName(models.Model):
all_digits = models.CharField(max_length=11, db_column="123")
leading_digit = models.CharField(max_length=11, db_column="4extra")
leading_digits = models.CharField(max_length=11, db_column="45extra")
class SpecialName(models.Model):
field = models.IntegerField(db_column="field")
# Underscores
field_field_0 = models.IntegerField(db_column="Field_")
field_field_1 = models.IntegerField(db_column="Field__")
field_field_2 = models.IntegerField(db_column="__field")
# Other chars
prc_x = models.IntegerField(db_column="prc(%) x")
non_ascii = models.IntegerField(db_column="tamaño")
class Meta:
db_table = "inspectdb_special.table name"
class ColumnTypes(models.Model):
id = models.AutoField(primary_key=True)
big_int_field = models.BigIntegerField()
bool_field = models.BooleanField(default=False)
null_bool_field = models.BooleanField(null=True)
char_field = models.CharField(max_length=10)
null_char_field = models.CharField(max_length=10, blank=True, null=True)
date_field = models.DateField()
date_time_field = models.DateTimeField()
decimal_field = models.DecimalField(max_digits=6, decimal_places=1)
email_field = models.EmailField()
file_field = models.FileField(upload_to="unused")
file_path_field = models.FilePathField()
float_field = models.FloatField()
int_field = models.IntegerField()
gen_ip_address_field = models.GenericIPAddressField(protocol="ipv4")
pos_big_int_field = models.PositiveBigIntegerField()
pos_int_field = models.PositiveIntegerField()
pos_small_int_field = models.PositiveSmallIntegerField()
slug_field = models.SlugField()
small_int_field = models.SmallIntegerField()
text_field = models.TextField()
time_field = models.TimeField()
url_field = models.URLField()
uuid_field = models.UUIDField()
class JSONFieldColumnType(models.Model):
json_field = models.JSONField()
null_json_field = models.JSONField(blank=True, null=True)
class Meta:
required_db_features = {
"can_introspect_json_field",
"supports_json_field",
}
test_collation = connection.features.test_collations.get("non_default")
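# None when the backend defines no non-default test collation; the collation
# tests skip themselves in that case via skipUnless(test_collation, ...).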
class CharFieldDbCollation(models.Model):
char_field = models.CharField(max_length=10, db_collation=test_collation)
class Meta:
required_db_features = {"supports_collation_on_charfield"}
class TextFieldDbCollation(models.Model):
text_field = models.TextField(db_collation=test_collation)
class Meta:
required_db_features = {"supports_collation_on_textfield"}
class UniqueTogether(models.Model):
field1 = models.IntegerField()
field2 = models.CharField(max_length=10)
from_field = models.IntegerField(db_column="from")
non_unique = models.IntegerField(db_column="non__unique_column")
non_unique_0 = models.IntegerField(db_column="non_unique__column")
class Meta:
unique_together = [
("field1", "field2"),
("from_field", "field1"),
("non_unique", "non_unique_0"),
]
class FuncUniqueConstraint(models.Model):
name = models.CharField(max_length=255)
rank = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
Lower("name"), models.F("rank"), name="index_lower_name"
)
]
required_db_features = {"supports_expression_indexes"}
|
4f424c9f18cb31e7a8e9aa427a4d013041eb7655871fd525d9cf6e487ffec558 | import asyncio
import sys
import threading
from pathlib import Path
from asgiref.testing import ApplicationCommunicator
from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
from django.core.asgi import get_asgi_application
from django.core.signals import request_finished, request_started
from django.db import close_old_connections
from django.test import (
AsyncRequestFactory,
SimpleTestCase,
modify_settings,
override_settings,
)
from django.utils.http import http_date
from .urls import sync_waiter, test_filename
TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static"
@override_settings(ROOT_URLCONF="asgi.urls")
class ASGITest(SimpleTestCase):
async_request_factory = AsyncRequestFactory()
def setUp(self):
request_started.disconnect(close_old_connections)
def tearDown(self):
request_started.connect(close_old_connections)
async def test_get_asgi_application(self):
"""
get_asgi_application() returns a functioning ASGI callable.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
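        # ApplicationCommunicator (from asgiref.testing) drives the ASGI
        # callable: send_input() feeds it events and receive_output() collects
        # the messages it emits.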
await communicator.send_input({"type": "http.request"})
# Read the response.
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", b"12"),
(b"Content-Type", b"text/html; charset=utf-8"),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
async def test_file_response(self):
"""
Makes sure that FileResponse works over ASGI.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/file/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Get the file content.
with open(test_filename, "rb") as test_file:
test_file_contents = test_file.read()
# Read the response.
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
headers = response_start["headers"]
self.assertEqual(len(headers), 3)
expected_headers = {
b"Content-Length": str(len(test_file_contents)).encode("ascii"),
b"Content-Type": b"text/x-python",
b"Content-Disposition": b'inline; filename="urls.py"',
}
for key, value in headers:
try:
self.assertEqual(value, expected_headers[key])
except AssertionError:
# Windows registry may not be configured with correct
# mimetypes.
if sys.platform == "win32" and key == b"Content-Type":
self.assertEqual(value, b"text/plain")
else:
raise
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
@modify_settings(INSTALLED_APPS={"append": "django.contrib.staticfiles"})
@override_settings(
STATIC_URL="static/",
STATIC_ROOT=TEST_STATIC_ROOT,
STATICFILES_DIRS=[TEST_STATIC_ROOT],
STATICFILES_FINDERS=[
"django.contrib.staticfiles.finders.FileSystemFinder",
],
)
async def test_static_file_response(self):
application = ASGIStaticFilesHandler(get_asgi_application())
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/static/file.txt")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Get the file content.
file_path = TEST_STATIC_ROOT / "file.txt"
with open(file_path, "rb") as test_file:
test_file_contents = test_file.read()
# Read the response.
stat = file_path.stat()
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", str(len(test_file_contents)).encode("ascii")),
(b"Content-Type", b"text/plain"),
(b"Content-Disposition", b'inline; filename="file.txt"'),
(b"Last-Modified", http_date(stat.st_mtime).encode("ascii")),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
async def test_headers(self):
application = get_asgi_application()
communicator = ApplicationCommunicator(
application,
self.async_request_factory._base_scope(
path="/meta/",
headers=[
[b"content-type", b"text/plain; charset=utf-8"],
[b"content-length", b"77"],
[b"referer", b"Scotland"],
[b"referer", b"Wales"],
],
),
)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", b"19"),
(b"Content-Type", b"text/plain; charset=utf-8"),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"From Scotland,Wales")
async def test_get_query_string(self):
application = get_asgi_application()
for query_string in (b"name=Andrew", "name=Andrew"):
with self.subTest(query_string=query_string):
scope = self.async_request_factory._base_scope(
path="/",
query_string=query_string,
)
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello Andrew!")
async def test_disconnect(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.disconnect"})
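        # After a disconnect the application must not emit any response
        # messages, so waiting for output times out.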
with self.assertRaises(asyncio.TimeoutError):
await communicator.receive_output()
async def test_wrong_connection_type(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/", type="other")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
msg = "Django can only handle ASGI/HTTP connections, not other."
with self.assertRaisesMessage(ValueError, msg):
await communicator.receive_output()
async def test_non_unicode_query_string(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/", query_string=b"\xff")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 400)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"")
async def test_request_lifecycle_signals_dispatched_with_thread_sensitive(self):
class SignalHandler:
"""Track threads handler is dispatched on."""
threads = []
def __call__(self, **kwargs):
self.threads.append(threading.current_thread())
signal_handler = SignalHandler()
request_started.connect(signal_handler)
request_finished.connect(signal_handler)
# Perform a basic request.
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
# Give response.close() time to finish.
await communicator.wait()
# AsyncToSync should have executed the signals in the same thread.
request_started_thread, request_finished_thread = signal_handler.threads
self.assertEqual(request_started_thread, request_finished_thread)
request_started.disconnect(signal_handler)
request_finished.disconnect(signal_handler)
async def test_concurrent_async_uses_multiple_thread_pools(self):
sync_waiter.active_threads.clear()
# Send 2 requests concurrently
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/wait/")
communicators = []
for _ in range(2):
communicators.append(ApplicationCommunicator(application, scope))
await communicators[-1].send_input({"type": "http.request"})
# Each request must complete with a status code of 200
# If requests aren't scheduled concurrently, the barrier in the
# sync_wait view will time out, resulting in a 500 status code.
for communicator in communicators:
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
# Give response.close() time to finish.
await communicator.wait()
        # The requests should have been scheduled on different threads. Note
        # that active_threads is a set (a thread can only appear once), so
        # checking its length is sufficient.
self.assertEqual(len(sync_waiter.active_threads), 2)
sync_waiter.active_threads.clear()
|
350087ba587a27c8d8b18566431964ac9421ecef96b1db0ab4fda24b39327fe8 | from django.contrib.contenttypes.models import ContentType, ContentTypeManager
from django.db import models
from django.test import TestCase, override_settings
from django.test.utils import isolate_apps
from .models import Author, ConcreteModel, FooWithUrl, ProxyModel
class ContentTypesTests(TestCase):
def setUp(self):
ContentType.objects.clear_cache()
def tearDown(self):
ContentType.objects.clear_cache()
def test_lookup_cache(self):
"""
The content type cache (see ContentTypeManager) works correctly.
Lookups for a particular content type -- by model, ID, or natural key
-- should hit the database only on the first lookup.
"""
# At this point, a lookup for a ContentType should hit the DB
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
# A second hit, though, won't hit the DB, nor will a lookup by ID
# or natural key
with self.assertNumQueries(0):
ct = ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(0):
ContentType.objects.get_for_id(ct.id)
with self.assertNumQueries(0):
ContentType.objects.get_by_natural_key("contenttypes", "contenttype")
# Once we clear the cache, another lookup will again hit the DB
ContentType.objects.clear_cache()
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
# The same should happen with a lookup by natural key
ContentType.objects.clear_cache()
with self.assertNumQueries(1):
ContentType.objects.get_by_natural_key("contenttypes", "contenttype")
# And a second hit shouldn't hit the DB
with self.assertNumQueries(0):
ContentType.objects.get_by_natural_key("contenttypes", "contenttype")
def test_get_for_models_creation(self):
ContentType.objects.all().delete()
with self.assertNumQueries(4):
cts = ContentType.objects.get_for_models(
ContentType, FooWithUrl, ProxyModel, ConcreteModel
)
self.assertEqual(
cts,
{
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
ProxyModel: ContentType.objects.get_for_model(ProxyModel),
ConcreteModel: ContentType.objects.get_for_model(ConcreteModel),
},
)
def test_get_for_models_empty_cache(self):
# Empty cache.
with self.assertNumQueries(1):
cts = ContentType.objects.get_for_models(
ContentType, FooWithUrl, ProxyModel, ConcreteModel
)
self.assertEqual(
cts,
{
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
ProxyModel: ContentType.objects.get_for_model(ProxyModel),
ConcreteModel: ContentType.objects.get_for_model(ConcreteModel),
},
)
def test_get_for_models_partial_cache(self):
# Partial cache
ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(1):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
self.assertEqual(
cts,
{
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
},
)
def test_get_for_models_full_cache(self):
# Full cache
ContentType.objects.get_for_model(ContentType)
ContentType.objects.get_for_model(FooWithUrl)
with self.assertNumQueries(0):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
self.assertEqual(
cts,
{
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
},
)
@isolate_apps("contenttypes_tests")
def test_get_for_model_create_contenttype(self):
"""
ContentTypeManager.get_for_model() creates the corresponding content
type if it doesn't exist in the database.
"""
class ModelCreatedOnTheFly(models.Model):
name = models.CharField()
ct = ContentType.objects.get_for_model(ModelCreatedOnTheFly)
self.assertEqual(ct.app_label, "contenttypes_tests")
self.assertEqual(ct.model, "modelcreatedonthefly")
self.assertEqual(str(ct), "modelcreatedonthefly")
def test_get_for_concrete_model(self):
"""
        Make sure the `for_concrete_model` kwarg works correctly with
        concrete, proxy, and deferred models.
"""
concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
self.assertEqual(
concrete_model_ct, ContentType.objects.get_for_model(ProxyModel)
)
self.assertEqual(
concrete_model_ct,
ContentType.objects.get_for_model(ConcreteModel, for_concrete_model=False),
)
proxy_model_ct = ContentType.objects.get_for_model(
ProxyModel, for_concrete_model=False
)
self.assertNotEqual(concrete_model_ct, proxy_model_ct)
        # Make sure deferred models are correctly handled.
ConcreteModel.objects.create(name="Concrete")
DeferredConcreteModel = ConcreteModel.objects.only("pk").get().__class__
DeferredProxyModel = ProxyModel.objects.only("pk").get().__class__
self.assertEqual(
concrete_model_ct, ContentType.objects.get_for_model(DeferredConcreteModel)
)
self.assertEqual(
concrete_model_ct,
ContentType.objects.get_for_model(
DeferredConcreteModel, for_concrete_model=False
),
)
self.assertEqual(
concrete_model_ct, ContentType.objects.get_for_model(DeferredProxyModel)
)
self.assertEqual(
proxy_model_ct,
ContentType.objects.get_for_model(
DeferredProxyModel, for_concrete_model=False
),
)
def test_get_for_concrete_models(self):
"""
        Make sure the `for_concrete_models` kwarg works correctly with
        concrete, proxy, and deferred models.
"""
concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel)
self.assertEqual(
cts,
{
ConcreteModel: concrete_model_ct,
ProxyModel: concrete_model_ct,
},
)
proxy_model_ct = ContentType.objects.get_for_model(
ProxyModel, for_concrete_model=False
)
cts = ContentType.objects.get_for_models(
ConcreteModel, ProxyModel, for_concrete_models=False
)
self.assertEqual(
cts,
{
ConcreteModel: concrete_model_ct,
ProxyModel: proxy_model_ct,
},
)
        # Make sure deferred models are correctly handled.
ConcreteModel.objects.create(name="Concrete")
DeferredConcreteModel = ConcreteModel.objects.only("pk").get().__class__
DeferredProxyModel = ProxyModel.objects.only("pk").get().__class__
cts = ContentType.objects.get_for_models(
DeferredConcreteModel, DeferredProxyModel
)
self.assertEqual(
cts,
{
DeferredConcreteModel: concrete_model_ct,
DeferredProxyModel: concrete_model_ct,
},
)
cts = ContentType.objects.get_for_models(
DeferredConcreteModel, DeferredProxyModel, for_concrete_models=False
)
self.assertEqual(
cts,
{
DeferredConcreteModel: concrete_model_ct,
DeferredProxyModel: proxy_model_ct,
},
)
def test_cache_not_shared_between_managers(self):
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(0):
ContentType.objects.get_for_model(ContentType)
other_manager = ContentTypeManager()
other_manager.model = ContentType
with self.assertNumQueries(1):
other_manager.get_for_model(ContentType)
with self.assertNumQueries(0):
other_manager.get_for_model(ContentType)
def test_missing_model(self):
"""
Displaying content types in admin (or anywhere) doesn't break on
leftover content type records in the DB for which no model is defined
anymore.
"""
ct = ContentType.objects.create(
app_label="contenttypes",
model="OldModel",
)
self.assertEqual(str(ct), "OldModel")
self.assertIsNone(ct.model_class())
# Stale ContentTypes can be fetched like any other object.
ct_fetched = ContentType.objects.get_for_id(ct.pk)
self.assertIsNone(ct_fetched.model_class())
def test_missing_model_with_existing_model_name(self):
"""
Displaying content types in admin (or anywhere) doesn't break on
leftover content type records in the DB for which no model is defined
anymore, even if a model with the same name exists in another app.
"""
# Create a stale ContentType that matches the name of an existing
# model.
ContentType.objects.create(app_label="contenttypes", model="author")
ContentType.objects.clear_cache()
# get_for_models() should work as expected for existing models.
cts = ContentType.objects.get_for_models(ContentType, Author)
self.assertEqual(
cts,
{
ContentType: ContentType.objects.get_for_model(ContentType),
Author: ContentType.objects.get_for_model(Author),
},
)
def test_str(self):
ct = ContentType.objects.get(app_label="contenttypes_tests", model="site")
self.assertEqual(str(ct), "contenttypes_tests | site")
def test_app_labeled_name(self):
ct = ContentType.objects.get(app_label="contenttypes_tests", model="site")
self.assertEqual(ct.app_labeled_name, "contenttypes_tests | site")
def test_app_labeled_name_unknown_model(self):
ct = ContentType(app_label="contenttypes_tests", model="unknown")
self.assertEqual(ct.app_labeled_name, "unknown")
class TestRouter:
def db_for_read(self, model, **hints):
return "other"
def db_for_write(self, model, **hints):
return "default"
def allow_relation(self, obj1, obj2, **hints):
return True
@override_settings(DATABASE_ROUTERS=[TestRouter()])
class ContentTypesMultidbTests(TestCase):
databases = {"default", "other"}
def test_multidb(self):
"""
When using multiple databases, ContentType.objects.get_for_model() uses
db_for_read().
"""
ContentType.objects.clear_cache()
with self.assertNumQueries(0, using="default"), self.assertNumQueries(
1, using="other"
):
ContentType.objects.get_for_model(Author)
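    # A sketch of ours (not part of the original suite; the leading
    # underscore keeps it from being collected as a test): db_manager() pins
    # the manager to an explicit connection, bypassing the router's
    # db_for_read() that the test above relies on.
    def _example_pinned_db_manager(self):
        ContentType.objects.clear_cache()
        with self.assertNumQueries(1, using="default"), self.assertNumQueries(
            0, using="other"
        ):
            ContentType.objects.db_manager("default").get_for_model(Author)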
|
c8277207dc6d7f23190a27272530eb85c07d405739f113f767b966ffa1b3d689 | import datetime
from django.test import TestCase
from .models import Person
class RecursiveM2MTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a, cls.b, cls.c, cls.d = [
Person.objects.create(name=name)
for name in ["Anne", "Bill", "Chuck", "David"]
]
cls.a.friends.add(cls.b, cls.c)
# Add m2m for Anne and Chuck in reverse direction.
cls.d.friends.add(cls.a, cls.c)
def test_recursive_m2m_all(self):
for person, friends in (
(self.a, [self.b, self.c, self.d]),
(self.b, [self.a]),
(self.c, [self.a, self.d]),
(self.d, [self.a, self.c]),
):
with self.subTest(person=person):
self.assertSequenceEqual(person.friends.all(), friends)
def test_recursive_m2m_reverse_add(self):
# Add m2m for Anne in reverse direction.
self.b.friends.add(self.a)
self.assertSequenceEqual(self.a.friends.all(), [self.b, self.c, self.d])
self.assertSequenceEqual(self.b.friends.all(), [self.a])
def test_recursive_m2m_remove(self):
self.b.friends.remove(self.a)
self.assertSequenceEqual(self.a.friends.all(), [self.c, self.d])
self.assertSequenceEqual(self.b.friends.all(), [])
def test_recursive_m2m_clear(self):
# Clear m2m for Anne.
self.a.friends.clear()
self.assertSequenceEqual(self.a.friends.all(), [])
# Reverse m2m relationships should be removed.
self.assertSequenceEqual(self.c.friends.all(), [self.d])
self.assertSequenceEqual(self.d.friends.all(), [self.c])
def test_recursive_m2m_add_via_related_name(self):
# Add m2m with custom related name for Anne in reverse direction.
self.d.stalkers.add(self.a)
self.assertSequenceEqual(self.a.idols.all(), [self.d])
self.assertSequenceEqual(self.a.stalkers.all(), [])
def test_recursive_m2m_add_in_both_directions(self):
# Adding the same relation twice results in a single relation.
self.a.idols.add(self.d)
self.d.stalkers.add(self.a)
self.assertSequenceEqual(self.a.idols.all(), [self.d])
def test_recursive_m2m_related_to_self(self):
self.a.idols.add(self.a)
self.assertSequenceEqual(self.a.idols.all(), [self.a])
self.assertSequenceEqual(self.a.stalkers.all(), [self.a])
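    # For context, a sketch of ours of the field powering the idols /
    # stalkers accessors exercised above (mirroring, but not quoting, this
    # app's models.py): a non-symmetrical self-referential M2M, e.g.
    #
    #     idols = models.ManyToManyField(
    #         "self", symmetrical=False, related_name="stalkers"
    #     )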
class RecursiveSymmetricalM2MThroughTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a, cls.b, cls.c, cls.d = [
Person.objects.create(name=name)
for name in ["Anne", "Bill", "Chuck", "David"]
]
cls.a.colleagues.add(
cls.b,
cls.c,
through_defaults={
"first_meet": datetime.date(2013, 1, 5),
},
)
# Add m2m for Anne and Chuck in reverse direction.
cls.d.colleagues.add(
cls.a,
cls.c,
through_defaults={
"first_meet": datetime.date(2015, 6, 15),
},
)
def test_recursive_m2m_all(self):
for person, colleagues in (
(self.a, [self.b, self.c, self.d]),
(self.b, [self.a]),
(self.c, [self.a, self.d]),
(self.d, [self.a, self.c]),
):
with self.subTest(person=person):
self.assertSequenceEqual(person.colleagues.all(), colleagues)
def test_recursive_m2m_reverse_add(self):
# Add m2m for Anne in reverse direction.
self.b.colleagues.add(
self.a,
through_defaults={
"first_meet": datetime.date(2013, 1, 5),
},
)
self.assertCountEqual(self.a.colleagues.all(), [self.b, self.c, self.d])
self.assertSequenceEqual(self.b.colleagues.all(), [self.a])
def test_recursive_m2m_remove(self):
self.b.colleagues.remove(self.a)
self.assertSequenceEqual(self.a.colleagues.all(), [self.c, self.d])
self.assertSequenceEqual(self.b.colleagues.all(), [])
def test_recursive_m2m_clear(self):
# Clear m2m for Anne.
self.a.colleagues.clear()
        self.assertSequenceEqual(self.a.colleagues.all(), [])
        # Reverse m2m relationships should be removed.
self.assertSequenceEqual(self.c.colleagues.all(), [self.d])
self.assertSequenceEqual(self.d.colleagues.all(), [self.c])
def test_recursive_m2m_set(self):
# Set new relationships for Chuck.
self.c.colleagues.set(
[self.b, self.d],
through_defaults={
"first_meet": datetime.date(2013, 1, 5),
},
)
self.assertSequenceEqual(self.c.colleagues.order_by("name"), [self.b, self.d])
        # Reverse m2m relationships are removed.
self.assertSequenceEqual(self.a.colleagues.order_by("name"), [self.b, self.d])
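    # For context, a sketch of ours: the colleagues accessor used above comes
    # from a symmetrical self-referential M2M with an explicit through model
    # (which is what makes through_defaults necessary), roughly:
    #
    #     colleagues = models.ManyToManyField(
    #         "self", symmetrical=True, through="Colleague"
    #     )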
|
6ba79e1ad04fcfacfd973865da72ad3914300df7a5b0db4460b5f68261184162 | import sys
import threading
import time
from unittest import skipIf, skipUnless
from django.db import (
DatabaseError,
Error,
IntegrityError,
OperationalError,
connection,
transaction,
)
from django.test import (
TestCase,
TransactionTestCase,
skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import Reporter
@skipUnlessDBFeature("uses_savepoints")
class AtomicTests(TransactionTestCase):
"""
Tests for the atomic decorator and context manager.
The tests make assertions on internal attributes because there isn't a
robust way to ask the database for its current transaction state.
Since the decorator syntax is converted into a context manager (see the
implementation), there are only a few basic tests with the decorator
syntax and the bulk of the tests use the context manager syntax.
"""
available_apps = ["transactions"]
def test_decorator_syntax_commit(self):
@transaction.atomic
def make_reporter():
return Reporter.objects.create(first_name="Tintin")
reporter = make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_decorator_syntax_rollback(self):
@transaction.atomic
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_alternate_decorator_syntax_commit(self):
@transaction.atomic()
def make_reporter():
return Reporter.objects.create(first_name="Tintin")
reporter = make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_alternate_decorator_syntax_rollback(self):
@transaction.atomic()
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_commit(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_nested_commit_commit(self):
with transaction.atomic():
reporter1 = Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
reporter2 = Reporter.objects.create(
first_name="Archibald", last_name="Haddock"
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_nested_commit_rollback(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_nested_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic():
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_nested_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_commit_commit(self):
with transaction.atomic():
reporter1 = Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
reporter2 = Reporter.objects.create(
first_name="Archibald", last_name="Haddock"
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_merged_commit_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
# Writes in the outer block are rolled back too.
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_reuse_commit_commit(self):
atomic = transaction.atomic()
with atomic:
reporter1 = Reporter.objects.create(first_name="Tintin")
with atomic:
reporter2 = Reporter.objects.create(
first_name="Archibald", last_name="Haddock"
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_reuse_commit_rollback(self):
atomic = transaction.atomic()
with atomic:
reporter = Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
def test_reuse_rollback_commit(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with atomic:
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_reuse_rollback_rollback(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_force_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
            # The atomic block shouldn't roll back, but force it anyway.
self.assertFalse(transaction.get_rollback())
transaction.set_rollback(True)
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_prevent_rollback(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
sid = transaction.savepoint()
            # Trigger a database error inside an inner atomic block without a
            # savepoint.
with self.assertRaises(DatabaseError):
with transaction.atomic(savepoint=False):
with connection.cursor() as cursor:
cursor.execute("SELECT no_such_col FROM transactions_reporter")
            # Prevent atomic from rolling back since we're recovering manually.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
transaction.savepoint_rollback(sid)
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
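    # A sketch of ours (not part of the original suite; the leading
    # underscore keeps it from being collected as a test) spelling out the
    # decorator / context manager equivalence described in the class
    # docstring.
    def _example_decorator_is_context_manager(self):
        @transaction.atomic
        def make_reporter():
            return Reporter.objects.create(first_name="Tintin")

        # The decorated function above behaves like this explicit version:
        def make_reporter_explicit():
            with transaction.atomic():
                return Reporter.objects.create(first_name="Tintin")

        make_reporter()
        make_reporter_explicit()
        self.assertEqual(Reporter.objects.count(), 2)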
class AtomicInsideTransactionTests(AtomicTests):
"""All basic tests for atomic should also pass within an existing transaction."""
def setUp(self):
self.atomic = transaction.atomic()
self.atomic.__enter__()
def tearDown(self):
self.atomic.__exit__(*sys.exc_info())
class AtomicWithoutAutocommitTests(AtomicTests):
"""All basic tests for atomic should also pass when autocommit is turned off."""
def setUp(self):
transaction.set_autocommit(False)
def tearDown(self):
        # The tests access the database after exercising 'atomic', initiating
        # a transaction; a rollback is required before restoring autocommit.
transaction.rollback()
transaction.set_autocommit(True)
@skipUnlessDBFeature("uses_savepoints")
class AtomicMergeTests(TransactionTestCase):
"""Test merging transactions with savepoint=False."""
available_apps = ["transactions"]
def test_merged_outer_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
                # The third insert couldn't be rolled back. Temporarily mark
                # the connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
            # The second insert couldn't be rolled back. Temporarily mark the
            # connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The first block has a savepoint and must roll back.
self.assertSequenceEqual(Reporter.objects.all(), [])
def test_merged_inner_savepoint_rollback(self):
with transaction.atomic():
reporter = Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
                # The third insert couldn't be rolled back. Temporarily mark
                # the connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The second block has a savepoint and must roll back.
self.assertEqual(Reporter.objects.count(), 1)
self.assertSequenceEqual(Reporter.objects.all(), [reporter])
@skipUnlessDBFeature("uses_savepoints")
class AtomicErrorsTests(TransactionTestCase):
available_apps = ["transactions"]
forbidden_atomic_msg = "This is forbidden when an 'atomic' block is active."
def test_atomic_prevents_setting_autocommit(self):
autocommit = transaction.get_autocommit()
with transaction.atomic():
with self.assertRaisesMessage(
transaction.TransactionManagementError, self.forbidden_atomic_msg
):
transaction.set_autocommit(not autocommit)
# Make sure autocommit wasn't changed.
self.assertEqual(connection.autocommit, autocommit)
def test_atomic_prevents_calling_transaction_methods(self):
with transaction.atomic():
with self.assertRaisesMessage(
transaction.TransactionManagementError, self.forbidden_atomic_msg
):
transaction.commit()
with self.assertRaisesMessage(
transaction.TransactionManagementError, self.forbidden_atomic_msg
):
transaction.rollback()
def test_atomic_prevents_queries_in_broken_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# The transaction is marked as needing rollback.
msg = (
"An error occurred in the current transaction. You can't "
"execute queries until the end of the 'atomic' block."
)
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
r2.save(force_update=True)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Haddock")
@skipIfDBFeature("atomic_transactions")
def test_atomic_allows_queries_after_fixing_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# Mark the transaction as no longer needing rollback.
transaction.set_rollback(False)
r2.save(force_update=True)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Calculus")
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_atomic_prevents_queries_in_broken_transaction_after_client_close(self):
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
connection.close()
# The connection is closed and the transaction is marked as
# needing rollback. This will raise an InterfaceError on databases
# that refuse to create cursors on closed connections (PostgreSQL)
# and a TransactionManagementError on other databases.
with self.assertRaises(Error):
Reporter.objects.create(first_name="Cuthbert", last_name="Calculus")
        # The connection is usable again.
self.assertEqual(Reporter.objects.count(), 0)
@skipUnlessDBFeature("uses_savepoints")
@skipUnless(connection.vendor == "mysql", "MySQL-specific behaviors")
class AtomicMySQLTests(TransactionTestCase):
available_apps = ["transactions"]
@skipIf(threading is None, "Test requires threading")
def test_implicit_savepoint_rollback(self):
"""MySQL implicitly rolls back savepoints when it deadlocks (#22291)."""
Reporter.objects.create(id=1)
Reporter.objects.create(id=2)
main_thread_ready = threading.Event()
def other_thread():
try:
with transaction.atomic():
Reporter.objects.select_for_update().get(id=1)
main_thread_ready.wait()
# 1) This line locks... (see below for 2)
Reporter.objects.exclude(id=1).update(id=2)
finally:
# This is the thread-local connection, not the main connection.
connection.close()
other_thread = threading.Thread(target=other_thread)
other_thread.start()
with self.assertRaisesMessage(OperationalError, "Deadlock found"):
# Double atomic to enter a transaction and create a savepoint.
with transaction.atomic():
with transaction.atomic():
Reporter.objects.select_for_update().get(id=2)
main_thread_ready.set()
# The two threads can't be synchronized with an event here
# because the other thread locks. Sleep for a little while.
time.sleep(1)
# 2) ... and this line deadlocks. (see above for 1)
Reporter.objects.exclude(id=2).update(id=1)
other_thread.join()
class AtomicMiscTests(TransactionTestCase):
available_apps = ["transactions"]
def test_wrap_callable_instance(self):
"""#20028 -- Atomic must support wrapping callable instances."""
class Callable:
def __call__(self):
pass
# Must not raise an exception
transaction.atomic(Callable())
@skipUnlessDBFeature("can_release_savepoints")
def test_atomic_does_not_leak_savepoints_on_failure(self):
"""#23074 -- Savepoints must be released after rollback."""
# Expect an error when rolling back a savepoint that doesn't exist.
# Done outside of the transaction block to ensure proper recovery.
with self.assertRaises(Error):
# Start a plain transaction.
with transaction.atomic():
# Swallow the intentional error raised in the sub-transaction.
with self.assertRaisesMessage(Exception, "Oops"):
# Start a sub-transaction with a savepoint.
with transaction.atomic():
sid = connection.savepoint_ids[-1]
raise Exception("Oops")
# This is expected to fail because the savepoint no longer exists.
connection.savepoint_rollback(sid)
def test_mark_for_rollback_on_error_in_transaction(self):
with transaction.atomic(savepoint=False):
# Swallow the intentional error raised.
with self.assertRaisesMessage(Exception, "Oops"):
# Wrap in `mark_for_rollback_on_error` to check if the
# transaction is marked broken.
with transaction.mark_for_rollback_on_error():
# Ensure that we are still in a good state.
self.assertFalse(transaction.get_rollback())
raise Exception("Oops")
# mark_for_rollback_on_error marked the transaction as broken …
self.assertTrue(transaction.get_rollback())
# … and further queries fail.
msg = "You can't execute queries until the end of the 'atomic' block."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
Reporter.objects.create()
        # Transaction errors are reset at the end of a transaction, so this
# should just work.
Reporter.objects.create()
def test_mark_for_rollback_on_error_in_autocommit(self):
self.assertTrue(transaction.get_autocommit())
# Swallow the intentional error raised.
with self.assertRaisesMessage(Exception, "Oops"):
# Wrap in `mark_for_rollback_on_error` to check if the transaction
# is marked broken.
with transaction.mark_for_rollback_on_error():
# Ensure that we are still in a good state.
self.assertFalse(transaction.get_connection().needs_rollback)
raise Exception("Oops")
# Ensure that `mark_for_rollback_on_error` did not mark the transaction
# as broken, since we are in autocommit mode …
self.assertFalse(transaction.get_connection().needs_rollback)
# … and further queries work nicely.
Reporter.objects.create()
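    # A usage sketch of ours (not collected as a test): the two tests above
    # boil down to this pattern -- mark_for_rollback_on_error() is safe in
    # code that may run either inside or outside an atomic block, because it
    # only marks the transaction broken when one is actually active.
    def _example_mark_for_rollback_usage(self):
        try:
            with transaction.mark_for_rollback_on_error():
                raise ValueError("boom")
        except ValueError:
            pass
        # In autocommit mode nothing was marked broken; queries still work.
        Reporter.objects.create()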
class NonAutocommitTests(TransactionTestCase):
available_apps = []
def setUp(self):
transaction.set_autocommit(False)
def tearDown(self):
transaction.rollback()
transaction.set_autocommit(True)
def test_orm_query_after_error_and_rollback(self):
"""
ORM queries are allowed after an error and a rollback in non-autocommit
mode (#27504).
"""
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
transaction.rollback()
Reporter.objects.last()
def test_orm_query_without_autocommit(self):
"""#24921 -- ORM queries must be possible after set_autocommit(False)."""
Reporter.objects.create(first_name="Tintin")
class DurableTestsBase:
available_apps = ["transactions"]
def test_commit(self):
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name="Tintin")
self.assertEqual(Reporter.objects.get(), reporter)
def test_nested_outer_durable(self):
with transaction.atomic(durable=True):
reporter1 = Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
reporter2 = Reporter.objects.create(
first_name="Archibald",
last_name="Haddock",
)
self.assertSequenceEqual(Reporter.objects.all(), [reporter2, reporter1])
def test_nested_both_durable(self):
msg = "A durable atomic block cannot be nested within another atomic block."
with transaction.atomic(durable=True):
with self.assertRaisesMessage(RuntimeError, msg):
with transaction.atomic(durable=True):
pass
def test_nested_inner_durable(self):
msg = "A durable atomic block cannot be nested within another atomic block."
with transaction.atomic():
with self.assertRaisesMessage(RuntimeError, msg):
with transaction.atomic(durable=True):
pass
def test_sequence_of_durables(self):
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name="Tintin 1")
self.assertEqual(Reporter.objects.get(first_name="Tintin 1"), reporter)
with transaction.atomic(durable=True):
reporter = Reporter.objects.create(first_name="Tintin 2")
self.assertEqual(Reporter.objects.get(first_name="Tintin 2"), reporter)
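    # A usage sketch of ours (not collected as a test): durable=True is how
    # calling code asserts that its changes are committed the moment the
    # block exits, i.e. that it is not silently nested inside an enclosing
    # transaction that might still roll back later.
    def _example_durable_guarantee(self):
        with transaction.atomic(durable=True):
            reporter = Reporter.objects.create(first_name="Tintin")
        # Reaching this line means the row above is durably committed.
        self.assertEqual(Reporter.objects.get(), reporter)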
class DurableTransactionTests(DurableTestsBase, TransactionTestCase):
pass
class DurableTests(DurableTestsBase, TestCase):
pass
|
df64643309d419f1d65aab6d0c3846791a9629e88ce921464b0180adb597f5d6 | import threading
from datetime import datetime, timedelta
from unittest import mock
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from django.db import DEFAULT_DB_ALIAS, DatabaseError, connections, models
from django.db.models.manager import BaseManager
from django.db.models.query import MAX_GET_RESULTS, EmptyQuerySet
from django.test import (
SimpleTestCase,
TestCase,
TransactionTestCase,
skipUnlessDBFeature,
)
from django.utils.translation import gettext_lazy
from .models import (
Article,
ArticleSelectOnSave,
ChildPrimaryKeyWithDefault,
FeaturedArticle,
PrimaryKeyWithDefault,
SelfRef,
)
class ModelInstanceCreationTests(TestCase):
def test_object_is_not_written_to_database_until_save_was_called(self):
a = Article(
id=None,
headline="Parrot programs in Python",
pub_date=datetime(2005, 7, 28),
)
self.assertIsNone(a.id)
self.assertEqual(Article.objects.count(), 0)
# Save it into the database. You have to call save() explicitly.
a.save()
self.assertIsNotNone(a.id)
self.assertEqual(Article.objects.count(), 1)
def test_can_initialize_model_instance_using_positional_arguments(self):
"""
You can initialize a model instance using positional arguments,
which should match the field order as defined in the model.
"""
a = Article(None, "Second article", datetime(2005, 7, 29))
a.save()
self.assertEqual(a.headline, "Second article")
self.assertEqual(a.pub_date, datetime(2005, 7, 29, 0, 0))
def test_can_create_instance_using_kwargs(self):
a = Article(
id=None,
headline="Third article",
pub_date=datetime(2005, 7, 30),
)
a.save()
self.assertEqual(a.headline, "Third article")
self.assertEqual(a.pub_date, datetime(2005, 7, 30, 0, 0))
def test_autofields_generate_different_values_for_each_instance(self):
a1 = Article.objects.create(
headline="First", pub_date=datetime(2005, 7, 30, 0, 0)
)
a2 = Article.objects.create(
headline="First", pub_date=datetime(2005, 7, 30, 0, 0)
)
a3 = Article.objects.create(
headline="First", pub_date=datetime(2005, 7, 30, 0, 0)
)
self.assertNotEqual(a3.id, a1.id)
self.assertNotEqual(a3.id, a2.id)
def test_can_mix_and_match_position_and_kwargs(self):
# You can also mix and match position and keyword arguments, but
# be sure not to duplicate field information.
a = Article(None, "Fourth article", pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, "Fourth article")
def test_positional_and_keyword_args_for_the_same_field(self):
msg = "Article() got both positional and keyword arguments for field '%s'."
with self.assertRaisesMessage(TypeError, msg % "headline"):
Article(None, "Fifth article", headline="Other headline.")
with self.assertRaisesMessage(TypeError, msg % "headline"):
Article(None, "Sixth article", headline="")
with self.assertRaisesMessage(TypeError, msg % "pub_date"):
Article(None, "Seventh article", datetime(2021, 3, 1), pub_date=None)
def test_cannot_create_instance_with_invalid_kwargs(self):
msg = "Article() got unexpected keyword arguments: 'foo'"
with self.assertRaisesMessage(TypeError, msg):
Article(
id=None,
headline="Some headline",
pub_date=datetime(2005, 7, 31),
foo="bar",
)
msg = "Article() got unexpected keyword arguments: 'foo', 'bar'"
with self.assertRaisesMessage(TypeError, msg):
Article(
id=None,
headline="Some headline",
pub_date=datetime(2005, 7, 31),
foo="bar",
bar="baz",
)
def test_can_leave_off_value_for_autofield_and_it_gets_value_on_save(self):
"""
You can leave off the value for an AutoField when creating an
object, because it'll get filled in automatically when you save().
"""
a = Article(headline="Article 5", pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, "Article 5")
self.assertIsNotNone(a.id)
def test_leaving_off_a_field_with_default_set_the_default_will_be_saved(self):
a = Article(pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, "Default headline")
def test_for_datetimefields_saves_as_much_precision_as_was_given(self):
"""as much precision in *seconds*"""
a1 = Article(
headline="Article 7",
pub_date=datetime(2005, 7, 31, 12, 30),
)
a1.save()
self.assertEqual(
Article.objects.get(id__exact=a1.id).pub_date, datetime(2005, 7, 31, 12, 30)
)
a2 = Article(
headline="Article 8",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a2.save()
self.assertEqual(
Article.objects.get(id__exact=a2.id).pub_date,
datetime(2005, 7, 31, 12, 30, 45),
)
def test_saving_an_object_again_does_not_create_a_new_object(self):
a = Article(headline="original", pub_date=datetime(2014, 5, 16))
a.save()
current_id = a.id
a.save()
self.assertEqual(a.id, current_id)
a.headline = "Updated headline"
a.save()
self.assertEqual(a.id, current_id)
def test_querysets_checking_for_membership(self):
headlines = ["Parrot programs in Python", "Second article", "Third article"]
some_pub_date = datetime(2014, 5, 16, 12, 1)
for headline in headlines:
Article(headline=headline, pub_date=some_pub_date).save()
a = Article(headline="Some headline", pub_date=some_pub_date)
a.save()
# You can use 'in' to test for membership...
self.assertIn(a, Article.objects.all())
# ... but there will often be more efficient ways if that is all you need:
self.assertTrue(Article.objects.filter(id=a.id).exists())
def test_save_primary_with_default(self):
# An UPDATE attempt is skipped when a primary key has default.
with self.assertNumQueries(1):
PrimaryKeyWithDefault().save()
def test_save_parent_primary_with_default(self):
# An UPDATE attempt is skipped when an inherited primary key has
# default.
with self.assertNumQueries(2):
ChildPrimaryKeyWithDefault().save()
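    # For contrast, a sketch of ours (not collected as a test): with a
    # manually assigned pk on a model whose pk has no default, save() first
    # attempts an UPDATE and then falls back to an INSERT, so a brand-new row
    # costs two queries instead of the single INSERT seen above.
    def _example_update_then_insert(self):
        with self.assertNumQueries(2):
            Article(
                id=999999,
                headline="Explicit pk",
                pub_date=datetime(2005, 7, 31),
            ).save()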
class ModelTest(TestCase):
def test_objects_attribute_is_only_available_on_the_class_itself(self):
with self.assertRaisesMessage(
AttributeError, "Manager isn't accessible via Article instances"
):
getattr(
Article(),
"objects",
)
self.assertFalse(hasattr(Article(), "objects"))
self.assertTrue(hasattr(Article, "objects"))
def test_queryset_delete_removes_all_items_in_that_queryset(self):
headlines = ["An article", "Article One", "Amazing article", "Boring article"]
some_pub_date = datetime(2014, 5, 16, 12, 1)
for headline in headlines:
Article(headline=headline, pub_date=some_pub_date).save()
self.assertQuerysetEqual(
Article.objects.order_by("headline"),
sorted(headlines),
transform=lambda a: a.headline,
)
Article.objects.filter(headline__startswith="A").delete()
self.assertEqual(Article.objects.get().headline, "Boring article")
def test_not_equal_and_equal_operators_behave_as_expected_on_instances(self):
some_pub_date = datetime(2014, 5, 16, 12, 1)
a1 = Article.objects.create(headline="First", pub_date=some_pub_date)
a2 = Article.objects.create(headline="Second", pub_date=some_pub_date)
self.assertNotEqual(a1, a2)
self.assertEqual(a1, Article.objects.get(id__exact=a1.id))
self.assertNotEqual(
Article.objects.get(id__exact=a1.id), Article.objects.get(id__exact=a2.id)
)
def test_microsecond_precision(self):
a9 = Article(
headline="Article 9",
pub_date=datetime(2005, 7, 31, 12, 30, 45, 180),
)
a9.save()
self.assertEqual(
Article.objects.get(pk=a9.pk).pub_date,
datetime(2005, 7, 31, 12, 30, 45, 180),
)
def test_manually_specify_primary_key(self):
# You can manually specify the primary key when creating a new object.
a101 = Article(
id=101,
headline="Article 101",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a101.save()
a101 = Article.objects.get(pk=101)
self.assertEqual(a101.headline, "Article 101")
def test_create_method(self):
# You can create saved objects in a single step
a10 = Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
self.assertEqual(Article.objects.get(headline="Article 10"), a10)
def test_year_lookup_edge_case(self):
# Edge-case test: A year lookup should retrieve all objects in
# the given year, including Jan. 1 and Dec. 31.
a11 = Article.objects.create(
headline="Article 11",
pub_date=datetime(2008, 1, 1),
)
a12 = Article.objects.create(
headline="Article 12",
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__year=2008),
[a11, a12],
)
def test_unicode_data(self):
# Unicode data works, too.
a = Article(
headline="\u6797\u539f \u3081\u3050\u307f",
pub_date=datetime(2005, 7, 28),
)
a.save()
self.assertEqual(
Article.objects.get(pk=a.id).headline, "\u6797\u539f \u3081\u3050\u307f"
)
def test_hash_function(self):
# Model instances have a hash function, so they can be used in sets
# or as dictionary keys. Two models compare as equal if their primary
# keys are equal.
a10 = Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a11 = Article.objects.create(
headline="Article 11",
pub_date=datetime(2008, 1, 1),
)
a12 = Article.objects.create(
headline="Article 12",
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
s = {a10, a11, a12}
self.assertIn(Article.objects.get(headline="Article 11"), s)
def test_extra_method_select_argument_with_dashes_and_values(self):
# The 'select' argument to extra() supports names with dashes in
# them, as long as you use values().
Article.objects.bulk_create(
[
Article(
headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45)
),
Article(headline="Article 11", pub_date=datetime(2008, 1, 1)),
Article(
headline="Article 12",
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
),
]
)
dicts = (
Article.objects.filter(pub_date__year=2008)
.extra(select={"dashed-value": "1"})
.values("headline", "dashed-value")
)
self.assertEqual(
[sorted(d.items()) for d in dicts],
[
[("dashed-value", 1), ("headline", "Article 11")],
[("dashed-value", 1), ("headline", "Article 12")],
],
)
def test_extra_method_select_argument_with_dashes(self):
# If you use 'select' with extra() and names containing dashes on a
# query that's *not* a values() query, those extra 'select' values
# will silently be ignored.
Article.objects.bulk_create(
[
Article(
headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45)
),
Article(headline="Article 11", pub_date=datetime(2008, 1, 1)),
Article(
headline="Article 12",
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
),
]
)
articles = Article.objects.filter(pub_date__year=2008).extra(
select={"dashed-value": "1", "undashedvalue": "2"}
)
self.assertEqual(articles[0].undashedvalue, 2)
def test_create_relation_with_gettext_lazy(self):
"""
gettext_lazy objects work when saving model instances
through various methods. Refs #10498.
"""
notlazy = "test"
lazy = gettext_lazy(notlazy)
Article.objects.create(headline=lazy, pub_date=datetime.now())
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
        # Assignment + save() also works with Promise objects.
article.headline = lazy
article.save()
self.assertEqual(article.headline, notlazy)
# test .update()
Article.objects.update(headline=lazy)
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
        # bulk_create() works with Promise objects as well.
Article.objects.all().delete()
Article.objects.bulk_create([Article(headline=lazy, pub_date=datetime.now())])
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
def test_emptyqs(self):
msg = "EmptyQuerySet can't be instantiated"
with self.assertRaisesMessage(TypeError, msg):
EmptyQuerySet()
self.assertIsInstance(Article.objects.none(), EmptyQuerySet)
self.assertNotIsInstance("", EmptyQuerySet)
def test_emptyqs_values(self):
# test for #15959
Article.objects.create(headline="foo", pub_date=datetime.now())
with self.assertNumQueries(0):
qs = Article.objects.none().values_list("pk")
self.assertIsInstance(qs, EmptyQuerySet)
self.assertEqual(len(qs), 0)
def test_emptyqs_customqs(self):
# A hacky test for custom QuerySet subclass - refs #17271
Article.objects.create(headline="foo", pub_date=datetime.now())
class CustomQuerySet(models.QuerySet):
def do_something(self):
return "did something"
qs = Article.objects.all()
qs.__class__ = CustomQuerySet
qs = qs.none()
with self.assertNumQueries(0):
self.assertEqual(len(qs), 0)
self.assertIsInstance(qs, EmptyQuerySet)
self.assertEqual(qs.do_something(), "did something")
def test_emptyqs_values_order(self):
# Tests for ticket #17712
Article.objects.create(headline="foo", pub_date=datetime.now())
with self.assertNumQueries(0):
self.assertEqual(
len(Article.objects.none().values_list("id").order_by("id")), 0
)
with self.assertNumQueries(0):
self.assertEqual(
len(
Article.objects.none().filter(
id__in=Article.objects.values_list("id", flat=True)
)
),
0,
)
@skipUnlessDBFeature("can_distinct_on_fields")
def test_emptyqs_distinct(self):
# Tests for #19426
Article.objects.create(headline="foo", pub_date=datetime.now())
with self.assertNumQueries(0):
self.assertEqual(
len(Article.objects.none().distinct("headline", "pub_date")), 0
)
def test_ticket_20278(self):
sr = SelfRef.objects.create()
with self.assertRaises(ObjectDoesNotExist):
SelfRef.objects.get(selfref=sr)
def test_eq(self):
self.assertEqual(Article(id=1), Article(id=1))
self.assertNotEqual(Article(id=1), object())
self.assertNotEqual(object(), Article(id=1))
a = Article()
self.assertEqual(a, a)
self.assertEqual(a, mock.ANY)
self.assertNotEqual(Article(), a)
def test_hash(self):
# Value based on PK
self.assertEqual(hash(Article(id=1)), hash(1))
msg = "Model instances without primary key value are unhashable"
with self.assertRaisesMessage(TypeError, msg):
# No PK value -> unhashable (because save() would then change
# hash)
hash(Article())
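    # An illustration of ours (not collected as a test): because __hash__ is
    # based on the primary key, saved instances round-trip through sets and
    # dict keys.
    def _example_hash_round_trip(self):
        a = Article.objects.create(pub_date=datetime.now())
        cache = {a: "cached"}
        self.assertEqual(cache[Article.objects.get(pk=a.pk)], "cached")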
def test_missing_hash_not_inherited(self):
class NoHash(models.Model):
def __eq__(self, other):
                return super().__eq__(other)
with self.assertRaisesMessage(TypeError, "unhashable type: 'NoHash'"):
hash(NoHash(id=1))
def test_specified_parent_hash_inherited(self):
class ParentHash(models.Model):
def __eq__(self, other):
                return super().__eq__(other)
__hash__ = models.Model.__hash__
self.assertEqual(hash(ParentHash(id=1)), 1)
def test_delete_and_access_field(self):
# Accessing a field after it's deleted from a model reloads its value.
pub_date = datetime.now()
article = Article.objects.create(headline="foo", pub_date=pub_date)
new_pub_date = article.pub_date + timedelta(days=10)
article.headline = "bar"
article.pub_date = new_pub_date
del article.headline
with self.assertNumQueries(1):
self.assertEqual(article.headline, "foo")
# Fields that weren't deleted aren't reloaded.
self.assertEqual(article.pub_date, new_pub_date)
def test_multiple_objects_max_num_fetched(self):
max_results = MAX_GET_RESULTS - 1
Article.objects.bulk_create(
Article(headline="Area %s" % i, pub_date=datetime(2005, 7, 28))
for i in range(max_results)
)
self.assertRaisesMessage(
MultipleObjectsReturned,
"get() returned more than one Article -- it returned %d!" % max_results,
Article.objects.get,
headline__startswith="Area",
)
Article.objects.create(
headline="Area %s" % max_results, pub_date=datetime(2005, 7, 28)
)
self.assertRaisesMessage(
MultipleObjectsReturned,
"get() returned more than one Article -- it returned more than %d!"
% max_results,
Article.objects.get,
headline__startswith="Area",
)
class ModelLookupTest(TestCase):
@classmethod
def setUpTestData(cls):
# Create an Article.
cls.a = Article(
id=None,
headline="Swallow programs in Python",
pub_date=datetime(2005, 7, 28),
)
# Save it into the database. You have to call save() explicitly.
cls.a.save()
def test_all_lookup(self):
# Change values by changing the attributes, then calling save().
self.a.headline = "Parrot programs in Python"
self.a.save()
# Article.objects.all() returns all the articles in the database.
self.assertSequenceEqual(Article.objects.all(), [self.a])
def test_rich_lookup(self):
# Django provides a rich database lookup API.
self.assertEqual(Article.objects.get(id__exact=self.a.id), self.a)
self.assertEqual(Article.objects.get(headline__startswith="Swallow"), self.a)
self.assertEqual(Article.objects.get(pub_date__year=2005), self.a)
self.assertEqual(
Article.objects.get(pub_date__year=2005, pub_date__month=7), self.a
)
self.assertEqual(
Article.objects.get(
pub_date__year=2005, pub_date__month=7, pub_date__day=28
),
self.a,
)
self.assertEqual(Article.objects.get(pub_date__week_day=5), self.a)
def test_equal_lookup(self):
# The "__exact" lookup type can be omitted, as a shortcut.
self.assertEqual(Article.objects.get(id=self.a.id), self.a)
self.assertEqual(
Article.objects.get(headline="Swallow programs in Python"), self.a
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__year=2005),
[self.a],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__year=2004),
[],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__year=2005, pub_date__month=7),
[self.a],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__week_day=5),
[self.a],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__week_day=6),
[],
)
def test_does_not_exist(self):
# Django raises an Article.DoesNotExist exception for get() if the
# parameters don't match any object.
with self.assertRaisesMessage(
ObjectDoesNotExist, "Article matching query does not exist."
):
Article.objects.get(
id__exact=2000,
)
# To avoid dict-ordering related errors check only one lookup
# in single assert.
with self.assertRaises(ObjectDoesNotExist):
Article.objects.get(pub_date__year=2005, pub_date__month=8)
with self.assertRaisesMessage(
ObjectDoesNotExist, "Article matching query does not exist."
):
Article.objects.get(
pub_date__week_day=6,
)
def test_lookup_by_primary_key(self):
# Lookup by a primary key is the most common case, so Django
# provides a shortcut for primary-key exact lookups.
# The following is identical to articles.get(id=a.id).
self.assertEqual(Article.objects.get(pk=self.a.id), self.a)
# pk can be used as a shortcut for the primary key name in any query.
self.assertSequenceEqual(Article.objects.filter(pk__in=[self.a.id]), [self.a])
# Model instances of the same type and same ID are considered equal.
a = Article.objects.get(pk=self.a.id)
b = Article.objects.get(pk=self.a.id)
self.assertEqual(a, b)
def test_too_many(self):
# Create a very similar object
a = Article(
id=None,
headline="Swallow bites Python",
pub_date=datetime(2005, 7, 28),
)
a.save()
self.assertEqual(Article.objects.count(), 2)
# Django raises an Article.MultipleObjectsReturned exception if the
# lookup matches more than one object
msg = "get() returned more than one Article -- it returned 2!"
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(
headline__startswith="Swallow",
)
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(
pub_date__year=2005,
)
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(pub_date__year=2005, pub_date__month=7)
class ConcurrentSaveTests(TransactionTestCase):
available_apps = ["basic"]
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_concurrent_delete_with_save(self):
"""
Test fetching, deleting and finally saving an object - we should get
an insert in this case.
"""
a = Article.objects.create(headline="foo", pub_date=datetime.now())
exceptions = []
def deleter():
try:
                # Do not delete 'a' directly; doing so alters its state.
Article.objects.filter(pk=a.pk).delete()
except Exception as e:
exceptions.append(e)
finally:
connections[DEFAULT_DB_ALIAS].close()
self.assertEqual(len(exceptions), 0)
t = threading.Thread(target=deleter)
t.start()
t.join()
a.save()
self.assertEqual(Article.objects.get(pk=a.pk).headline, "foo")
class ManagerTest(SimpleTestCase):
QUERYSET_PROXY_METHODS = [
"none",
"count",
"dates",
"datetimes",
"distinct",
"extra",
"get",
"get_or_create",
"update_or_create",
"create",
"bulk_create",
"bulk_update",
"filter",
"aggregate",
"annotate",
"alias",
"complex_filter",
"exclude",
"in_bulk",
"iterator",
"earliest",
"latest",
"first",
"last",
"order_by",
"select_for_update",
"select_related",
"prefetch_related",
"values",
"values_list",
"update",
"reverse",
"defer",
"only",
"using",
"exists",
"contains",
"explain",
"_insert",
"_update",
"raw",
"union",
"intersection",
"difference",
"aaggregate",
"abulk_create",
"abulk_update",
"acontains",
"acount",
"acreate",
"aearliest",
"aexists",
"aexplain",
"afirst",
"aget",
"aget_or_create",
"ain_bulk",
"aiterator",
"alast",
"alatest",
"aupdate",
"aupdate_or_create",
]
def test_manager_methods(self):
"""
This test ensures that the correct set of methods from `QuerySet`
are copied onto `Manager`.
It's particularly useful to prevent accidentally leaking new methods
into `Manager`. New `QuerySet` methods that should also be copied onto
`Manager` will need to be added to `ManagerTest.QUERYSET_PROXY_METHODS`.
"""
self.assertEqual(
sorted(BaseManager._get_queryset_methods(models.QuerySet)),
sorted(self.QUERYSET_PROXY_METHODS),
)
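    # A sketch of ours (not collected as a test) of the copying machinery the
    # docstring above describes: from_queryset() builds a Manager class whose
    # proxy methods are generated from a custom QuerySet. SampleQuerySet and
    # SampleManager are hypothetical names.
    def _example_from_queryset(self):
        class SampleQuerySet(models.QuerySet):
            def sample(self):
                return self.none()

        SampleManager = BaseManager.from_queryset(SampleQuerySet)
        # The custom QuerySet method was copied onto the manager class.
        self.assertTrue(hasattr(SampleManager, "sample"))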
class SelectOnSaveTests(TestCase):
def test_select_on_save(self):
a1 = Article.objects.create(pub_date=datetime.now())
with self.assertNumQueries(1):
a1.save()
asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now())
with self.assertNumQueries(2):
asos.save()
with self.assertNumQueries(1):
asos.save(force_update=True)
Article.objects.all().delete()
with self.assertRaisesMessage(
DatabaseError, "Forced update did not affect any rows."
):
with self.assertNumQueries(1):
asos.save(force_update=True)
def test_select_on_save_lying_update(self):
"""
select_on_save works correctly if the database doesn't return correct
information about matched rows from UPDATE.
"""
        # Change the manager to not return "row matched" for update().
        # We are going to change Article's _base_manager class dynamically.
        # This is a bit of a hack, but it seems hard to test this properly
        # otherwise. It must be Article's manager that is patched, because
        # proxy models use their parent model's _base_manager.
orig_class = Article._base_manager._queryset_class
class FakeQuerySet(models.QuerySet):
# Make sure the _update method below is in fact called.
called = False
def _update(self, *args, **kwargs):
FakeQuerySet.called = True
super()._update(*args, **kwargs)
return 0
try:
Article._base_manager._queryset_class = FakeQuerySet
asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now())
with self.assertNumQueries(3):
asos.save()
self.assertTrue(FakeQuerySet.called)
# This is not wanted behavior, but this is how Django has always
# behaved for databases that do not return correct information
# about matched rows for UPDATE.
with self.assertRaisesMessage(
DatabaseError, "Forced update did not affect any rows."
):
asos.save(force_update=True)
msg = (
"An error occurred in the current transaction. You can't "
"execute queries until the end of the 'atomic' block."
)
with self.assertRaisesMessage(DatabaseError, msg):
asos.save(update_fields=["pub_date"])
finally:
Article._base_manager._queryset_class = orig_class
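    # For reference, a sketch of ours: select_on_save is opted into per model
    # via Meta (compare this app's models.py), e.g.
    #
    #     class ArticleSelectOnSave(Article):
    #         class Meta:
    #             proxy = True
    #             select_on_save = True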
class ModelRefreshTests(TestCase):
def test_refresh(self):
a = Article.objects.create(pub_date=datetime.now())
Article.objects.create(pub_date=datetime.now())
Article.objects.filter(pk=a.pk).update(headline="new headline")
with self.assertNumQueries(1):
a.refresh_from_db()
self.assertEqual(a.headline, "new headline")
orig_pub_date = a.pub_date
new_pub_date = a.pub_date + timedelta(10)
Article.objects.update(headline="new headline 2", pub_date=new_pub_date)
with self.assertNumQueries(1):
a.refresh_from_db(fields=["headline"])
self.assertEqual(a.headline, "new headline 2")
self.assertEqual(a.pub_date, orig_pub_date)
with self.assertNumQueries(1):
a.refresh_from_db()
self.assertEqual(a.pub_date, new_pub_date)
def test_unknown_kwarg(self):
s = SelfRef.objects.create()
msg = "refresh_from_db() got an unexpected keyword argument 'unknown_kwarg'"
with self.assertRaisesMessage(TypeError, msg):
s.refresh_from_db(unknown_kwarg=10)
def test_lookup_in_fields(self):
s = SelfRef.objects.create()
msg = (
'Found "__" in fields argument. Relations and transforms are not allowed '
"in fields."
)
with self.assertRaisesMessage(ValueError, msg):
s.refresh_from_db(fields=["foo__bar"])
def test_refresh_fk(self):
s1 = SelfRef.objects.create()
s2 = SelfRef.objects.create()
s3 = SelfRef.objects.create(selfref=s1)
s3_copy = SelfRef.objects.get(pk=s3.pk)
s3_copy.selfref.touched = True
s3.selfref = s2
s3.save()
with self.assertNumQueries(1):
s3_copy.refresh_from_db()
with self.assertNumQueries(1):
# The old related instance was thrown away (the selfref_id has
            # changed). It needs to be reloaded on access, so one query is
            # executed.
self.assertFalse(hasattr(s3_copy.selfref, "touched"))
self.assertEqual(s3_copy.selfref, s2)
def test_refresh_null_fk(self):
s1 = SelfRef.objects.create()
s2 = SelfRef.objects.create(selfref=s1)
s2.selfref = None
s2.refresh_from_db()
self.assertEqual(s2.selfref, s1)
def test_refresh_unsaved(self):
pub_date = datetime.now()
a = Article.objects.create(pub_date=pub_date)
a2 = Article(id=a.pk)
with self.assertNumQueries(1):
a2.refresh_from_db()
self.assertEqual(a2.pub_date, pub_date)
self.assertEqual(a2._state.db, "default")
def test_refresh_fk_on_delete_set_null(self):
a = Article.objects.create(
headline="Parrot programs in Python",
pub_date=datetime(2005, 7, 28),
)
s1 = SelfRef.objects.create(article=a)
a.delete()
s1.refresh_from_db()
self.assertIsNone(s1.article_id)
self.assertIsNone(s1.article)
def test_refresh_no_fields(self):
a = Article.objects.create(pub_date=datetime.now())
with self.assertNumQueries(0):
a.refresh_from_db(fields=[])
def test_refresh_clears_reverse_related(self):
"""refresh_from_db() clear cached reverse relations."""
article = Article.objects.create(
headline="Parrot programs in Python",
pub_date=datetime(2005, 7, 28),
)
self.assertFalse(hasattr(article, "featured"))
FeaturedArticle.objects.create(article_id=article.pk)
article.refresh_from_db()
self.assertTrue(hasattr(article, "featured"))
def test_refresh_clears_one_to_one_field(self):
article = Article.objects.create(
headline="Parrot programs in Python",
pub_date=datetime(2005, 7, 28),
)
featured = FeaturedArticle.objects.create(article_id=article.pk)
self.assertEqual(featured.article.headline, "Parrot programs in Python")
article.headline = "Parrot programs in Python 2.0"
article.save()
featured.refresh_from_db()
self.assertEqual(featured.article.headline, "Parrot programs in Python 2.0")
def test_prefetched_cache_cleared(self):
a = Article.objects.create(pub_date=datetime(2005, 7, 28))
s = SelfRef.objects.create(article=a)
# refresh_from_db() without fields=[...]
a1_prefetched = Article.objects.prefetch_related("selfref_set").first()
self.assertCountEqual(a1_prefetched.selfref_set.all(), [s])
s.article = None
s.save()
# Relation is cleared and prefetch cache is stale.
self.assertCountEqual(a1_prefetched.selfref_set.all(), [s])
a1_prefetched.refresh_from_db()
# Cache was cleared and new results are available.
self.assertCountEqual(a1_prefetched.selfref_set.all(), [])
# refresh_from_db() with fields=[...]
a2_prefetched = Article.objects.prefetch_related("selfref_set").first()
self.assertCountEqual(a2_prefetched.selfref_set.all(), [])
s.article = a
s.save()
# Relation is added and prefetch cache is stale.
self.assertCountEqual(a2_prefetched.selfref_set.all(), [])
a2_prefetched.refresh_from_db(fields=["selfref_set"])
# Cache was cleared and new results are available.
self.assertCountEqual(a2_prefetched.selfref_set.all(), [s])
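    # A usage sketch of ours (not collected as a test): fields=[...] limits
    # the reload to specific columns, which avoids clobbering unsaved local
    # changes on other fields.
    def _example_partial_refresh(self):
        a = Article.objects.create(headline="old", pub_date=datetime.now())
        Article.objects.filter(pk=a.pk).update(headline="new")
        a.headline = "local edit"
        a.refresh_from_db(fields=["pub_date"])
        # Only pub_date was reloaded; the local headline edit survives.
        self.assertEqual(a.headline, "local edit")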
|
f04119828ce382c8c3fd595ad7c5df67f8a75d451fcada2e0e89d42f16b8adc5 | from django.core import serializers
from django.db import connection
from django.test import TestCase
from .models import (
Child,
FKAsPKNoNaturalKey,
FKDataNaturalKey,
NaturalKeyAnchor,
NaturalKeyThing,
NaturalPKWithDefault,
)
from .tests import register_tests
class NaturalKeySerializerTests(TestCase):
pass
def natural_key_serializer_test(self, format):
# Create all the objects defined in the test data
with connection.constraint_checks_disabled():
objects = [
            NaturalKeyAnchor.objects.create(id=1100, data="Natural Key Anchor"),
FKDataNaturalKey.objects.create(id=1101, data_id=1100),
FKDataNaturalKey.objects.create(id=1102, data_id=None),
]
# Serialize the test database
serialized_data = serializers.serialize(
format, objects, indent=2, use_natural_foreign_keys=True
)
for obj in serializers.deserialize(format, serialized_data):
obj.save()
# Assert that the deserialized data is the same
# as the original source
for obj in objects:
instance = obj.__class__.objects.get(id=obj.pk)
self.assertEqual(
obj.data,
instance.data,
"Objects with PK=%d not equal; expected '%s' (%s), got '%s' (%s)"
% (
obj.pk,
obj.data,
type(obj.data),
                instance.data,
type(instance.data),
),
)
def natural_key_test(self, format):
book1 = {
"data": "978-1590597255",
"title": "The Definitive Guide to Django: Web Development Done Right",
}
book2 = {"data": "978-1590599969", "title": "Practical Django Projects"}
# Create the books.
adrian = NaturalKeyAnchor.objects.create(**book1)
james = NaturalKeyAnchor.objects.create(**book2)
# Serialize the books.
string_data = serializers.serialize(
format,
NaturalKeyAnchor.objects.all(),
indent=2,
use_natural_foreign_keys=True,
use_natural_primary_keys=True,
)
    # Delete one book (to prove that the natural key generation will only
    # restore the primary keys of books found in the database via the
    # get_by_natural_key manager method).
james.delete()
# Deserialize and test.
books = list(serializers.deserialize(format, string_data))
self.assertCountEqual(
[(book.object.title, book.object.pk) for book in books],
[
(book1["title"], adrian.pk),
(book2["title"], None),
],
)
def natural_pk_mti_test(self, format):
"""
If serializing objects in a multi-table inheritance relationship using
natural primary keys, the natural foreign key for the parent is output in
the fields of the child so it's possible to relate the child to the parent
when deserializing.
"""
child_1 = Child.objects.create(parent_data="1", child_data="1")
child_2 = Child.objects.create(parent_data="2", child_data="2")
string_data = serializers.serialize(
format,
[child_1.parent_ptr, child_2.parent_ptr, child_2, child_1],
use_natural_foreign_keys=True,
use_natural_primary_keys=True,
)
child_1.delete()
child_2.delete()
for obj in serializers.deserialize(format, string_data):
obj.save()
children = Child.objects.all()
self.assertEqual(len(children), 2)
for child in children:
# If it's possible to find the superclass from the subclass and it's
# the correct superclass, it's working.
self.assertEqual(child.child_data, child.parent_data)
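# A sketch of ours (not part of the original suite) of the model-side hooks
# the natural-key machinery above relies on; Person and PersonManager are
# hypothetical names:
#
#     class PersonManager(models.Manager):
#         def get_by_natural_key(self, first, last):
#             return self.get(first_name=first, last_name=last)
#
#     class Person(models.Model):
#         first_name = models.CharField(max_length=50)
#         last_name = models.CharField(max_length=50)
#         objects = PersonManager()
#
#         def natural_key(self):
#             return (self.first_name, self.last_name)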
def forward_ref_fk_test(self, format):
t1 = NaturalKeyThing.objects.create(key="t1")
t2 = NaturalKeyThing.objects.create(key="t2", other_thing=t1)
t1.other_thing = t2
t1.save()
string_data = serializers.serialize(
format,
[t1, t2],
use_natural_primary_keys=True,
use_natural_foreign_keys=True,
)
NaturalKeyThing.objects.all().delete()
objs_with_deferred_fields = []
for obj in serializers.deserialize(
format, string_data, handle_forward_references=True
):
obj.save()
if obj.deferred_fields:
objs_with_deferred_fields.append(obj)
for obj in objs_with_deferred_fields:
obj.save_deferred_fields()
t1 = NaturalKeyThing.objects.get(key="t1")
t2 = NaturalKeyThing.objects.get(key="t2")
self.assertEqual(t1.other_thing, t2)
self.assertEqual(t2.other_thing, t1)
def forward_ref_fk_with_error_test(self, format):
t1 = NaturalKeyThing.objects.create(key="t1")
t2 = NaturalKeyThing.objects.create(key="t2", other_thing=t1)
t1.other_thing = t2
t1.save()
string_data = serializers.serialize(
format,
[t1],
use_natural_primary_keys=True,
use_natural_foreign_keys=True,
)
NaturalKeyThing.objects.all().delete()
objs_with_deferred_fields = []
for obj in serializers.deserialize(
format, string_data, handle_forward_references=True
):
obj.save()
if obj.deferred_fields:
objs_with_deferred_fields.append(obj)
obj = objs_with_deferred_fields[0]
msg = "NaturalKeyThing matching query does not exist"
with self.assertRaisesMessage(serializers.base.DeserializationError, msg):
obj.save_deferred_fields()
def forward_ref_m2m_test(self, format):
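    # Unlike the error variant below, all three objects are serialized, so
    # every deferred m2m reference can be resolved once the full set is saved.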
t1 = NaturalKeyThing.objects.create(key="t1")
t2 = NaturalKeyThing.objects.create(key="t2")
t3 = NaturalKeyThing.objects.create(key="t3")
t1.other_things.set([t2, t3])
string_data = serializers.serialize(
format,
[t1, t2, t3],
use_natural_primary_keys=True,
use_natural_foreign_keys=True,
)
NaturalKeyThing.objects.all().delete()
objs_with_deferred_fields = []
for obj in serializers.deserialize(
format, string_data, handle_forward_references=True
):
obj.save()
if obj.deferred_fields:
objs_with_deferred_fields.append(obj)
for obj in objs_with_deferred_fields:
obj.save_deferred_fields()
t1 = NaturalKeyThing.objects.get(key="t1")
t2 = NaturalKeyThing.objects.get(key="t2")
t3 = NaturalKeyThing.objects.get(key="t3")
self.assertCountEqual(t1.other_things.all(), [t2, t3])
def forward_ref_m2m_with_error_test(self, format):
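    # t3 appears in t1's serialized m2m set but is never serialized as an
    # object itself, so the deferred reference cannot be resolved.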
t1 = NaturalKeyThing.objects.create(key="t1")
t2 = NaturalKeyThing.objects.create(key="t2")
t3 = NaturalKeyThing.objects.create(key="t3")
t1.other_things.set([t2, t3])
t1.save()
string_data = serializers.serialize(
format,
[t1, t2],
use_natural_primary_keys=True,
use_natural_foreign_keys=True,
)
NaturalKeyThing.objects.all().delete()
objs_with_deferred_fields = []
for obj in serializers.deserialize(
format, string_data, handle_forward_references=True
):
obj.save()
if obj.deferred_fields:
objs_with_deferred_fields.append(obj)
obj = objs_with_deferred_fields[0]
msg = "NaturalKeyThing matching query does not exist"
with self.assertRaisesMessage(serializers.base.DeserializationError, msg):
obj.save_deferred_fields()
def pk_with_default(self, format):
"""
The deserializer works with natural keys when the primary key has a default
value.
"""
obj = NaturalPKWithDefault.objects.create(name="name")
string_data = serializers.serialize(
format,
NaturalPKWithDefault.objects.all(),
use_natural_foreign_keys=True,
use_natural_primary_keys=True,
)
objs = list(serializers.deserialize(format, string_data))
self.assertEqual(len(objs), 1)
self.assertEqual(objs[0].object.pk, obj.pk)
def fk_as_pk_natural_key_not_called(self, format):
"""
The deserializer doesn't rely on natural keys when a model has a custom
primary key that is a ForeignKey.
"""
o1 = NaturalKeyAnchor.objects.create(data="978-1590599969")
o2 = FKAsPKNoNaturalKey.objects.create(pk_fk=o1)
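    # FKAsPKNoNaturalKey's primary key *is* the ForeignKey, so o2.pk equals
    # o1.pk and both deserialized objects share that pk below.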
serialized_data = serializers.serialize(format, [o1, o2])
deserialized_objects = list(serializers.deserialize(format, serialized_data))
self.assertEqual(len(deserialized_objects), 2)
for obj in deserialized_objects:
self.assertEqual(obj.object.pk, o1.pk)
# Dynamically register tests for each serializer
register_tests(
NaturalKeySerializerTests,
"test_%s_natural_key_serializer",
natural_key_serializer_test,
)
register_tests(
NaturalKeySerializerTests, "test_%s_serializer_natural_keys", natural_key_test
)
register_tests(
NaturalKeySerializerTests, "test_%s_serializer_natural_pks_mti", natural_pk_mti_test
)
register_tests(
NaturalKeySerializerTests, "test_%s_forward_references_fks", forward_ref_fk_test
)
register_tests(
NaturalKeySerializerTests,
"test_%s_forward_references_fk_errors",
forward_ref_fk_with_error_test,
)
register_tests(
NaturalKeySerializerTests, "test_%s_forward_references_m2ms", forward_ref_m2m_test
)
register_tests(
NaturalKeySerializerTests,
"test_%s_forward_references_m2m_errors",
forward_ref_m2m_with_error_test,
)
register_tests(NaturalKeySerializerTests, "test_%s_pk_with_default", pk_with_default)
register_tests(
NaturalKeySerializerTests,
"test_%s_fk_as_pk_natural_key_not_called",
fk_as_pk_natural_key_not_called,
)
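# register_tests() comes from elsewhere in this suite. Based solely on how it
# is called above, a minimal sketch of what it is assumed to do (not the
# actual implementation) might be:
#
#     from functools import partialmethod
#     from django.core import serializers
#
#     def register_tests(test_class, method_name_pattern, test_func):
#         # One generated test method per registered serializer format.
#         for format_ in serializers.get_serializer_formats():
#             setattr(
#                 test_class,
#                 method_name_pattern % format_,
#                 partialmethod(test_func, format_),
#             )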
|
ec3f9eef6b5230b26f074e94a873e7ebe238a3647228016fd7d10cab9f338970 | from datetime import date
from django import forms
from django.contrib.admin.models import ADDITION, CHANGE, DELETION, LogEntry
from django.contrib.admin.options import (
HORIZONTAL,
VERTICAL,
ModelAdmin,
TabularInline,
get_content_type_for_model,
)
from django.contrib.admin.sites import AdminSite
from django.contrib.admin.widgets import (
AdminDateWidget,
AdminRadioSelect,
AutocompleteSelect,
AutocompleteSelectMultiple,
)
from django.contrib.auth.models import User
from django.db import models
from django.forms.widgets import Select
from django.test import SimpleTestCase, TestCase
from django.test.utils import isolate_apps
from .models import Band, Concert, Song
class MockRequest:
pass
class MockSuperUser:
def has_perm(self, perm, obj=None):
return True
request = MockRequest()
request.user = MockSuperUser()
class ModelAdminTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.band = Band.objects.create(
name="The Doors",
bio="",
sign_date=date(1965, 1, 1),
)
def setUp(self):
self.site = AdminSite()
def test_modeladmin_str(self):
ma = ModelAdmin(Band, self.site)
self.assertEqual(str(ma), "modeladmin.ModelAdmin")
def test_default_attributes(self):
ma = ModelAdmin(Band, self.site)
self.assertEqual(ma.actions, ())
self.assertEqual(ma.inlines, ())
# form/fields/fieldsets interaction ##############################
def test_default_fields(self):
ma = ModelAdmin(Band, self.site)
self.assertEqual(
list(ma.get_form(request).base_fields), ["name", "bio", "sign_date"]
)
self.assertEqual(list(ma.get_fields(request)), ["name", "bio", "sign_date"])
self.assertEqual(
list(ma.get_fields(request, self.band)), ["name", "bio", "sign_date"]
)
self.assertIsNone(ma.get_exclude(request, self.band))
def test_default_fieldsets(self):
# fieldsets_add and fieldsets_change should return a special data structure that
# is used in the templates. They should generate the "right thing" whether we
# have specified a custom form, the fields argument, or nothing at all.
#
# Here's the default case. There are no custom form_add/form_change methods,
# no fields argument, and no fieldsets argument.
ma = ModelAdmin(Band, self.site)
self.assertEqual(
ma.get_fieldsets(request),
[(None, {"fields": ["name", "bio", "sign_date"]})],
)
self.assertEqual(
ma.get_fieldsets(request, self.band),
[(None, {"fields": ["name", "bio", "sign_date"]})],
)
def test_get_fieldsets(self):
# get_fieldsets() is called when figuring out form fields (#18681).
class BandAdmin(ModelAdmin):
def get_fieldsets(self, request, obj=None):
return [(None, {"fields": ["name", "bio"]})]
ma = BandAdmin(Band, self.site)
form = ma.get_form(None)
self.assertEqual(form._meta.fields, ["name", "bio"])
class InlineBandAdmin(TabularInline):
model = Concert
fk_name = "main_band"
can_delete = False
def get_fieldsets(self, request, obj=None):
return [(None, {"fields": ["day", "transport"]})]
ma = InlineBandAdmin(Band, self.site)
form = ma.get_formset(None).form
self.assertEqual(form._meta.fields, ["day", "transport"])
def test_lookup_allowed_allows_nonexistent_lookup(self):
"""
        lookup_allowed() allows a parameter whose field lookup doesn't exist
        (#21129).
"""
class BandAdmin(ModelAdmin):
fields = ["name"]
ma = BandAdmin(Band, self.site)
self.assertTrue(ma.lookup_allowed("name__nonexistent", "test_value"))
@isolate_apps("modeladmin")
def test_lookup_allowed_onetoone(self):
class Department(models.Model):
code = models.CharField(max_length=4, unique=True)
class Employee(models.Model):
department = models.ForeignKey(Department, models.CASCADE, to_field="code")
class EmployeeProfile(models.Model):
employee = models.OneToOneField(Employee, models.CASCADE)
class EmployeeInfo(models.Model):
employee = models.OneToOneField(Employee, models.CASCADE)
description = models.CharField(max_length=100)
class EmployeeProfileAdmin(ModelAdmin):
list_filter = [
"employee__employeeinfo__description",
"employee__department__code",
]
ma = EmployeeProfileAdmin(EmployeeProfile, self.site)
# Reverse OneToOneField
self.assertIs(
ma.lookup_allowed("employee__employeeinfo__description", "test_value"), True
)
# OneToOneField and ForeignKey
self.assertIs(
ma.lookup_allowed("employee__department__code", "test_value"), True
)
def test_field_arguments(self):
# If fields is specified, fieldsets_add and fieldsets_change should
# just stick the fields into a formsets structure and return it.
class BandAdmin(ModelAdmin):
fields = ["name"]
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_fields(request)), ["name"])
self.assertEqual(list(ma.get_fields(request, self.band)), ["name"])
self.assertEqual(ma.get_fieldsets(request), [(None, {"fields": ["name"]})])
self.assertEqual(
ma.get_fieldsets(request, self.band), [(None, {"fields": ["name"]})]
)
def test_field_arguments_restricted_on_form(self):
        # If fields or fieldsets is specified, it should restrict the fields
        # on the Form class to those specified. This may cause errors to be
# raised in the db layer if required model fields aren't in fields/
# fieldsets, but that's preferable to ghost errors where a field in the
# Form class isn't being displayed because it's not in fields/fieldsets.
# Using `fields`.
class BandAdmin(ModelAdmin):
fields = ["name"]
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ["name"])
self.assertEqual(list(ma.get_form(request, self.band).base_fields), ["name"])
# Using `fieldsets`.
class BandAdmin(ModelAdmin):
fieldsets = [(None, {"fields": ["name"]})]
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ["name"])
self.assertEqual(list(ma.get_form(request, self.band).base_fields), ["name"])
# Using `exclude`.
class BandAdmin(ModelAdmin):
exclude = ["bio"]
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ["name", "sign_date"])
# You can also pass a tuple to `exclude`.
class BandAdmin(ModelAdmin):
exclude = ("bio",)
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ["name", "sign_date"])
# Using `fields` and `exclude`.
class BandAdmin(ModelAdmin):
fields = ["name", "bio"]
exclude = ["bio"]
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ["name"])
def test_custom_form_meta_exclude_with_readonly(self):
"""
The custom ModelForm's `Meta.exclude` is respected when used in
conjunction with `ModelAdmin.readonly_fields` and when no
`ModelAdmin.exclude` is defined (#14496).
"""
# With ModelAdmin
class AdminBandForm(forms.ModelForm):
class Meta:
model = Band
exclude = ["bio"]
class BandAdmin(ModelAdmin):
readonly_fields = ["name"]
form = AdminBandForm
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ["sign_date"])
# With InlineModelAdmin
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ["day"]
class ConcertInline(TabularInline):
readonly_fields = ["transport"]
form = AdminConcertForm
fk_name = "main_band"
model = Concert
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
["main_band", "opening_band", "id", "DELETE"],
)
def test_custom_formfield_override_readonly(self):
class AdminBandForm(forms.ModelForm):
name = forms.CharField()
class Meta:
exclude = ()
model = Band
class BandAdmin(ModelAdmin):
form = AdminBandForm
readonly_fields = ["name"]
ma = BandAdmin(Band, self.site)
# `name` shouldn't appear in base_fields because it's part of
# readonly_fields.
self.assertEqual(list(ma.get_form(request).base_fields), ["bio", "sign_date"])
# But it should appear in get_fields()/fieldsets() so it can be
# displayed as read-only.
self.assertEqual(list(ma.get_fields(request)), ["bio", "sign_date", "name"])
self.assertEqual(
list(ma.get_fieldsets(request)),
[(None, {"fields": ["bio", "sign_date", "name"]})],
)
def test_custom_form_meta_exclude(self):
"""
The custom ModelForm's `Meta.exclude` is overridden if
        `ModelAdmin.exclude` or `InlineModelAdmin.exclude` is defined (#14496).
"""
# With ModelAdmin
class AdminBandForm(forms.ModelForm):
class Meta:
model = Band
exclude = ["bio"]
class BandAdmin(ModelAdmin):
exclude = ["name"]
form = AdminBandForm
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ["bio", "sign_date"])
# With InlineModelAdmin
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ["day"]
class ConcertInline(TabularInline):
exclude = ["transport"]
form = AdminConcertForm
fk_name = "main_band"
model = Concert
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
["main_band", "opening_band", "day", "id", "DELETE"],
)
def test_overriding_get_exclude(self):
class BandAdmin(ModelAdmin):
def get_exclude(self, request, obj=None):
return ["name"]
self.assertEqual(
list(BandAdmin(Band, self.site).get_form(request).base_fields),
["bio", "sign_date"],
)
def test_get_exclude_overrides_exclude(self):
class BandAdmin(ModelAdmin):
exclude = ["bio"]
def get_exclude(self, request, obj=None):
return ["name"]
self.assertEqual(
list(BandAdmin(Band, self.site).get_form(request).base_fields),
["bio", "sign_date"],
)
def test_get_exclude_takes_obj(self):
class BandAdmin(ModelAdmin):
def get_exclude(self, request, obj=None):
if obj:
return ["sign_date"]
return ["name"]
self.assertEqual(
list(BandAdmin(Band, self.site).get_form(request, self.band).base_fields),
["name", "bio"],
)
def test_custom_form_validation(self):
        # If a form is specified, the ModelAdmin should use it, allowing
        # custom validation to work properly. This won't break any of the
        # admin widgets or media.
class AdminBandForm(forms.ModelForm):
delete = forms.BooleanField()
class BandAdmin(ModelAdmin):
form = AdminBandForm
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(ma.get_form(request).base_fields),
["name", "bio", "sign_date", "delete"],
)
self.assertEqual(
type(ma.get_form(request).base_fields["sign_date"].widget), AdminDateWidget
)
def test_form_exclude_kwarg_override(self):
"""
The `exclude` kwarg passed to `ModelAdmin.get_form()` overrides all
other declarations (#8999).
"""
class AdminBandForm(forms.ModelForm):
class Meta:
model = Band
exclude = ["name"]
class BandAdmin(ModelAdmin):
exclude = ["sign_date"]
form = AdminBandForm
def get_form(self, request, obj=None, **kwargs):
kwargs["exclude"] = ["bio"]
return super().get_form(request, obj, **kwargs)
ma = BandAdmin(Band, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ["name", "sign_date"])
def test_formset_exclude_kwarg_override(self):
"""
The `exclude` kwarg passed to `InlineModelAdmin.get_formset()`
overrides all other declarations (#8999).
"""
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ["day"]
class ConcertInline(TabularInline):
exclude = ["transport"]
form = AdminConcertForm
fk_name = "main_band"
model = Concert
def get_formset(self, request, obj=None, **kwargs):
kwargs["exclude"] = ["opening_band"]
return super().get_formset(request, obj, **kwargs)
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
["main_band", "day", "transport", "id", "DELETE"],
)
def test_formset_overriding_get_exclude_with_form_fields(self):
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
fields = ["main_band", "opening_band", "day", "transport"]
class ConcertInline(TabularInline):
form = AdminConcertForm
fk_name = "main_band"
model = Concert
def get_exclude(self, request, obj=None):
return ["opening_band"]
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
["main_band", "day", "transport", "id", "DELETE"],
)
def test_formset_overriding_get_exclude_with_form_exclude(self):
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ["day"]
class ConcertInline(TabularInline):
form = AdminConcertForm
fk_name = "main_band"
model = Concert
def get_exclude(self, request, obj=None):
return ["opening_band"]
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
["main_band", "day", "transport", "id", "DELETE"],
)
def test_raw_id_fields_widget_override(self):
"""
        The autocomplete_fields, raw_id_fields, and radio_fields widgets may
        be overridden by specifying a widget in get_formset().
"""
class ConcertInline(TabularInline):
model = Concert
fk_name = "main_band"
raw_id_fields = ("opening_band",)
def get_formset(self, request, obj=None, **kwargs):
kwargs["widgets"] = {"opening_band": Select}
return super().get_formset(request, obj, **kwargs)
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
band_widget = (
list(ma.get_formsets_with_inlines(request))[0][0]()
.forms[0]
.fields["opening_band"]
.widget
)
# Without the override this would be ForeignKeyRawIdWidget.
self.assertIsInstance(band_widget, Select)
def test_queryset_override(self):
# If the queryset of a ModelChoiceField in a custom form is overridden,
# RelatedFieldWidgetWrapper doesn't mess that up.
band2 = Band.objects.create(
name="The Beatles", bio="", sign_date=date(1962, 1, 1)
)
ma = ModelAdmin(Concert, self.site)
form = ma.get_form(request)()
self.assertHTMLEqual(
str(form["main_band"]),
'<div class="related-widget-wrapper" data-model-ref="band">'
'<select name="main_band" id="id_main_band" required>'
'<option value="" selected>---------</option>'
'<option value="%d">The Beatles</option>'
'<option value="%d">The Doors</option>'
"</select></div>" % (band2.id, self.band.id),
)
class AdminConcertForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields["main_band"].queryset = Band.objects.filter(
name="The Doors"
)
class ConcertAdminWithForm(ModelAdmin):
form = AdminConcertForm
ma = ConcertAdminWithForm(Concert, self.site)
form = ma.get_form(request)()
self.assertHTMLEqual(
str(form["main_band"]),
'<div class="related-widget-wrapper" data-model-ref="band">'
'<select name="main_band" id="id_main_band" required>'
'<option value="" selected>---------</option>'
'<option value="%d">The Doors</option>'
"</select></div>" % self.band.id,
)
def test_regression_for_ticket_15820(self):
"""
`obj` is passed from `InlineModelAdmin.get_fieldsets()` to
`InlineModelAdmin.get_formset()`.
"""
class CustomConcertForm(forms.ModelForm):
class Meta:
model = Concert
fields = ["day"]
class ConcertInline(TabularInline):
model = Concert
fk_name = "main_band"
def get_formset(self, request, obj=None, **kwargs):
if obj:
kwargs["form"] = CustomConcertForm
return super().get_formset(request, obj, **kwargs)
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
Concert.objects.create(main_band=self.band, opening_band=self.band, day=1)
ma = BandAdmin(Band, self.site)
inline_instances = ma.get_inline_instances(request)
fieldsets = list(inline_instances[0].get_fieldsets(request))
self.assertEqual(
fieldsets[0][1]["fields"], ["main_band", "opening_band", "day", "transport"]
)
fieldsets = list(
inline_instances[0].get_fieldsets(request, inline_instances[0].model)
)
self.assertEqual(fieldsets[0][1]["fields"], ["day"])
# radio_fields behavior ###########################################
def test_default_foreign_key_widget(self):
# First, without any radio_fields specified, the widgets for ForeignKey
# and fields with choices specified ought to be a basic Select widget.
# ForeignKey widgets in the admin are wrapped with RelatedFieldWidgetWrapper so
# they need to be handled properly when type checking. For Select fields, all of
# the choices lists have a first entry of dashes.
cma = ModelAdmin(Concert, self.site)
cmafa = cma.get_form(request)
self.assertEqual(type(cmafa.base_fields["main_band"].widget.widget), Select)
self.assertEqual(
list(cmafa.base_fields["main_band"].widget.choices),
[("", "---------"), (self.band.id, "The Doors")],
)
self.assertEqual(type(cmafa.base_fields["opening_band"].widget.widget), Select)
self.assertEqual(
list(cmafa.base_fields["opening_band"].widget.choices),
[("", "---------"), (self.band.id, "The Doors")],
)
self.assertEqual(type(cmafa.base_fields["day"].widget), Select)
self.assertEqual(
list(cmafa.base_fields["day"].widget.choices),
[("", "---------"), (1, "Fri"), (2, "Sat")],
)
self.assertEqual(type(cmafa.base_fields["transport"].widget), Select)
self.assertEqual(
list(cmafa.base_fields["transport"].widget.choices),
[("", "---------"), (1, "Plane"), (2, "Train"), (3, "Bus")],
)
def test_foreign_key_as_radio_field(self):
# Now specify all the fields as radio_fields. Widgets should now be
# RadioSelect, and the choices list should have a first entry of 'None' if
# blank=True for the model field. Finally, the widget should have the
# 'radiolist' attr, and 'inline' as well if the field is specified HORIZONTAL.
class ConcertAdmin(ModelAdmin):
radio_fields = {
"main_band": HORIZONTAL,
"opening_band": VERTICAL,
"day": VERTICAL,
"transport": HORIZONTAL,
}
cma = ConcertAdmin(Concert, self.site)
cmafa = cma.get_form(request)
self.assertEqual(
type(cmafa.base_fields["main_band"].widget.widget), AdminRadioSelect
)
self.assertEqual(
cmafa.base_fields["main_band"].widget.attrs, {"class": "radiolist inline"}
)
self.assertEqual(
list(cmafa.base_fields["main_band"].widget.choices),
[(self.band.id, "The Doors")],
)
self.assertEqual(
type(cmafa.base_fields["opening_band"].widget.widget), AdminRadioSelect
)
self.assertEqual(
cmafa.base_fields["opening_band"].widget.attrs, {"class": "radiolist"}
)
self.assertEqual(
list(cmafa.base_fields["opening_band"].widget.choices),
[("", "None"), (self.band.id, "The Doors")],
)
self.assertEqual(type(cmafa.base_fields["day"].widget), AdminRadioSelect)
self.assertEqual(cmafa.base_fields["day"].widget.attrs, {"class": "radiolist"})
self.assertEqual(
list(cmafa.base_fields["day"].widget.choices), [(1, "Fri"), (2, "Sat")]
)
self.assertEqual(type(cmafa.base_fields["transport"].widget), AdminRadioSelect)
self.assertEqual(
cmafa.base_fields["transport"].widget.attrs, {"class": "radiolist inline"}
)
self.assertEqual(
list(cmafa.base_fields["transport"].widget.choices),
[("", "None"), (1, "Plane"), (2, "Train"), (3, "Bus")],
)
class AdminConcertForm(forms.ModelForm):
class Meta:
model = Concert
exclude = ("transport",)
class ConcertAdmin(ModelAdmin):
form = AdminConcertForm
ma = ConcertAdmin(Concert, self.site)
self.assertEqual(
list(ma.get_form(request).base_fields), ["main_band", "opening_band", "day"]
)
class AdminConcertForm(forms.ModelForm):
extra = forms.CharField()
class Meta:
model = Concert
fields = ["extra", "transport"]
class ConcertAdmin(ModelAdmin):
form = AdminConcertForm
ma = ConcertAdmin(Concert, self.site)
self.assertEqual(list(ma.get_form(request).base_fields), ["extra", "transport"])
class ConcertInline(TabularInline):
form = AdminConcertForm
model = Concert
fk_name = "main_band"
can_delete = True
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
["extra", "transport", "id", "DELETE", "main_band"],
)
def test_log_actions(self):
ma = ModelAdmin(Band, self.site)
mock_request = MockRequest()
mock_request.user = User.objects.create(username="bill")
content_type = get_content_type_for_model(self.band)
tests = (
(ma.log_addition, ADDITION, {"added": {}}),
(ma.log_change, CHANGE, {"changed": {"fields": ["name", "bio"]}}),
(ma.log_deletion, DELETION, str(self.band)),
)
for method, flag, message in tests:
with self.subTest(name=method.__name__):
created = method(mock_request, self.band, message)
fetched = LogEntry.objects.filter(action_flag=flag).latest("id")
self.assertEqual(created, fetched)
self.assertEqual(fetched.action_flag, flag)
self.assertEqual(fetched.content_type, content_type)
self.assertEqual(fetched.object_id, str(self.band.pk))
self.assertEqual(fetched.user, mock_request.user)
if flag == DELETION:
self.assertEqual(fetched.change_message, "")
self.assertEqual(fetched.object_repr, message)
else:
self.assertEqual(fetched.change_message, str(message))
self.assertEqual(fetched.object_repr, str(self.band))
def test_get_autocomplete_fields(self):
class NameAdmin(ModelAdmin):
search_fields = ["name"]
class SongAdmin(ModelAdmin):
autocomplete_fields = ["featuring"]
fields = ["featuring", "band"]
class OtherSongAdmin(SongAdmin):
def get_autocomplete_fields(self, request):
return ["band"]
self.site.register(Band, NameAdmin)
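        # Band is registered with an admin that defines search_fields, as the
        # autocomplete machinery expects; the finally block unregisters it so
        # the site is left clean for other tests.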
try:
# Uses autocomplete_fields if not overridden.
model_admin = SongAdmin(Song, self.site)
form = model_admin.get_form(request)()
self.assertIsInstance(
form.fields["featuring"].widget.widget, AutocompleteSelectMultiple
)
# Uses overridden get_autocomplete_fields
model_admin = OtherSongAdmin(Song, self.site)
form = model_admin.get_form(request)()
self.assertIsInstance(form.fields["band"].widget.widget, AutocompleteSelect)
finally:
self.site.unregister(Band)
def test_get_deleted_objects(self):
mock_request = MockRequest()
mock_request.user = User.objects.create_superuser(
username="bob", email="[email protected]", password="test"
)
self.site.register(Band, ModelAdmin)
ma = self.site._registry[Band]
(
deletable_objects,
model_count,
perms_needed,
protected,
        ) = ma.get_deleted_objects([self.band], mock_request)
self.assertEqual(deletable_objects, ["Band: The Doors"])
self.assertEqual(model_count, {"bands": 1})
self.assertEqual(perms_needed, set())
self.assertEqual(protected, [])
def test_get_deleted_objects_with_custom_has_delete_permission(self):
"""
ModelAdmin.get_deleted_objects() uses ModelAdmin.has_delete_permission()
for permissions checking.
"""
mock_request = MockRequest()
mock_request.user = User.objects.create_superuser(
username="bob", email="[email protected]", password="test"
)
class TestModelAdmin(ModelAdmin):
def has_delete_permission(self, request, obj=None):
return False
self.site.register(Band, TestModelAdmin)
ma = self.site._registry[Band]
(
deletable_objects,
model_count,
perms_needed,
protected,
        ) = ma.get_deleted_objects([self.band], mock_request)
self.assertEqual(deletable_objects, ["Band: The Doors"])
self.assertEqual(model_count, {"bands": 1})
self.assertEqual(perms_needed, {"band"})
self.assertEqual(protected, [])
def test_modeladmin_repr(self):
ma = ModelAdmin(Band, self.site)
self.assertEqual(
repr(ma),
"<ModelAdmin: model=Band site=AdminSite(name='admin')>",
)
class ModelAdminPermissionTests(SimpleTestCase):
class MockUser:
def has_module_perms(self, app_label):
return app_label == "modeladmin"
class MockViewUser(MockUser):
def has_perm(self, perm, obj=None):
return perm == "modeladmin.view_band"
class MockAddUser(MockUser):
def has_perm(self, perm, obj=None):
return perm == "modeladmin.add_band"
class MockChangeUser(MockUser):
def has_perm(self, perm, obj=None):
return perm == "modeladmin.change_band"
class MockDeleteUser(MockUser):
def has_perm(self, perm, obj=None):
return perm == "modeladmin.delete_band"
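    # Each mock user grants exactly one model permission, so the tests below
    # can pinpoint which permission each has_*_permission() check consults.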
def test_has_view_permission(self):
"""
has_view_permission() returns True for users who can view objects and
False for users who can't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertIs(ma.has_view_permission(request), True)
request.user = self.MockAddUser()
self.assertIs(ma.has_view_permission(request), False)
request.user = self.MockChangeUser()
self.assertIs(ma.has_view_permission(request), True)
request.user = self.MockDeleteUser()
self.assertIs(ma.has_view_permission(request), False)
def test_has_add_permission(self):
"""
has_add_permission returns True for users who can add objects and
False for users who can't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertFalse(ma.has_add_permission(request))
request.user = self.MockAddUser()
self.assertTrue(ma.has_add_permission(request))
request.user = self.MockChangeUser()
self.assertFalse(ma.has_add_permission(request))
request.user = self.MockDeleteUser()
self.assertFalse(ma.has_add_permission(request))
def test_inline_has_add_permission_uses_obj(self):
class ConcertInline(TabularInline):
model = Concert
def has_add_permission(self, request, obj):
return bool(obj)
class BandAdmin(ModelAdmin):
inlines = [ConcertInline]
ma = BandAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockAddUser()
self.assertEqual(ma.get_inline_instances(request), [])
band = Band(name="The Doors", bio="", sign_date=date(1965, 1, 1))
inline_instances = ma.get_inline_instances(request, band)
self.assertEqual(len(inline_instances), 1)
self.assertIsInstance(inline_instances[0], ConcertInline)
def test_has_change_permission(self):
"""
has_change_permission returns True for users who can edit objects and
False for users who can't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertIs(ma.has_change_permission(request), False)
request.user = self.MockAddUser()
self.assertFalse(ma.has_change_permission(request))
request.user = self.MockChangeUser()
self.assertTrue(ma.has_change_permission(request))
request.user = self.MockDeleteUser()
self.assertFalse(ma.has_change_permission(request))
def test_has_delete_permission(self):
"""
has_delete_permission returns True for users who can delete objects and
False for users who can't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertIs(ma.has_delete_permission(request), False)
request.user = self.MockAddUser()
self.assertFalse(ma.has_delete_permission(request))
request.user = self.MockChangeUser()
self.assertFalse(ma.has_delete_permission(request))
request.user = self.MockDeleteUser()
self.assertTrue(ma.has_delete_permission(request))
def test_has_module_permission(self):
"""
        has_module_permission returns True for users who have any permission
for the module and False for users who don't.
"""
ma = ModelAdmin(Band, AdminSite())
request = MockRequest()
request.user = self.MockViewUser()
self.assertIs(ma.has_module_permission(request), True)
request.user = self.MockAddUser()
self.assertTrue(ma.has_module_permission(request))
request.user = self.MockChangeUser()
self.assertTrue(ma.has_module_permission(request))
request.user = self.MockDeleteUser()
self.assertTrue(ma.has_module_permission(request))
original_app_label = ma.opts.app_label
ma.opts.app_label = "anotherapp"
try:
request.user = self.MockViewUser()
self.assertIs(ma.has_module_permission(request), False)
request.user = self.MockAddUser()
self.assertFalse(ma.has_module_permission(request))
request.user = self.MockChangeUser()
self.assertFalse(ma.has_module_permission(request))
request.user = self.MockDeleteUser()
self.assertFalse(ma.has_module_permission(request))
finally:
ma.opts.app_label = original_app_label
|
b75d4dc6388598497da9159db519b86718a9bf69f7a4969ea481883dbf9ebb0e | import datetime
import pickle
import sys
import unittest
from operator import attrgetter
from threading import Lock
from django.core.exceptions import EmptyResultSet, FieldError
from django.db import DEFAULT_DB_ALIAS, connection
from django.db.models import Count, Exists, F, Max, OuterRef, Q
from django.db.models.expressions import RawSQL
from django.db.models.sql.constants import LOUTER
from django.db.models.sql.where import NothingNode, WhereNode
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext, ignore_warnings
from django.utils.deprecation import RemovedInDjango50Warning
from .models import (
FK1,
Annotation,
Article,
Author,
BaseA,
BaseUser,
Book,
CategoryItem,
CategoryRelationship,
Celebrity,
Channel,
Chapter,
Child,
ChildObjectA,
Classroom,
CommonMixedCaseForeignKeys,
Company,
Cover,
CustomPk,
CustomPkTag,
DateTimePK,
Detail,
DumbCategory,
Eaten,
Employment,
ExtraInfo,
Fan,
Food,
Identifier,
Individual,
Item,
Job,
JobResponsibilities,
Join,
LeafA,
LeafB,
LoopX,
LoopZ,
ManagedModel,
Member,
MixedCaseDbColumnCategoryItem,
MixedCaseFieldCategoryItem,
ModelA,
ModelB,
ModelC,
ModelD,
MyObject,
NamedCategory,
Node,
Note,
NullableName,
Number,
ObjectA,
ObjectB,
ObjectC,
OneToOneCategory,
Order,
OrderItem,
Page,
Paragraph,
Person,
Plaything,
PointerA,
Program,
ProxyCategory,
ProxyObjectA,
ProxyObjectB,
Ranking,
Related,
RelatedIndividual,
RelatedObject,
Report,
ReportComment,
ReservedName,
Responsibility,
School,
SharedConnection,
SimpleCategory,
SingleObject,
SpecialCategory,
Staff,
StaffUser,
Student,
Tag,
Task,
Teacher,
Ticket21203Child,
Ticket21203Parent,
Ticket23605A,
Ticket23605B,
Ticket23605C,
TvChef,
Valid,
X,
)
class Queries1Tests(TestCase):
@classmethod
def setUpTestData(cls):
cls.nc1 = generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name="t1", category=generic)
cls.t2 = Tag.objects.create(name="t2", parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name="t3", parent=cls.t1)
cls.t4 = Tag.objects.create(name="t4", parent=cls.t3)
cls.t5 = Tag.objects.create(name="t5", parent=cls.t3)
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.n2 = Note.objects.create(note="n2", misc="bar", id=2)
cls.n3 = Note.objects.create(note="n3", misc="foo", id=3, negate=False)
cls.ann1 = Annotation.objects.create(name="a1", tag=cls.t1)
cls.ann1.notes.add(cls.n1)
ann2 = Annotation.objects.create(name="a2", tag=cls.t4)
ann2.notes.add(cls.n2, cls.n3)
# Create these out of order so that sorting by 'id' will be different to sorting
# by 'info'. Helps detect some problems later.
cls.e2 = ExtraInfo.objects.create(
info="e2", note=cls.n2, value=41, filterable=False
)
e1 = ExtraInfo.objects.create(info="e1", note=cls.n1, value=42)
cls.a1 = Author.objects.create(name="a1", num=1001, extra=e1)
cls.a2 = Author.objects.create(name="a2", num=2002, extra=e1)
cls.a3 = Author.objects.create(name="a3", num=3003, extra=cls.e2)
cls.a4 = Author.objects.create(name="a4", num=4004, extra=cls.e2)
cls.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0)
cls.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0)
time3 = datetime.datetime(2007, 12, 20, 22, 25, 0)
time4 = datetime.datetime(2007, 12, 20, 21, 0, 0)
cls.i1 = Item.objects.create(
name="one",
created=cls.time1,
modified=cls.time1,
creator=cls.a1,
note=cls.n3,
)
cls.i1.tags.set([cls.t1, cls.t2])
cls.i2 = Item.objects.create(
name="two", created=cls.time2, creator=cls.a2, note=cls.n2
)
cls.i2.tags.set([cls.t1, cls.t3])
cls.i3 = Item.objects.create(
name="three", created=time3, creator=cls.a2, note=cls.n3
)
cls.i4 = Item.objects.create(
name="four", created=time4, creator=cls.a4, note=cls.n3
)
cls.i4.tags.set([cls.t4])
cls.r1 = Report.objects.create(name="r1", creator=cls.a1)
cls.r2 = Report.objects.create(name="r2", creator=cls.a3)
cls.r3 = Report.objects.create(name="r3")
# Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the Meta.ordering
# will be rank3, rank2, rank1.
cls.rank1 = Ranking.objects.create(rank=2, author=cls.a2)
cls.c1 = Cover.objects.create(title="first", item=cls.i4)
cls.c2 = Cover.objects.create(title="second", item=cls.i2)
def test_subquery_condition(self):
qs1 = Tag.objects.filter(pk__lte=0)
qs2 = Tag.objects.filter(parent__in=qs1)
qs3 = Tag.objects.filter(parent__in=qs2)
self.assertEqual(qs3.query.subq_aliases, {"T", "U", "V"})
self.assertIn("v0", str(qs3.query).lower())
qs4 = qs3.filter(parent__in=qs1)
self.assertEqual(qs4.query.subq_aliases, {"T", "U", "V"})
# It is possible to reuse U for the second subquery, no need to use W.
self.assertNotIn("w0", str(qs4.query).lower())
# So, 'U0."id"' is referenced in SELECT and WHERE twice.
self.assertEqual(str(qs4.query).lower().count("u0."), 4)
def test_ticket1050(self):
self.assertSequenceEqual(
Item.objects.filter(tags__isnull=True),
[self.i3],
)
self.assertSequenceEqual(
Item.objects.filter(tags__id__isnull=True),
[self.i3],
)
def test_ticket1801(self):
self.assertSequenceEqual(
Author.objects.filter(item=self.i2),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(item=self.i3),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3),
[self.a2],
)
def test_ticket2306(self):
# Checking that no join types are "left outer" joins.
query = Item.objects.filter(tags=self.t2).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).order_by("name"),
[self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1)).filter(
Q(creator__name="fred") | Q(tags=self.t2)
),
[self.i1],
)
# Each filter call is processed "at once" against a single table, so this is
# different from the previous example as it tries to find tags that are two
# things at once (rather than two tags).
self.assertSequenceEqual(
Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)), []
)
self.assertSequenceEqual(
Item.objects.filter(
Q(tags=self.t1), Q(creator__name="fred") | Q(tags=self.t2)
),
[],
)
qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id)
self.assertSequenceEqual(list(qs), [self.a2])
        self.assertEqual(qs.query.count_active_tables(), 2)
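        # Both conditions above live in a single filter() call, so they share
        # one join to the ranking table (two active tables). Chaining a second
        # filter() on the same multi-valued relation adds another join, hence
        # three active tables below.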
qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id)
self.assertEqual(qs.query.count_active_tables(), 3)
def test_ticket4464(self):
self.assertSequenceEqual(
Item.objects.filter(tags=self.t1).filter(tags=self.t2),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2])
.distinct()
.order_by("name"),
[self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3),
[self.i2],
)
# Make sure .distinct() works with slicing (this was broken in Oracle).
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).order_by("name")[:3],
[self.i1, self.i1, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(tags__in=[self.t1, self.t2])
.distinct()
.order_by("name")[:3],
[self.i1, self.i2],
)
def test_tickets_2080_3592(self):
self.assertSequenceEqual(
Author.objects.filter(item__name="one") | Author.objects.filter(name="a3"),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(item__name="one") | Q(name="a3")),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(name="a3") | Q(item__name="one")),
[self.a1, self.a3],
)
self.assertSequenceEqual(
Author.objects.filter(Q(item__name="three") | Q(report__name="r3")),
[self.a2],
)
def test_ticket6074(self):
# Merging two empty result sets shouldn't leave a queryset with no constraints
# (which would match everything).
self.assertSequenceEqual(Author.objects.filter(Q(id__in=[])), [])
self.assertSequenceEqual(Author.objects.filter(Q(id__in=[]) | Q(id__in=[])), [])
def test_tickets_1878_2939(self):
self.assertEqual(Item.objects.values("creator").distinct().count(), 3)
# Create something with a duplicate 'name' so that we can test multi-column
# cases (which require some tricky SQL transformations under the covers).
xx = Item(name="four", created=self.time1, creator=self.a2, note=self.n1)
xx.save()
self.assertEqual(
Item.objects.exclude(name="two")
.values("creator", "name")
.distinct()
.count(),
4,
)
self.assertEqual(
(
Item.objects.exclude(name="two")
.extra(select={"foo": "%s"}, select_params=(1,))
.values("creator", "name", "foo")
.distinct()
.count()
),
4,
)
self.assertEqual(
(
Item.objects.exclude(name="two")
.extra(select={"foo": "%s"}, select_params=(1,))
.values("creator", "name")
.distinct()
.count()
),
4,
)
xx.delete()
def test_ticket7323(self):
self.assertEqual(Item.objects.values("creator", "name").count(), 4)
def test_ticket2253(self):
q1 = Item.objects.order_by("name")
q2 = Item.objects.filter(id=self.i1.id)
self.assertSequenceEqual(q1, [self.i4, self.i1, self.i3, self.i2])
self.assertSequenceEqual(q2, [self.i1])
self.assertSequenceEqual(
(q1 | q2).order_by("name"),
[self.i4, self.i1, self.i3, self.i2],
)
self.assertSequenceEqual((q1 & q2).order_by("name"), [self.i1])
q1 = Item.objects.filter(tags=self.t1)
q2 = Item.objects.filter(note=self.n3, tags=self.t2)
q3 = Item.objects.filter(creator=self.a4)
self.assertSequenceEqual(
((q1 & q2) | q3).order_by("name"),
[self.i4, self.i1],
)
def test_order_by_tables(self):
q1 = Item.objects.order_by("name")
q2 = Item.objects.filter(id=self.i1.id)
list(q2)
combined_query = (q1 & q2).order_by("name").query
self.assertEqual(
len(
[
t
for t in combined_query.alias_map
if combined_query.alias_refcount[t]
]
),
1,
)
def test_order_by_join_unref(self):
"""
        Related to the previous test: changing the ordering shouldn't leave
        stale, unreferenced JOINs in the query.
"""
qs = Celebrity.objects.order_by("greatest_fan__fan_of")
self.assertIn("OUTER JOIN", str(qs.query))
qs = qs.order_by("id")
self.assertNotIn("OUTER JOIN", str(qs.query))
def test_get_clears_ordering(self):
"""
get() should clear ordering for optimization purposes.
"""
with CaptureQueriesContext(connection) as captured_queries:
Author.objects.order_by("name").get(pk=self.a1.pk)
self.assertNotIn("order by", captured_queries[0]["sql"].lower())
def test_tickets_4088_4306(self):
self.assertSequenceEqual(Report.objects.filter(creator=1001), [self.r1])
self.assertSequenceEqual(Report.objects.filter(creator__num=1001), [self.r1])
self.assertSequenceEqual(Report.objects.filter(creator__id=1001), [])
self.assertSequenceEqual(
Report.objects.filter(creator__id=self.a1.id), [self.r1]
)
self.assertSequenceEqual(Report.objects.filter(creator__name="a1"), [self.r1])
def test_ticket4510(self):
self.assertSequenceEqual(
Author.objects.filter(report__name="r1"),
[self.a1],
)
def test_ticket7378(self):
self.assertSequenceEqual(self.a1.report_set.all(), [self.r1])
def test_tickets_5324_6704(self):
self.assertSequenceEqual(
Item.objects.filter(tags__name="t4"),
[self.i4],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t4").order_by("name").distinct(),
[self.i1, self.i3, self.i2],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t4").order_by("name").distinct().reverse(),
[self.i2, self.i3, self.i1],
)
self.assertSequenceEqual(
Author.objects.exclude(item__name="one").distinct().order_by("name"),
[self.a2, self.a3, self.a4],
)
# Excluding across a m2m relation when there is more than one related
# object associated was problematic.
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t1").order_by("name"),
[self.i4, self.i3],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t1").exclude(tags__name="t4"),
[self.i3],
)
# Excluding from a relation that cannot be NULL should not use outer joins.
query = Item.objects.exclude(creator__in=[self.a1, self.a2]).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
# Similarly, when one of the joins cannot possibly, ever, involve NULL
# values (Author -> ExtraInfo, in the following), it should never be
# promoted to a left outer join. So the following query should only
# involve one "left outer" join (Author -> Item is 0-to-many).
qs = Author.objects.filter(id=self.a1.id).filter(
Q(extra__note=self.n1) | Q(item__note=self.n3)
)
self.assertEqual(
len(
[
x
for x in qs.query.alias_map.values()
if x.join_type == LOUTER and qs.query.alias_refcount[x.table_alias]
]
),
1,
)
# The previous changes shouldn't affect nullable foreign key joins.
self.assertSequenceEqual(
Tag.objects.filter(parent__isnull=True).order_by("name"), [self.t1]
)
self.assertSequenceEqual(
Tag.objects.exclude(parent__isnull=True).order_by("name"),
[self.t2, self.t3, self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__name="t1") | Q(parent__isnull=True)).order_by(
"name"
),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__isnull=True) | Q(parent__name="t1")).order_by(
"name"
),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.exclude(Q(parent__parent__isnull=True)).order_by("name"),
[self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.filter(~Q(parent__parent__isnull=True)).order_by("name"),
[self.t4, self.t5],
)
def test_ticket2091(self):
t = Tag.objects.get(name="t4")
self.assertSequenceEqual(Item.objects.filter(tags__in=[t]), [self.i4])
def test_avoid_infinite_loop_on_too_many_subqueries(self):
x = Tag.objects.filter(pk=1)
local_recursion_limit = sys.getrecursionlimit() // 16
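        # The // 16 divisor appears to mirror the cap used internally by
        # Django's alias-prefix bookkeeping when nesting subqueries, so
        # looping a couple of iterations past it should reliably trigger the
        # RecursionError guard.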
msg = "Maximum recursion depth exceeded: too many subqueries."
with self.assertRaisesMessage(RecursionError, msg):
for i in range(local_recursion_limit + 2):
x = Tag.objects.filter(pk__in=x)
def test_reasonable_number_of_subq_aliases(self):
x = Tag.objects.filter(pk=1)
for _ in range(20):
x = Tag.objects.filter(pk__in=x)
self.assertEqual(
x.query.subq_aliases,
{
"T",
"U",
"V",
"W",
"X",
"Y",
"Z",
"AA",
"AB",
"AC",
"AD",
"AE",
"AF",
"AG",
"AH",
"AI",
"AJ",
"AK",
"AL",
"AM",
"AN",
},
)
def test_heterogeneous_qs_combination(self):
# Combining querysets built on different models should behave in a well-defined
# fashion. We raise an error.
msg = "Cannot combine queries on two different base models."
with self.assertRaisesMessage(TypeError, msg):
Author.objects.all() & Tag.objects.all()
with self.assertRaisesMessage(TypeError, msg):
Author.objects.all() | Tag.objects.all()
def test_ticket3141(self):
self.assertEqual(Author.objects.extra(select={"foo": "1"}).count(), 4)
self.assertEqual(
Author.objects.extra(select={"foo": "%s"}, select_params=(1,)).count(), 4
)
def test_ticket2400(self):
self.assertSequenceEqual(
Author.objects.filter(item__isnull=True),
[self.a3],
)
self.assertSequenceEqual(
Tag.objects.filter(item__isnull=True),
[self.t5],
)
def test_ticket2496(self):
self.assertSequenceEqual(
Item.objects.extra(tables=["queries_author"])
.select_related()
.order_by("name")[:1],
[self.i4],
)
def test_error_raised_on_filter_with_dictionary(self):
with self.assertRaisesMessage(FieldError, "Cannot parse keyword query as dict"):
Note.objects.filter({"note": "n1", "misc": "foo"})
def test_tickets_2076_7256(self):
# Ordering on related tables should be possible, even if the table is
# not otherwise involved.
self.assertSequenceEqual(
Item.objects.order_by("note__note", "name"),
[self.i2, self.i4, self.i1, self.i3],
)
# Ordering on a related field should use the remote model's default
# ordering as a final step.
self.assertSequenceEqual(
Author.objects.order_by("extra", "-name"),
[self.a2, self.a1, self.a4, self.a3],
)
# Using remote model default ordering can span multiple models (in this
# case, Cover is ordered by Item's default, which uses Note's default).
self.assertSequenceEqual(Cover.objects.all(), [self.c1, self.c2])
# If the remote model does not have a default ordering, we order by its 'id'
# field.
self.assertSequenceEqual(
Item.objects.order_by("creator", "name"),
[self.i1, self.i3, self.i2, self.i4],
)
# Ordering by a many-valued attribute (e.g. a many-to-many or reverse
# ForeignKey) is legal, but the results might not make sense. That
# isn't Django's problem. Garbage in, garbage out.
self.assertSequenceEqual(
Item.objects.filter(tags__isnull=False).order_by("tags", "id"),
[self.i1, self.i2, self.i1, self.i2, self.i4],
)
# If we replace the default ordering, Django adjusts the required
# tables automatically. Item normally requires a join with Note to do
# the default ordering, but that isn't needed here.
qs = Item.objects.order_by("name")
self.assertSequenceEqual(qs, [self.i4, self.i1, self.i3, self.i2])
self.assertEqual(len(qs.query.alias_map), 1)
def test_tickets_2874_3002(self):
qs = Item.objects.select_related().order_by("note__note", "name")
self.assertQuerysetEqual(qs, [self.i2, self.i4, self.i1, self.i3])
# This is also a good select_related() test because there are multiple
# Note entries in the SQL. The two Note items should be different.
self.assertEqual(repr(qs[0].note), "<Note: n2>")
self.assertEqual(repr(qs[0].creator.extra.note), "<Note: n1>")
def test_ticket3037(self):
self.assertSequenceEqual(
Item.objects.filter(
Q(creator__name="a3", name="two") | Q(creator__name="a4", name="four")
),
[self.i4],
)
def test_tickets_5321_7070(self):
# Ordering columns must be included in the output columns. Note that
# this means results that might otherwise be distinct are not (if there
# are multiple values in the ordering cols), as in this example. This
# isn't a bug; it's a warning to be careful with the selection of
# ordering columns.
self.assertSequenceEqual(
Note.objects.values("misc").distinct().order_by("note", "-misc"),
[{"misc": "foo"}, {"misc": "bar"}, {"misc": "foo"}],
)
def test_ticket4358(self):
# If you don't pass any fields to values(), relation fields are
# returned as "foo_id" keys, not "foo". For consistency, you should be
# able to pass "foo_id" in the fields list and have it work, too. We
# actually allow both "foo" and "foo_id".
# The *_id version is returned by default.
self.assertIn("note_id", ExtraInfo.objects.values()[0])
# You can also pass it in explicitly.
self.assertSequenceEqual(
ExtraInfo.objects.values("note_id"), [{"note_id": 1}, {"note_id": 2}]
)
# ...or use the field name.
self.assertSequenceEqual(
ExtraInfo.objects.values("note"), [{"note": 1}, {"note": 2}]
)
def test_ticket6154(self):
# Multiple filter statements are joined using "AND" all the time.
self.assertSequenceEqual(
Author.objects.filter(id=self.a1.id).filter(
Q(extra__note=self.n1) | Q(item__note=self.n3)
),
[self.a1],
)
self.assertSequenceEqual(
Author.objects.filter(
Q(extra__note=self.n1) | Q(item__note=self.n3)
).filter(id=self.a1.id),
[self.a1],
)
def test_ticket6981(self):
self.assertSequenceEqual(
Tag.objects.select_related("parent").order_by("name"),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
def test_ticket9926(self):
self.assertSequenceEqual(
Tag.objects.select_related("parent", "category").order_by("name"),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
self.assertSequenceEqual(
Tag.objects.select_related("parent", "parent__category").order_by("name"),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
def test_tickets_6180_6203(self):
# Dates with limits and/or counts
self.assertEqual(Item.objects.count(), 4)
self.assertEqual(Item.objects.datetimes("created", "month").count(), 1)
self.assertEqual(Item.objects.datetimes("created", "day").count(), 2)
self.assertEqual(len(Item.objects.datetimes("created", "day")), 2)
self.assertEqual(
Item.objects.datetimes("created", "day")[0],
datetime.datetime(2007, 12, 19, 0, 0),
)
def test_tickets_7087_12242(self):
# Dates with extra select columns
self.assertSequenceEqual(
Item.objects.datetimes("created", "day").extra(select={"a": 1}),
[
datetime.datetime(2007, 12, 19, 0, 0),
datetime.datetime(2007, 12, 20, 0, 0),
],
)
self.assertSequenceEqual(
Item.objects.extra(select={"a": 1}).datetimes("created", "day"),
[
datetime.datetime(2007, 12, 19, 0, 0),
datetime.datetime(2007, 12, 20, 0, 0),
],
)
name = "one"
self.assertSequenceEqual(
Item.objects.datetimes("created", "day").extra(
where=["name=%s"], params=[name]
),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
self.assertSequenceEqual(
Item.objects.extra(where=["name=%s"], params=[name]).datetimes(
"created", "day"
),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
def test_ticket7155(self):
# Nullable dates
self.assertSequenceEqual(
Item.objects.datetimes("modified", "day"),
[datetime.datetime(2007, 12, 19, 0, 0)],
)
def test_order_by_rawsql(self):
self.assertSequenceEqual(
Item.objects.values("note__note").order_by(
RawSQL("queries_note.note", ()),
"id",
),
[
{"note__note": "n2"},
{"note__note": "n3"},
{"note__note": "n3"},
{"note__note": "n3"},
],
)
def test_ticket7096(self):
# Make sure exclude() with multiple conditions continues to work.
self.assertSequenceEqual(
Tag.objects.filter(parent=self.t1, name="t3").order_by("name"),
[self.t3],
)
self.assertSequenceEqual(
Tag.objects.exclude(parent=self.t1, name="t3").order_by("name"),
[self.t1, self.t2, self.t4, self.t5],
)
self.assertSequenceEqual(
Item.objects.exclude(tags__name="t1", name="one")
.order_by("name")
.distinct(),
[self.i4, self.i3, self.i2],
)
self.assertSequenceEqual(
Item.objects.filter(name__in=["three", "four"])
.exclude(tags__name="t1")
.order_by("name"),
[self.i4, self.i3],
)
# More twisted cases, involving nested negations.
self.assertSequenceEqual(
Item.objects.exclude(~Q(tags__name="t1", name="one")),
[self.i1],
)
self.assertSequenceEqual(
Item.objects.filter(~Q(tags__name="t1", name="one"), name="two"),
[self.i2],
)
self.assertSequenceEqual(
Item.objects.exclude(~Q(tags__name="t1", name="one"), name="two"),
[self.i4, self.i1, self.i3],
)
def test_tickets_7204_7506(self):
# Make sure querysets with related fields can be pickled. If this
# doesn't crash, it's a Good Thing.
pickle.dumps(Item.objects.all())
def test_ticket7813(self):
# We should also be able to pickle things that use select_related().
# The only tricky thing here is to ensure that we do the related
# selections properly after unpickling.
qs = Item.objects.select_related()
query = qs.query.get_compiler(qs.db).as_sql()[0]
query2 = pickle.loads(pickle.dumps(qs.query))
self.assertEqual(query2.get_compiler(qs.db).as_sql()[0], query)
def test_deferred_load_qs_pickling(self):
# Check pickling of deferred-loading querysets
qs = Item.objects.defer("name", "creator")
q2 = pickle.loads(pickle.dumps(qs))
self.assertEqual(list(qs), list(q2))
q3 = pickle.loads(pickle.dumps(qs, pickle.HIGHEST_PROTOCOL))
self.assertEqual(list(qs), list(q3))
def test_ticket7277(self):
self.assertSequenceEqual(
self.n1.annotation_set.filter(
Q(tag=self.t5)
| Q(tag__children=self.t5)
| Q(tag__children__children=self.t5)
),
[self.ann1],
)
def test_tickets_7448_7707(self):
# Complex objects should be converted to strings before being used in
# lookups.
self.assertSequenceEqual(
Item.objects.filter(created__in=[self.time1, self.time2]),
[self.i1, self.i2],
)
def test_ticket7235(self):
# An EmptyQuerySet should not raise exceptions if it is filtered.
Eaten.objects.create(meal="m")
q = Eaten.objects.none()
with self.assertNumQueries(0):
self.assertQuerysetEqual(q.all(), [])
self.assertQuerysetEqual(q.filter(meal="m"), [])
self.assertQuerysetEqual(q.exclude(meal="m"), [])
self.assertQuerysetEqual(q.complex_filter({"pk": 1}), [])
self.assertQuerysetEqual(q.select_related("food"), [])
self.assertQuerysetEqual(q.annotate(Count("food")), [])
self.assertQuerysetEqual(q.order_by("meal", "food"), [])
self.assertQuerysetEqual(q.distinct(), [])
self.assertQuerysetEqual(q.extra(select={"foo": "1"}), [])
self.assertQuerysetEqual(q.reverse(), [])
q.query.low_mark = 1
msg = "Cannot change a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
q.extra(select={"foo": "1"})
self.assertQuerysetEqual(q.defer("meal"), [])
self.assertQuerysetEqual(q.only("meal"), [])
def test_ticket7791(self):
# There were "issues" when ordering and distinct-ing on fields related
# via ForeignKeys.
self.assertEqual(len(Note.objects.order_by("extrainfo__info").distinct()), 3)
# Pickling of QuerySets using datetimes() should work.
qs = Item.objects.datetimes("created", "month")
pickle.loads(pickle.dumps(qs))
def test_ticket9997(self):
# If a ValuesList or Values queryset is passed as an inner query, we
# make sure it's only requesting a single value and use that as the
# thing to select.
self.assertSequenceEqual(
Tag.objects.filter(
name__in=Tag.objects.filter(parent=self.t1).values("name")
),
[self.t2, self.t3],
)
# Multi-valued values() and values_list() querysets should raise errors.
with self.assertRaisesMessage(
TypeError, "Cannot use multi-field values as a filter value."
):
Tag.objects.filter(
name__in=Tag.objects.filter(parent=self.t1).values("name", "id")
)
with self.assertRaisesMessage(
TypeError, "Cannot use multi-field values as a filter value."
):
Tag.objects.filter(
name__in=Tag.objects.filter(parent=self.t1).values_list("name", "id")
)
def test_ticket9985(self):
# qs.values_list(...).values(...) combinations should work.
self.assertSequenceEqual(
Note.objects.values_list("note", flat=True).values("id").order_by("id"),
[{"id": 1}, {"id": 2}, {"id": 3}],
)
self.assertSequenceEqual(
Annotation.objects.filter(
notes__in=Note.objects.filter(note="n1")
.values_list("note")
.values("id")
),
[self.ann1],
)
def test_ticket10205(self):
# When bailing out early because of an empty "__in" filter, we need
# to set things up correctly internally so that subqueries can continue
# properly.
self.assertEqual(Tag.objects.filter(name__in=()).update(name="foo"), 0)
def test_ticket10432(self):
# Testing an empty "__in" filter with a generator as the value.
def f():
return iter([])
n_obj = Note.objects.all()[0]
def g():
yield n_obj.pk
self.assertQuerysetEqual(Note.objects.filter(pk__in=f()), [])
self.assertEqual(list(Note.objects.filter(pk__in=g())), [n_obj])
def test_ticket10742(self):
        # Querysets used in an __in clause aren't executed as separate
        # queries.
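        # The subquery is inlined into the outer statement as a nested
        # SELECT, roughly:
        #   ... WHERE id IN (SELECT U0.id FROM queries_author U0
        #                    WHERE U0.num < 3000)
        # so only a single database query is issued.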
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.filter(pk__in=subq)
self.assertSequenceEqual(qs, [self.a1, self.a2])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.exclude(pk__in=subq)
self.assertSequenceEqual(qs, [self.a3, self.a4])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
self.assertSequenceEqual(
Author.objects.filter(Q(pk__in=subq) & Q(name="a1")),
[self.a1],
)
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
def test_ticket7076(self):
# Excluding shouldn't eliminate NULL entries.
self.assertSequenceEqual(
Item.objects.exclude(modified=self.time1).order_by("name"),
[self.i4, self.i3, self.i2],
)
self.assertSequenceEqual(
Tag.objects.exclude(parent__name=self.t1.name),
[self.t1, self.t4, self.t5],
)
def test_ticket7181(self):
        # Ordering by related tables should accommodate nullable fields. This
        # test is a little tricky, since NULL ordering is database-dependent,
        # so we just count the number of results.
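        # For example, PostgreSQL sorts NULLs last in ascending order while
        # SQLite sorts them first, so asserting an exact sequence here would
        # be backend-specific.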
self.assertEqual(len(Tag.objects.order_by("parent__name")), 5)
# Empty querysets can be merged with others.
self.assertSequenceEqual(
Note.objects.none() | Note.objects.all(),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(
Note.objects.all() | Note.objects.none(),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(Note.objects.none() & Note.objects.all(), [])
self.assertSequenceEqual(Note.objects.all() & Note.objects.none(), [])
def test_ticket8439(self):
# Complex combinations of conjunctions, disjunctions and nullable
# relations.
self.assertSequenceEqual(
Author.objects.filter(
Q(item__note__extrainfo=self.e2) | Q(report=self.r1, name="xyz")
),
[self.a2],
)
self.assertSequenceEqual(
Author.objects.filter(
Q(report=self.r1, name="xyz") | Q(item__note__extrainfo=self.e2)
),
[self.a2],
)
self.assertSequenceEqual(
Annotation.objects.filter(
Q(tag__parent=self.t1) | Q(notes__note="n1", name="a1")
),
[self.ann1],
)
xx = ExtraInfo.objects.create(info="xx", note=self.n3)
self.assertSequenceEqual(
Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)),
[self.n1, self.n3],
)
q = Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)).query
self.assertEqual(
len(
[
x
for x in q.alias_map.values()
if x.join_type == LOUTER and q.alias_refcount[x.table_alias]
]
),
1,
)
def test_ticket17429(self):
"""
Meta.ordering=None works the same as Meta.ordering=[]
"""
original_ordering = Tag._meta.ordering
Tag._meta.ordering = None
try:
self.assertCountEqual(
Tag.objects.all(),
[self.t1, self.t2, self.t3, self.t4, self.t5],
)
finally:
Tag._meta.ordering = original_ordering
def test_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(tags__name="t4"),
Item.objects.filter(~Q(tags__name="t4")),
)
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name="t4") | Q(tags__name="t3")),
Item.objects.filter(~(Q(tags__name="t4") | Q(tags__name="t3"))),
)
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name="t4") | ~Q(tags__name="t3")),
Item.objects.filter(~(Q(tags__name="t4") | ~Q(tags__name="t3"))),
)
def test_nested_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(~Q(tags__name="t4")),
Item.objects.filter(~~Q(tags__name="t4")),
)
def test_double_exclude(self):
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name="t4")),
Item.objects.filter(~~Q(tags__name="t4")),
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name="t4")),
Item.objects.filter(~Q(~Q(tags__name="t4"))),
)
def test_exclude_in(self):
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name__in=["t4", "t3"])),
Item.objects.filter(~Q(tags__name__in=["t4", "t3"])),
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name__in=["t4", "t3"])),
Item.objects.filter(~~Q(tags__name__in=["t4", "t3"])),
)
def test_ticket_10790_1(self):
# Querying direct fields with isnull should trim the left outer join.
        # It also should not create an INNER JOIN.
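        # For instance, parent__isnull=True can be answered from the local
        # column alone; the SQL is roughly:
        #   SELECT ... FROM queries_tag WHERE parent_id IS NULL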
q = Tag.objects.filter(parent__isnull=True)
self.assertSequenceEqual(q, [self.t1])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.filter(parent__isnull=False)
self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.exclude(parent__isnull=True)
self.assertSequenceEqual(q, [self.t2, self.t3, self.t4, self.t5])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.exclude(parent__isnull=False)
self.assertSequenceEqual(q, [self.t1])
self.assertNotIn("JOIN", str(q.query))
q = Tag.objects.exclude(parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.t1, self.t2, self.t3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
self.assertNotIn("INNER JOIN", str(q.query))
def test_ticket_10790_2(self):
# Querying across several tables should strip only the last outer join,
# while preserving the preceding inner joins.
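        # Roughly: the first hop stays an INNER JOIN and the final isnull
        # test collapses onto that table's local column:
        #   ... INNER JOIN queries_tag T2 ON (queries_tag.parent_id = T2.id)
        #   WHERE T2.parent_id IS NOT NULL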
q = Tag.objects.filter(parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.t4, self.t5])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
        # Querying without isnull should not convert anything to a left
        # outer join.
q = Tag.objects.filter(parent__parent=self.t1)
self.assertSequenceEqual(q, [self.t4, self.t5])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
def test_ticket_10790_3(self):
        # Querying via indirect fields should populate the left outer join.
q = NamedCategory.objects.filter(tag__isnull=True)
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
        # The INNER JOIN is the parent link to dumbcategory via ptr_id.
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
self.assertSequenceEqual(q, [])
# Querying across several tables should strip only the last join, while
# preserving the preceding left outer joins.
q = NamedCategory.objects.filter(tag__parent__isnull=True)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
self.assertSequenceEqual(q, [self.nc1])
def test_ticket_10790_4(self):
        # Querying across an m2m field should not strip the m2m table from
        # the join.
q = Author.objects.filter(item__tags__isnull=True)
self.assertSequenceEqual(q, [self.a2, self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 2)
self.assertNotIn("INNER JOIN", str(q.query))
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 3)
self.assertNotIn("INNER JOIN", str(q.query))
def test_ticket_10790_5(self):
        # Querying with isnull=False across an m2m field should not create
        # outer joins.
q = Author.objects.filter(item__tags__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a1, self.a2, self.a2, self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 2)
q = Author.objects.filter(item__tags__parent__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 3)
q = Author.objects.filter(item__tags__parent__parent__isnull=False)
self.assertSequenceEqual(q, [self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 4)
def test_ticket_10790_6(self):
        # Querying with isnull=True across an m2m field should not create
        # inner joins and should strip the last outer join.
q = Author.objects.filter(item__tags__parent__parent__isnull=True)
self.assertSequenceEqual(
q,
[self.a1, self.a1, self.a2, self.a2, self.a2, self.a3],
)
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 4)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 3)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
def test_ticket_10790_7(self):
        # Reverse querying with isnull should not strip the join.
q = Author.objects.filter(item__isnull=True)
self.assertSequenceEqual(q, [self.a3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
q = Author.objects.filter(item__isnull=False)
self.assertSequenceEqual(q, [self.a1, self.a2, self.a2, self.a4])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 1)
def test_ticket_10790_8(self):
        # Querying with combined Q objects should also strip the left outer
        # join.
q = Tag.objects.filter(Q(parent__isnull=True) | Q(parent=self.t1))
self.assertSequenceEqual(q, [self.t1, self.t2, self.t3])
self.assertEqual(str(q.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q.query).count("INNER JOIN"), 0)
def test_ticket_10790_combine(self):
        # Combining queries should not re-populate the left outer join.
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__isnull=False)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3, self.t4, self.t5])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q3 = q1 & q2
self.assertSequenceEqual(q3, [])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q2 = Tag.objects.filter(parent=self.t1)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q3 = q2 | q1
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__parent__isnull=True)
q3 = q1 | q2
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
q3 = q2 | q1
self.assertSequenceEqual(q3, [self.t1, self.t2, self.t3])
self.assertEqual(str(q3.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(q3.query).count("INNER JOIN"), 0)
def test_ticket19672(self):
self.assertSequenceEqual(
Report.objects.filter(
Q(creator__isnull=False) & ~Q(creator__extra__value=41)
),
[self.r1],
)
def test_ticket_20250(self):
        # A negated Q along with an annotated queryset failed in Django 1.4.
qs = Author.objects.annotate(Count("item"))
qs = qs.filter(~Q(extra__value=0)).order_by("name")
self.assertIn("SELECT", str(qs.query))
self.assertSequenceEqual(qs, [self.a1, self.a2, self.a3, self.a4])
def test_lookup_constraint_fielderror(self):
msg = (
"Cannot resolve keyword 'unknown_field' into field. Choices are: "
"annotation, category, category_id, children, id, item, "
"managedmodel, name, note, parent, parent_id"
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.filter(unknown_field__name="generic")
def test_common_mixed_case_foreign_keys(self):
"""
Valid query should be generated when fields fetched from joined tables
include FKs whose names only differ by case.
"""
c1 = SimpleCategory.objects.create(name="c1")
c2 = SimpleCategory.objects.create(name="c2")
c3 = SimpleCategory.objects.create(name="c3")
category = CategoryItem.objects.create(category=c1)
mixed_case_field_category = MixedCaseFieldCategoryItem.objects.create(
CaTeGoRy=c2
)
mixed_case_db_column_category = MixedCaseDbColumnCategoryItem.objects.create(
category=c3
)
CommonMixedCaseForeignKeys.objects.create(
category=category,
mixed_case_field_category=mixed_case_field_category,
mixed_case_db_column_category=mixed_case_db_column_category,
)
qs = CommonMixedCaseForeignKeys.objects.values(
"category",
"mixed_case_field_category",
"mixed_case_db_column_category",
"category__category",
"mixed_case_field_category__CaTeGoRy",
"mixed_case_db_column_category__category",
)
self.assertTrue(qs.first())
def test_excluded_intermediary_m2m_table_joined(self):
self.assertSequenceEqual(
Note.objects.filter(~Q(tag__annotation__name=F("note"))),
[self.n1, self.n2, self.n3],
)
self.assertSequenceEqual(
Note.objects.filter(tag__annotation__name="a1").filter(
~Q(tag__annotation__name=F("note"))
),
[],
)
def test_field_with_filterable(self):
self.assertSequenceEqual(
Author.objects.filter(extra=self.e2),
[self.a3, self.a4],
)
def test_negate_field(self):
self.assertSequenceEqual(
Note.objects.filter(negate=True),
[self.n1, self.n2],
)
self.assertSequenceEqual(Note.objects.exclude(negate=True), [self.n3])
class Queries2Tests(TestCase):
@classmethod
def setUpTestData(cls):
cls.num4 = Number.objects.create(num=4)
cls.num8 = Number.objects.create(num=8)
cls.num12 = Number.objects.create(num=12)
def test_ticket4289(self):
        # A slight variation on restricting the filtering choices via lookup
        # constraints.
self.assertSequenceEqual(Number.objects.filter(num__lt=4), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=8, num__lt=12), [])
self.assertSequenceEqual(
Number.objects.filter(num__gt=8, num__lt=13),
[self.num12],
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__lt=4) | Q(num__gt=8, num__lt=12)), []
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=8, num__lt=12) | Q(num__lt=4)), []
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=8) & Q(num__lt=12) | Q(num__lt=4)), []
)
self.assertSequenceEqual(
Number.objects.filter(Q(num__gt=7) & Q(num__lt=12) | Q(num__lt=4)),
[self.num8],
)
def test_ticket12239(self):
# Custom lookups are registered to round float values correctly on gte
# and lt IntegerField queries.
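        # For example, num__gt=11.9 on an IntegerField behaves like
        # num >= 12, so only num12 matches below.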
self.assertSequenceEqual(
Number.objects.filter(num__gt=11.9),
[self.num12],
)
self.assertSequenceEqual(Number.objects.filter(num__gt=12), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=12.0), [])
self.assertSequenceEqual(Number.objects.filter(num__gt=12.1), [])
self.assertCountEqual(
Number.objects.filter(num__lt=12),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lt=12.0),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lt=12.1),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=11.9),
[self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=12),
[self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__gte=12.0),
[self.num12],
)
self.assertSequenceEqual(Number.objects.filter(num__gte=12.1), [])
self.assertSequenceEqual(Number.objects.filter(num__gte=12.9), [])
self.assertCountEqual(
Number.objects.filter(num__lte=11.9),
[self.num4, self.num8],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.0),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.1),
[self.num4, self.num8, self.num12],
)
self.assertCountEqual(
Number.objects.filter(num__lte=12.9),
[self.num4, self.num8, self.num12],
)
def test_ticket7759(self):
# Count should work with a partially read result set.
count = Number.objects.count()
qs = Number.objects.all()
def run():
for obj in qs:
return qs.count() == count
self.assertTrue(run())
class Queries3Tests(TestCase):
def test_ticket7107(self):
# This shouldn't create an infinite loop.
self.assertQuerysetEqual(Valid.objects.all(), [])
def test_datetimes_invalid_field(self):
# An error should be raised when QuerySet.datetimes() is passed the
# wrong type of field.
msg = "'name' isn't a DateField, TimeField, or DateTimeField."
with self.assertRaisesMessage(TypeError, msg):
Item.objects.datetimes("name", "month")
def test_ticket22023(self):
with self.assertRaisesMessage(
TypeError, "Cannot call only() after .values() or .values_list()"
):
Valid.objects.values().only()
with self.assertRaisesMessage(
TypeError, "Cannot call defer() after .values() or .values_list()"
):
Valid.objects.values().defer()
class Queries4Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name="t1", category=generic)
n1 = Note.objects.create(note="n1", misc="foo")
n2 = Note.objects.create(note="n2", misc="bar")
e1 = ExtraInfo.objects.create(info="e1", note=n1)
e2 = ExtraInfo.objects.create(info="e2", note=n2)
cls.a1 = Author.objects.create(name="a1", num=1001, extra=e1)
cls.a3 = Author.objects.create(name="a3", num=3003, extra=e2)
cls.r1 = Report.objects.create(name="r1", creator=cls.a1)
cls.r2 = Report.objects.create(name="r2", creator=cls.a3)
cls.r3 = Report.objects.create(name="r3")
cls.i1 = Item.objects.create(
name="i1", created=datetime.datetime.now(), note=n1, creator=cls.a1
)
cls.i2 = Item.objects.create(
name="i2", created=datetime.datetime.now(), note=n1, creator=cls.a3
)
def test_ticket24525(self):
tag = Tag.objects.create()
anth100 = tag.note_set.create(note="ANTH", misc="100")
math101 = tag.note_set.create(note="MATH", misc="101")
s1 = tag.annotation_set.create(name="1")
s2 = tag.annotation_set.create(name="2")
s1.notes.set([math101, anth100])
s2.notes.set([math101])
result = math101.annotation_set.all() & tag.annotation_set.exclude(
notes__in=[anth100]
)
self.assertEqual(list(result), [s2])
def test_ticket11811(self):
unsaved_category = NamedCategory(name="Other")
msg = (
"Unsaved model instance <NamedCategory: Other> cannot be used in an ORM "
"query."
)
with self.assertRaisesMessage(ValueError, msg):
Tag.objects.filter(pk=self.t1.pk).update(category=unsaved_category)
def test_ticket14876(self):
# Note: when combining the query we need to have information available
# about the join type of the trimmed "creator__isnull" join. If we
# don't have that information, then the join is created as INNER JOIN
# and results will be incorrect.
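        # In other words, the trimmed join must be recorded as LOUTER so
        # that the ORed query still returns r3, whose creator is NULL.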
q1 = Report.objects.filter(
Q(creator__isnull=True) | Q(creator__extra__info="e1")
)
q2 = Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(
Q(creator__extra__info="e1")
)
self.assertCountEqual(q1, [self.r1, self.r3])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Report.objects.filter(
Q(creator__extra__info="e1") | Q(creator__isnull=True)
)
q2 = Report.objects.filter(
Q(creator__extra__info="e1")
) | Report.objects.filter(Q(creator__isnull=True))
self.assertCountEqual(q1, [self.r1, self.r3])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(
Q(creator=self.a1) | Q(creator__report__name="r1")
).order_by()
q2 = (
Item.objects.filter(Q(creator=self.a1)).order_by()
| Item.objects.filter(Q(creator__report__name="r1")).order_by()
)
self.assertCountEqual(q1, [self.i1])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(
Q(creator__report__name="e1") | Q(creator=self.a1)
).order_by()
q2 = (
Item.objects.filter(Q(creator__report__name="e1")).order_by()
| Item.objects.filter(Q(creator=self.a1)).order_by()
)
self.assertCountEqual(q1, [self.i1])
self.assertEqual(str(q1.query), str(q2.query))
def test_combine_join_reuse(self):
        # Joins having identical connections are correctly recreated in the
        # rhs query when the querysets are ORed together (#18748).
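        # Here qs2 needs two joins to the report table; combining with qs1
        # should leave exactly two JOINs in the final SQL rather than
        # duplicating or dropping one.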
Report.objects.create(name="r4", creator=self.a1)
q1 = Author.objects.filter(report__name="r5")
q2 = Author.objects.filter(report__name="r4").filter(report__name="r1")
combined = q1 | q2
self.assertEqual(str(combined.query).count("JOIN"), 2)
self.assertEqual(len(combined), 1)
self.assertEqual(combined[0].name, "a1")
def test_combine_or_filter_reuse(self):
combined = Author.objects.filter(name="a1") | Author.objects.filter(name="a3")
self.assertEqual(combined.get(name="a1"), self.a1)
def test_join_reuse_order(self):
        # Join aliases are reused in order. This shouldn't raise an
        # AssertionError from a circular reference in change_map (#26522).
s1 = School.objects.create()
s2 = School.objects.create()
s3 = School.objects.create()
t1 = Teacher.objects.create()
otherteachers = Teacher.objects.exclude(pk=t1.pk).exclude(friends=t1)
qs1 = otherteachers.filter(schools=s1).filter(schools=s2)
qs2 = otherteachers.filter(schools=s1).filter(schools=s3)
self.assertQuerysetEqual(qs1 | qs2, [])
def test_ticket7095(self):
# Updates that are filtered on the model being updated are somewhat
# tricky in MySQL.
ManagedModel.objects.create(data="mm1", tag=self.t1, public=True)
self.assertEqual(ManagedModel.objects.update(data="mm"), 1)
# A values() or values_list() query across joined models must use outer
# joins appropriately.
# Note: In Oracle, we expect a null CharField to return '' instead of
# None.
if connection.features.interprets_empty_strings_as_nulls:
expected_null_charfield_repr = ""
else:
expected_null_charfield_repr = None
self.assertSequenceEqual(
Report.objects.values_list("creator__extra__info", flat=True).order_by(
"name"
),
["e1", "e2", expected_null_charfield_repr],
)
# Similarly for select_related(), joins beyond an initial nullable join
# must use outer joins so that all results are included.
self.assertSequenceEqual(
Report.objects.select_related("creator", "creator__extra").order_by("name"),
[self.r1, self.r2, self.r3],
)
# When there are multiple paths to a table from another table, we have
# to be careful not to accidentally reuse an inappropriate join when
# using select_related(). We used to return the parent's Detail record
# here by mistake.
d1 = Detail.objects.create(data="d1")
d2 = Detail.objects.create(data="d2")
m1 = Member.objects.create(name="m1", details=d1)
m2 = Member.objects.create(name="m2", details=d2)
Child.objects.create(person=m2, parent=m1)
obj = m1.children.select_related("person__details")[0]
self.assertEqual(obj.person.details.data, "d2")
def test_order_by_resetting(self):
# Calling order_by() with no parameters removes any existing ordering on the
# model. But it should still be possible to add new ordering after that.
qs = Author.objects.order_by().order_by("name")
self.assertIn("ORDER BY", qs.query.get_compiler(qs.db).as_sql()[0])
def test_order_by_reverse_fk(self):
        # It is possible to order by the reverse of a foreign key, although
        # that can lead to duplicate results.
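        # Each matching reverse row contributes one result row, so c1, which
        # has two items, appears twice.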
c1 = SimpleCategory.objects.create(name="category1")
c2 = SimpleCategory.objects.create(name="category2")
CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c1)
self.assertSequenceEqual(
SimpleCategory.objects.order_by("categoryitem", "pk"), [c1, c2, c1]
)
def test_filter_reverse_non_integer_pk(self):
date_obj = DateTimePK.objects.create()
extra_obj = ExtraInfo.objects.create(info="extra", date=date_obj)
self.assertEqual(
DateTimePK.objects.filter(extrainfo=extra_obj).get(),
date_obj,
)
def test_ticket10181(self):
# Avoid raising an EmptyResultSet if an inner query is probably
# empty (and hence, not executed).
self.assertQuerysetEqual(
Tag.objects.filter(id__in=Tag.objects.filter(id__in=[])), []
)
def test_ticket15316_filter_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 2)
self.assertCountEqual(qs, [ci2, ci3])
def test_ticket15316_exclude_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_filter_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_exclude_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(
name="named category1", special_name="special1"
)
c3 = SpecialCategory.objects.create(
name="named category2", special_name="special2"
)
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 2)
self.assertCountEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_filter_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(
category__onetoonecategory__isnull=False
).order_by("pk")
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_exclude_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_filter_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_exclude_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(
category__onetoonecategory__isnull=True
).order_by("pk")
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
class Queries5Tests(TestCase):
@classmethod
def setUpTestData(cls):
        # Ordering by 'rank' gives us rank1, rank2, rank3. Ordering by the
        # Meta.ordering will be rank3, rank2, rank1.
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.n2 = Note.objects.create(note="n2", misc="bar", id=2)
e1 = ExtraInfo.objects.create(info="e1", note=cls.n1)
e2 = ExtraInfo.objects.create(info="e2", note=cls.n2)
a1 = Author.objects.create(name="a1", num=1001, extra=e1)
a2 = Author.objects.create(name="a2", num=2002, extra=e1)
a3 = Author.objects.create(name="a3", num=3003, extra=e2)
cls.rank2 = Ranking.objects.create(rank=2, author=a2)
cls.rank1 = Ranking.objects.create(rank=1, author=a3)
cls.rank3 = Ranking.objects.create(rank=3, author=a1)
def test_ordering(self):
# Cross model ordering is possible in Meta, too.
self.assertSequenceEqual(
Ranking.objects.all(),
[self.rank3, self.rank2, self.rank1],
)
self.assertSequenceEqual(
Ranking.objects.order_by("rank"),
[self.rank1, self.rank2, self.rank3],
)
        # Ordering of extra() pieces is possible, too, and you can mix extra
        # fields and model fields in the ordering.
self.assertSequenceEqual(
Ranking.objects.extra(
tables=["django_site"], order_by=["-django_site.id", "rank"]
),
[self.rank1, self.rank2, self.rank3],
)
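        # extra(select=...) injects a raw SQL expression; quote_name() keeps
        # the column identifier correctly quoted on every backend.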
sql = "case when %s > 2 then 1 else 0 end" % connection.ops.quote_name("rank")
qs = Ranking.objects.extra(select={"good": sql})
self.assertEqual(
[o.good for o in qs.extra(order_by=("-good",))], [True, False, False]
)
self.assertSequenceEqual(
qs.extra(order_by=("-good", "id")),
[self.rank3, self.rank2, self.rank1],
)
# Despite having some extra aliases in the query, we can still omit
# them in a values() query.
dicts = qs.values("id", "rank").order_by("id")
self.assertEqual([d["rank"] for d in dicts], [2, 1, 3])
def test_ticket7256(self):
        # An empty values() call includes all aliases, including those from
        # an extra().
sql = "case when %s > 2 then 1 else 0 end" % connection.ops.quote_name("rank")
qs = Ranking.objects.extra(select={"good": sql})
dicts = qs.values().order_by("id")
for d in dicts:
del d["id"]
del d["author_id"]
self.assertEqual(
[sorted(d.items()) for d in dicts],
[
[("good", 0), ("rank", 2)],
[("good", 0), ("rank", 1)],
[("good", 1), ("rank", 3)],
],
)
def test_ticket7045(self):
# Extra tables used to crash SQL construction on the second use.
qs = Ranking.objects.extra(tables=["django_site"])
qs.query.get_compiler(qs.db).as_sql()
        # The test passes if this doesn't raise an exception.
qs.query.get_compiler(qs.db).as_sql()
def test_ticket9848(self):
# Make sure that updates which only filter on sub-tables don't
# inadvertently update the wrong records (bug #9848).
author_start = Author.objects.get(name="a1")
ranking_start = Ranking.objects.get(author__name="a1")
# Make sure that the IDs from different tables don't happen to match.
self.assertSequenceEqual(
Ranking.objects.filter(author__name="a1"),
[self.rank3],
)
self.assertEqual(Ranking.objects.filter(author__name="a1").update(rank=4636), 1)
r = Ranking.objects.get(author__name="a1")
self.assertEqual(r.id, ranking_start.id)
self.assertEqual(r.author.id, author_start.id)
self.assertEqual(r.rank, 4636)
r.rank = 3
r.save()
self.assertSequenceEqual(
Ranking.objects.all(),
[self.rank3, self.rank2, self.rank1],
)
def test_ticket5261(self):
# Test different empty excludes.
self.assertSequenceEqual(
Note.objects.exclude(Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.filter(~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.filter(~Q() | ~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.exclude(~Q() & ~Q()),
[self.n1, self.n2],
)
self.assertSequenceEqual(
Note.objects.exclude(~Q() ^ ~Q()),
[self.n1, self.n2],
)
def test_extra_select_literal_percent_s(self):
        # %%s in extra() select clauses escapes to a literal %s.
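        # extra() select strings go through the DB-API paramstyle, so a
        # literal percent sign must be doubled, just as in raw SQL executed
        # with params.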
self.assertEqual(Note.objects.extra(select={"foo": "'%%s'"})[0].foo, "%s")
self.assertEqual(
Note.objects.extra(select={"foo": "'%%s bar %%s'"})[0].foo, "%s bar %s"
)
self.assertEqual(
Note.objects.extra(select={"foo": "'bar %%s'"})[0].foo, "bar %s"
)
def test_extra_select_alias_sql_injection(self):
crafted_alias = """injected_name" from "queries_note"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Note.objects.extra(select={crafted_alias: "1"})
def test_queryset_reuse(self):
# Using querysets doesn't mutate aliases.
authors = Author.objects.filter(Q(name="a1") | Q(name="nonexistent"))
self.assertEqual(Ranking.objects.filter(author__in=authors).get(), self.rank3)
self.assertEqual(authors.count(), 1)
def test_filter_unsaved_object(self):
# These tests will catch ValueError in Django 5.0 when passing unsaved
# model instances to related filters becomes forbidden.
# msg = "Model instances passed to related filters must be saved."
msg = "Passing unsaved model instances to related filters is deprecated."
company = Company.objects.create(name="Django")
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.filter(employer=Company(name="unsaved"))
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.filter(employer__in=[company, Company(name="unsaved")])
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
StaffUser.objects.filter(staff=Staff(name="unsaved"))
class SelectRelatedTests(TestCase):
def test_tickets_3045_3288(self):
# Once upon a time, select_related() with circular relations would loop
# infinitely if you forgot to specify "depth". Now we set an arbitrary
# default upper bound.
self.assertQuerysetEqual(X.objects.all(), [])
self.assertQuerysetEqual(X.objects.select_related(), [])
class SubclassFKTests(TestCase):
def test_ticket7778(self):
# Model subclasses could not be deleted if a nullable foreign key
# relates to a model that relates back.
num_celebs = Celebrity.objects.count()
tvc = TvChef.objects.create(name="Huey")
self.assertEqual(Celebrity.objects.count(), num_celebs + 1)
Fan.objects.create(fan_of=tvc)
Fan.objects.create(fan_of=tvc)
tvc.delete()
# The parent object should have been deleted as well.
self.assertEqual(Celebrity.objects.count(), num_celebs)
class CustomPkTests(TestCase):
def test_ticket7371(self):
self.assertQuerysetEqual(Related.objects.order_by("custom"), [])
class NullableRelOrderingTests(TestCase):
def test_ticket10028(self):
        # Ordering by a model related through nullable relations(!) should
        # use outer joins, so that all results are included.
p1 = Plaything.objects.create(name="p1")
self.assertSequenceEqual(Plaything.objects.all(), [p1])
def test_join_already_in_query(self):
        # Ordering by a model related through nullable relations should not
        # change the join type of already existing joins.
Plaything.objects.create(name="p1")
s = SingleObject.objects.create(name="s")
r = RelatedObject.objects.create(single=s, f=1)
p2 = Plaything.objects.create(name="p2", others=r)
qs = Plaything.objects.filter(others__isnull=False).order_by("pk")
self.assertNotIn("JOIN", str(qs.query))
qs = Plaything.objects.filter(others__f__isnull=False).order_by("pk")
self.assertIn("INNER", str(qs.query))
qs = qs.order_by("others__single__name")
        # The ordering by others__single__name will add one new join (to
        # single) and that join must be a LEFT join. The already existing
        # join to related objects must be kept INNER. So, we have both an
        # INNER and a LEFT join in the query.
self.assertEqual(str(qs.query).count("LEFT"), 1)
self.assertEqual(str(qs.query).count("INNER"), 1)
self.assertSequenceEqual(qs, [p2])
class DisjunctiveFilterTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.e1 = ExtraInfo.objects.create(info="e1", note=cls.n1)
def test_ticket7872(self):
# Another variation on the disjunctive filtering theme.
# For the purposes of this regression test, it's important that there is no
# Join object related to the LeafA we create.
l1 = LeafA.objects.create(data="first")
self.assertSequenceEqual(LeafA.objects.all(), [l1])
self.assertSequenceEqual(
LeafA.objects.filter(Q(data="first") | Q(join__b__data="second")),
[l1],
)
def test_ticket8283(self):
# Checking that applying filters after a disjunction works correctly.
self.assertSequenceEqual(
(
ExtraInfo.objects.filter(note=self.n1)
| ExtraInfo.objects.filter(info="e2")
).filter(note=self.n1),
[self.e1],
)
self.assertSequenceEqual(
(
ExtraInfo.objects.filter(info="e2")
| ExtraInfo.objects.filter(note=self.n1)
).filter(note=self.n1),
[self.e1],
)
class Queries6Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name="t1", category=generic)
cls.t2 = Tag.objects.create(name="t2", parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name="t3", parent=cls.t1)
cls.t4 = Tag.objects.create(name="t4", parent=cls.t3)
cls.t5 = Tag.objects.create(name="t5", parent=cls.t3)
n1 = Note.objects.create(note="n1", misc="foo", id=1)
cls.ann1 = Annotation.objects.create(name="a1", tag=cls.t1)
cls.ann1.notes.add(n1)
cls.ann2 = Annotation.objects.create(name="a2", tag=cls.t4)
def test_parallel_iterators(self):
# Parallel iterators work.
qs = Tag.objects.all()
i1, i2 = iter(qs), iter(qs)
self.assertEqual(repr(next(i1)), "<Tag: t1>")
self.assertEqual(repr(next(i1)), "<Tag: t2>")
self.assertEqual(repr(next(i2)), "<Tag: t1>")
self.assertEqual(repr(next(i2)), "<Tag: t2>")
self.assertEqual(repr(next(i2)), "<Tag: t3>")
self.assertEqual(repr(next(i1)), "<Tag: t3>")
qs = X.objects.all()
self.assertFalse(qs)
self.assertFalse(qs)
def test_nested_queries_sql(self):
        # Nested queries should not evaluate the inner query as part of
        # constructing the SQL (so we should see a nested query here,
        # indicated by two occurrences of "SELECT").
qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy"))
self.assertEqual(qs.query.get_compiler(qs.db).as_sql()[0].count("SELECT"), 2)
def test_tickets_8921_9188(self):
# Incorrect SQL was being generated for certain types of exclude()
# queries that crossed multi-valued relations (#8921, #9188 and some
# preemptively discovered cases).
self.assertSequenceEqual(
PointerA.objects.filter(connection__pointerb__id=1), []
)
self.assertSequenceEqual(
PointerA.objects.exclude(connection__pointerb__id=1), []
)
self.assertSequenceEqual(
Tag.objects.exclude(children=None),
[self.t1, self.t3],
)
# This example is tricky because the parent could be NULL, so only checking
# parents with annotations omits some results (tag t1, in this case).
self.assertSequenceEqual(
Tag.objects.exclude(parent__annotation__name="a1"),
[self.t1, self.t4, self.t5],
)
        # The annotation->tag link is single-valued and the tag->children
        # link is multi-valued. So we have to split the exclude filter in
        # the middle and then optimize the inner query without losing
        # results.
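        # The exclude() therefore becomes a negated subquery on the
        # single-valued annotation->tag link, with the multi-valued
        # tag->children hop evaluated inside that subquery.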
self.assertSequenceEqual(
Annotation.objects.exclude(tag__children__name="t2"),
[self.ann2],
)
        # Nested queries are possible (although they should be used with
        # care, since they can have performance problems on backends like
        # MySQL).
self.assertSequenceEqual(
Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")),
[self.ann1],
)
def test_ticket3739(self):
# The all() method on querysets returns a copy of the queryset.
q1 = Tag.objects.order_by("name")
self.assertIsNot(q1, q1.all())
def test_ticket_11320(self):
qs = Tag.objects.exclude(category=None).exclude(category__name="foo")
self.assertEqual(str(qs.query).count(" INNER JOIN "), 1)
def test_distinct_ordered_sliced_subquery_aggregation(self):
self.assertEqual(
Tag.objects.distinct().order_by("category__name")[:3].count(), 3
)
def test_multiple_columns_with_the_same_name_slice(self):
self.assertEqual(
list(
Tag.objects.order_by("name").values_list("name", "category__name")[:2]
),
[("t1", "Generic"), ("t2", "Generic")],
)
self.assertSequenceEqual(
Tag.objects.order_by("name").select_related("category")[:2],
[self.t1, self.t2],
)
self.assertEqual(
list(Tag.objects.order_by("-name").values_list("name", "parent__name")[:2]),
[("t5", "t3"), ("t4", "t3")],
)
self.assertSequenceEqual(
Tag.objects.order_by("-name").select_related("parent")[:2],
[self.t5, self.t4],
)
def test_col_alias_quoted(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertEqual(
Tag.objects.values("parent")
.annotate(
tag_per_parent=Count("pk"),
)
.aggregate(Max("tag_per_parent")),
{"tag_per_parent__max": 2},
)
sql = captured_queries[0]["sql"]
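        # Columns selected inside the aggregation subquery get positional
        # aliases (col1, col2, ...); they should be quoted like any other
        # identifier.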
self.assertIn("AS %s" % connection.ops.quote_name("col1"), sql)
def test_xor_subquery(self):
self.assertSequenceEqual(
Tag.objects.filter(
Exists(Tag.objects.filter(id=OuterRef("id"), name="t3"))
^ Exists(Tag.objects.filter(id=OuterRef("id"), parent=self.t1))
),
[self.t2],
)
class RawQueriesTests(TestCase):
@classmethod
def setUpTestData(cls):
Note.objects.create(note="n1", misc="foo", id=1)
def test_ticket14729(self):
        # Test the representation of a raw query with one or more parameters
        # passed as a list.
query = "SELECT * FROM queries_note WHERE note = %s"
params = ["n1"]
qs = Note.objects.raw(query, params=params)
self.assertEqual(
repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1>"
)
query = "SELECT * FROM queries_note WHERE note = %s and misc = %s"
params = ["n1", "foo"]
qs = Note.objects.raw(query, params=params)
self.assertEqual(
repr(qs),
"<RawQuerySet: SELECT * FROM queries_note WHERE note = n1 and misc = foo>",
)
class GeneratorExpressionTests(SimpleTestCase):
def test_ticket10432(self):
# Using an empty iterator as the rvalue for an "__in"
# lookup is legal.
self.assertCountEqual(Note.objects.filter(pk__in=iter(())), [])
class ComparisonTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note="n1", misc="foo", id=1)
e1 = ExtraInfo.objects.create(info="e1", note=cls.n1)
cls.a2 = Author.objects.create(name="a2", num=2002, extra=e1)
def test_ticket8597(self):
# Regression tests for case-insensitive comparisons
item_ab = Item.objects.create(
name="a_b", created=datetime.datetime.now(), creator=self.a2, note=self.n1
)
item_xy = Item.objects.create(
name="x%y", created=datetime.datetime.now(), creator=self.a2, note=self.n1
)
self.assertSequenceEqual(
Item.objects.filter(name__iexact="A_b"),
[item_ab],
)
self.assertSequenceEqual(
Item.objects.filter(name__iexact="x%Y"),
[item_xy],
)
self.assertSequenceEqual(
Item.objects.filter(name__istartswith="A_b"),
[item_ab],
)
self.assertSequenceEqual(
Item.objects.filter(name__iendswith="A_b"),
[item_ab],
)
class ExistsSql(TestCase):
def test_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertFalse(Tag.objects.exists())
        # OK - so the exists query worked - but did it include too many
        # columns?
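        # exists() should select a constant instead of real columns,
        # roughly: SELECT 1 AS "a" FROM queries_tag LIMIT 1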
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]["sql"]
id, name = connection.ops.quote_name("id"), connection.ops.quote_name("name")
self.assertNotIn(id, qstr)
self.assertNotIn(name, qstr)
def test_distinct_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertIs(Article.objects.distinct().exists(), False)
self.assertEqual(len(captured_queries), 1)
captured_sql = captured_queries[0]["sql"]
self.assertNotIn(connection.ops.quote_name("id"), captured_sql)
self.assertNotIn(connection.ops.quote_name("name"), captured_sql)
def test_sliced_distinct_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertIs(Article.objects.distinct()[1:3].exists(), False)
self.assertEqual(len(captured_queries), 1)
captured_sql = captured_queries[0]["sql"]
self.assertIn(connection.ops.quote_name("id"), captured_sql)
self.assertIn(connection.ops.quote_name("name"), captured_sql)
def test_ticket_18414(self):
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="two", created=datetime.datetime.now())
self.assertTrue(Article.objects.exists())
self.assertTrue(Article.objects.distinct().exists())
self.assertTrue(Article.objects.distinct()[1:3].exists())
self.assertFalse(Article.objects.distinct()[1:1].exists())
@skipUnlessDBFeature("can_distinct_on_fields")
def test_ticket_18414_distinct_on(self):
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="two", created=datetime.datetime.now())
self.assertTrue(Article.objects.distinct("name").exists())
self.assertTrue(Article.objects.distinct("name")[1:2].exists())
self.assertFalse(Article.objects.distinct("name")[2:3].exists())
class QuerysetOrderedTests(unittest.TestCase):
"""
    Tests for the QuerySet.ordered attribute.
"""
def test_no_default_or_explicit_ordering(self):
self.assertIs(Annotation.objects.all().ordered, False)
def test_cleared_default_ordering(self):
self.assertIs(Tag.objects.all().ordered, True)
self.assertIs(Tag.objects.order_by().ordered, False)
def test_explicit_ordering(self):
self.assertIs(Annotation.objects.order_by("id").ordered, True)
def test_empty_queryset(self):
self.assertIs(Annotation.objects.none().ordered, True)
def test_order_by_extra(self):
self.assertIs(Annotation.objects.extra(order_by=["id"]).ordered, True)
def test_annotated_ordering(self):
qs = Annotation.objects.annotate(num_notes=Count("notes"))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by("num_notes").ordered, True)
def test_annotated_default_ordering(self):
qs = Tag.objects.annotate(num_notes=Count("pk"))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by("name").ordered, True)
def test_annotated_values_default_ordering(self):
qs = Tag.objects.values("name").annotate(num_notes=Count("pk"))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by("name").ordered, True)
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
class SubqueryTests(TestCase):
@classmethod
def setUpTestData(cls):
NamedCategory.objects.create(id=1, name="first")
NamedCategory.objects.create(id=2, name="second")
NamedCategory.objects.create(id=3, name="third")
NamedCategory.objects.create(id=4, name="fourth")
def test_ordered_subselect(self):
"Subselects honor any manual ordering"
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[0:2]
)
self.assertEqual(set(query.values_list("id", flat=True)), {3, 4})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[:2]
)
self.assertEqual(set(query.values_list("id", flat=True)), {3, 4})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:2]
)
self.assertEqual(set(query.values_list("id", flat=True)), {3})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[2:]
)
self.assertEqual(set(query.values_list("id", flat=True)), {1, 2})
def test_slice_subquery_and_query(self):
"""
Slice a query that has a sliced subquery
"""
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[0:2]
)[0:2]
self.assertEqual({x.id for x in query}, {3, 4})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:3]
)[1:3]
self.assertEqual({x.id for x in query}, {3})
query = DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[2:]
)[1:]
self.assertEqual({x.id for x in query}, {2})
def test_related_sliced_subquery(self):
"""
        Related object constraints can safely contain sliced subqueries
        (refs #22434).
"""
generic = NamedCategory.objects.create(id=5, name="Generic")
t1 = Tag.objects.create(name="t1", category=generic)
t2 = Tag.objects.create(name="t2", category=generic)
ManagedModel.objects.create(data="mm1", tag=t1, public=True)
mm2 = ManagedModel.objects.create(data="mm2", tag=t2, public=True)
query = ManagedModel.normal_manager.filter(
tag__in=Tag.objects.order_by("-id")[:1]
)
self.assertEqual({x.id for x in query}, {mm2.id})
def test_sliced_delete(self):
"Delete queries can safely contain sliced subqueries"
DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[0:1]
).delete()
self.assertEqual(
set(DumbCategory.objects.values_list("id", flat=True)), {1, 2, 3}
)
DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:2]
).delete()
self.assertEqual(set(DumbCategory.objects.values_list("id", flat=True)), {1, 3})
DumbCategory.objects.filter(
id__in=DumbCategory.objects.order_by("-id")[1:]
).delete()
self.assertEqual(set(DumbCategory.objects.values_list("id", flat=True)), {3})
def test_distinct_ordered_sliced_subquery(self):
# Implicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct().order_by("name")[0:2],
)
.order_by("name")
.values_list("name", flat=True),
["first", "fourth"],
)
# Explicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct()
.order_by("-name")
.values("id")[0:2],
)
.order_by("name")
.values_list("name", flat=True),
["second", "third"],
)
# Annotated value.
self.assertSequenceEqual(
DumbCategory.objects.filter(
id__in=DumbCategory.objects.annotate(double_id=F("id") * 2)
.order_by("id")
.distinct()
.values("double_id")[0:2],
)
.order_by("id")
.values_list("id", flat=True),
[2, 4],
)
class QuerySetBitwiseOperationTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.school = School.objects.create()
cls.room_1 = Classroom.objects.create(
school=cls.school, has_blackboard=False, name="Room 1"
)
cls.room_2 = Classroom.objects.create(
school=cls.school, has_blackboard=True, name="Room 2"
)
cls.room_3 = Classroom.objects.create(
school=cls.school, has_blackboard=True, name="Room 3"
)
cls.room_4 = Classroom.objects.create(
school=cls.school, has_blackboard=False, name="Room 4"
)
tag = Tag.objects.create()
cls.annotation_1 = Annotation.objects.create(tag=tag)
annotation_2 = Annotation.objects.create(tag=tag)
note = cls.annotation_1.notes.create(tag=tag)
cls.base_user_1 = BaseUser.objects.create(annotation=cls.annotation_1)
cls.base_user_2 = BaseUser.objects.create(annotation=annotation_2)
cls.task = Task.objects.create(
owner=cls.base_user_2,
creator=cls.base_user_2,
note=note,
)
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_rhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)
qs2 = Classroom.objects.filter(has_blackboard=False)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_3])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_lhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)[:1]
qs2 = Classroom.objects.filter(has_blackboard=False)
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_4])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_both_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=False)[:1]
qs2 = Classroom.objects.filter(has_blackboard=True)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_or_with_both_slice_and_ordering(self):
qs1 = Classroom.objects.filter(has_blackboard=False).order_by("-pk")[:1]
qs2 = Classroom.objects.filter(has_blackboard=True).order_by("-name")[:1]
self.assertCountEqual(qs1 | qs2, [self.room_3, self.room_4])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_rhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)
qs2 = Classroom.objects.filter(has_blackboard=False)[:1]
self.assertCountEqual(qs1 ^ qs2, [self.room_1, self.room_2, self.room_3])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_lhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)[:1]
qs2 = Classroom.objects.filter(has_blackboard=False)
self.assertCountEqual(qs1 ^ qs2, [self.room_1, self.room_2, self.room_4])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_both_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=False)[:1]
qs2 = Classroom.objects.filter(has_blackboard=True)[:1]
self.assertCountEqual(qs1 ^ qs2, [self.room_1, self.room_2])
@skipUnlessDBFeature("allow_sliced_subqueries_with_in")
def test_xor_with_both_slice_and_ordering(self):
qs1 = Classroom.objects.filter(has_blackboard=False).order_by("-pk")[:1]
qs2 = Classroom.objects.filter(has_blackboard=True).order_by("-name")[:1]
self.assertCountEqual(qs1 ^ qs2, [self.room_3, self.room_4])
def test_subquery_aliases(self):
combined = School.objects.filter(pk__isnull=False) & School.objects.filter(
Exists(
Classroom.objects.filter(
has_blackboard=True,
school=OuterRef("pk"),
)
),
)
self.assertSequenceEqual(combined, [self.school])
nested_combined = School.objects.filter(pk__in=combined.values("pk"))
self.assertSequenceEqual(nested_combined, [self.school])
def test_conflicting_aliases_during_combine(self):
qs1 = self.annotation_1.baseuser_set.all()
qs2 = BaseUser.objects.filter(
Q(owner__note__in=self.annotation_1.notes.all())
| Q(creator__note__in=self.annotation_1.notes.all())
)
self.assertSequenceEqual(qs1, [self.base_user_1])
self.assertSequenceEqual(qs2, [self.base_user_2])
self.assertCountEqual(qs2 | qs1, qs1 | qs2)
self.assertCountEqual(qs2 | qs1, [self.base_user_1, self.base_user_2])
class CloneTests(TestCase):
def test_evaluated_queryset_as_argument(self):
"""
If a queryset is already evaluated, it can still be used as a query arg.
"""
n = Note(note="Test1", misc="misc")
n.save()
e = ExtraInfo(info="good", note=n)
e.save()
n_list = Note.objects.all()
# Evaluate the Note queryset, populating the query cache
list(n_list)
        # Make one of the cached results unpicklable.
n_list._result_cache[0].lock = Lock()
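        # threading.Lock instances can't be pickled, so pickling the
        # queryset (whose cache now holds one) must fail.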
with self.assertRaises(TypeError):
pickle.dumps(n_list)
# Use the note queryset in a query, and evaluate
# that query in a way that involves cloning.
self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, "good")
def test_no_model_options_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta)
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail(
"Model options shouldn't be cloned."
)
try:
Note.objects.filter(pk__lte=F("pk") + 1).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
def test_no_fields_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta.get_field("misc"))
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail(
"Model fields shouldn't be cloned"
)
try:
Note.objects.filter(note=F("misc")).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
class EmptyQuerySetTests(SimpleTestCase):
def test_emptyqueryset_values(self):
        # #14366 -- Calling .values() on an empty QuerySet and then cloning
        # it should not cause an error.
self.assertCountEqual(Number.objects.none().values("num").order_by("num"), [])
def test_values_subquery(self):
self.assertCountEqual(
Number.objects.filter(pk__in=Number.objects.none().values("pk")), []
)
self.assertCountEqual(
Number.objects.filter(pk__in=Number.objects.none().values_list("pk")), []
)
def test_ticket_19151(self):
# #19151 -- Calling .values() or .values_list() on an empty QuerySet
# should return an empty QuerySet and not cause an error.
q = Author.objects.none()
self.assertCountEqual(q.values(), [])
self.assertCountEqual(q.values_list(), [])
class ValuesQuerysetTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=72)
def test_flat_values_list(self):
qs = Number.objects.values_list("num")
qs = qs.values_list("num", flat=True)
self.assertSequenceEqual(qs, [72])
def test_extra_values(self):
        # Testing for ticket #14930 issues.
qs = Number.objects.extra(
select={"value_plus_x": "num+%s", "value_minus_x": "num-%s"},
select_params=(1, 2),
)
qs = qs.order_by("value_minus_x")
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_values_order_twice(self):
        # Testing for ticket #14930 issues.
qs = Number.objects.extra(
select={"value_plus_one": "num+1", "value_minus_one": "num-1"}
)
qs = qs.order_by("value_minus_one").order_by("value_plus_one")
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_values_order_multiple(self):
# Postgres doesn't allow constants in order by, so check for that.
qs = Number.objects.extra(
select={
"value_plus_one": "num+1",
"value_minus_one": "num-1",
"constant_value": "1",
}
)
qs = qs.order_by("value_plus_one", "value_minus_one", "constant_value")
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_values_order_in_extra(self):
        # Testing for ticket #14930 issues.
qs = Number.objects.extra(
select={"value_plus_one": "num+1", "value_minus_one": "num-1"},
order_by=["value_minus_one"],
)
qs = qs.values("num")
def test_extra_select_params_values_order_in_extra(self):
        # testing for ticket 23259 issues
qs = Number.objects.extra(
select={"value_plus_x": "num+%s"},
select_params=[1],
order_by=["value_plus_x"],
)
qs = qs.filter(num=72)
qs = qs.values("num")
self.assertSequenceEqual(qs, [{"num": 72}])
def test_extra_multiple_select_params_values_order_by(self):
        # testing for ticket 23259 issues
qs = Number.objects.extra(
select={"value_plus_x": "num+%s", "value_minus_x": "num-%s"},
select_params=(72, 72),
)
qs = qs.order_by("value_minus_x")
qs = qs.filter(num=1)
qs = qs.values("num")
self.assertSequenceEqual(qs, [])
def test_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={"value_plus_one": "num+1"})
qs = qs.order_by("value_plus_one")
qs = qs.values_list("num")
self.assertSequenceEqual(qs, [(72,)])
def test_flat_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={"value_plus_one": "num+1"})
qs = qs.order_by("value_plus_one")
qs = qs.values_list("num", flat=True)
self.assertSequenceEqual(qs, [72])
def test_field_error_values_list(self):
# see #23443
msg = (
"Cannot resolve keyword %r into field. Join on 'name' not permitted."
% "foo"
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.values_list("name__foo")
def test_named_values_list_flat(self):
msg = "'flat' and 'named' can't be used together."
with self.assertRaisesMessage(TypeError, msg):
Number.objects.values_list("num", flat=True, named=True)
def test_named_values_list_bad_field_name(self):
msg = "Type names and field names must be valid identifiers: '1'"
with self.assertRaisesMessage(ValueError, msg):
Number.objects.extra(select={"1": "num+1"}).values_list(
"1", named=True
).first()
def test_named_values_list_with_fields(self):
qs = Number.objects.extra(select={"num2": "num+1"}).annotate(Count("id"))
values = qs.values_list("num", "num2", named=True).first()
self.assertEqual(type(values).__name__, "Row")
self.assertEqual(values._fields, ("num", "num2"))
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
def test_named_values_list_without_fields(self):
qs = Number.objects.extra(select={"num2": "num+1"}).annotate(Count("id"))
values = qs.values_list(named=True).first()
self.assertEqual(type(values).__name__, "Row")
self.assertEqual(
values._fields,
("num2", "id", "num", "other_num", "another_num", "id__count"),
)
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
self.assertEqual(values.id__count, 1)
def test_named_values_list_expression_with_default_alias(self):
expr = Count("id")
values = (
Number.objects.annotate(id__count1=expr)
.values_list(expr, "id__count1", named=True)
.first()
)
self.assertEqual(values._fields, ("id__count2", "id__count1"))
def test_named_values_list_expression(self):
expr = F("num") + 1
qs = Number.objects.annotate(combinedexpression1=expr).values_list(
expr, "combinedexpression1", named=True
)
values = qs.first()
self.assertEqual(values._fields, ("combinedexpression2", "combinedexpression1"))
def test_named_values_pickle(self):
value = Number.objects.values_list("num", "other_num", named=True).get()
self.assertEqual(value, (72, None))
self.assertEqual(pickle.loads(pickle.dumps(value)), value)
class QuerySetSupportsPythonIdioms(TestCase):
@classmethod
def setUpTestData(cls):
some_date = datetime.datetime(2014, 5, 16, 12, 1)
cls.articles = [
Article.objects.create(name=f"Article {i}", created=some_date)
for i in range(1, 8)
]
def get_ordered_articles(self):
return Article.objects.order_by("name")
def test_can_get_items_using_index_and_slice_notation(self):
self.assertEqual(self.get_ordered_articles()[0].name, "Article 1")
self.assertSequenceEqual(
self.get_ordered_articles()[1:3],
[self.articles[1], self.articles[2]],
)
def test_slicing_with_steps_can_be_used(self):
self.assertSequenceEqual(
self.get_ordered_articles()[::2],
[
self.articles[0],
self.articles[2],
self.articles[4],
self.articles[6],
],
)
def test_slicing_without_step_is_lazy(self):
with self.assertNumQueries(0):
self.get_ordered_articles()[0:5]
def test_slicing_with_tests_is_not_lazy(self):
with self.assertNumQueries(1):
self.get_ordered_articles()[0:5:3]
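    # A short illustrative note (not part of the original suite): slices
    # without a step map directly onto LIMIT/OFFSET, so they stay lazy, but
    # SQL has no native "step". A stepped slice therefore has to fetch the
    # limited rows and apply the step in Python, roughly:
    #
    #   list(qs[0:5])[::3]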
def test_slicing_can_slice_again_after_slicing(self):
self.assertSequenceEqual(
self.get_ordered_articles()[0:5][0:2],
[self.articles[0], self.articles[1]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[0:5][4:], [self.articles[4]]
)
self.assertSequenceEqual(self.get_ordered_articles()[0:5][5:], [])
# Some more tests!
self.assertSequenceEqual(
self.get_ordered_articles()[2:][0:2],
[self.articles[2], self.articles[3]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[2:][:2],
[self.articles[2], self.articles[3]],
)
self.assertSequenceEqual(
self.get_ordered_articles()[2:][2:3], [self.articles[4]]
)
# Using an offset without a limit is also possible.
self.assertSequenceEqual(
self.get_ordered_articles()[5:],
[self.articles[5], self.articles[6]],
)
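    # Illustrative sketch (added for clarity): composing two slices composes
    # their LIMIT/OFFSET arithmetic instead of re-querying. Assuming the
    # usual translation, qs[2:][0:2] becomes OFFSET 2 LIMIT 2 and
    # qs[0:5][4:] becomes OFFSET 4 LIMIT 1, mirroring plain list slicing:
    #
    #   list(range(7))[0:5][4:] == [4]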
def test_slicing_cannot_filter_queryset_once_sliced(self):
msg = "Cannot filter a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].filter(id=1)
def test_slicing_cannot_reorder_queryset_once_sliced(self):
msg = "Cannot reorder a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].order_by("id")
def test_slicing_cannot_combine_queries_once_sliced(self):
msg = "Cannot combine queries once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:1] & Article.objects.all()[4:5]
def test_slicing_negative_indexing_not_supported_for_single_element(self):
"""hint: inverting your ordering might do what you need"""
msg = "Negative indexing is not supported."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[-1]
def test_slicing_negative_indexing_not_supported_for_range(self):
"""hint: inverting your ordering might do what you need"""
msg = "Negative indexing is not supported."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[0:-5]
with self.assertRaisesMessage(ValueError, msg):
Article.objects.all()[-1:]
def test_invalid_index(self):
msg = "QuerySet indices must be integers or slices, not str."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()["foo"]
def test_can_get_number_of_items_in_queryset_using_standard_len(self):
self.assertEqual(len(Article.objects.filter(name__exact="Article 1")), 1)
def test_can_combine_queries_using_and_and_or_operators(self):
s1 = Article.objects.filter(name__exact="Article 1")
s2 = Article.objects.filter(name__exact="Article 2")
self.assertSequenceEqual(
(s1 | s2).order_by("name"),
[self.articles[0], self.articles[1]],
)
self.assertSequenceEqual(s1 & s2, [])
class WeirdQuerysetSlicingTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=1)
Number.objects.create(num=2)
Article.objects.create(name="one", created=datetime.datetime.now())
Article.objects.create(name="two", created=datetime.datetime.now())
Article.objects.create(name="three", created=datetime.datetime.now())
Article.objects.create(name="four", created=datetime.datetime.now())
food = Food.objects.create(name="spam")
Eaten.objects.create(meal="spam with eggs", food=food)
def test_tickets_7698_10202(self):
# People like to slice with '0' as the high-water mark.
self.assertQuerysetEqual(Article.objects.all()[0:0], [])
self.assertQuerysetEqual(Article.objects.all()[0:0][:10], [])
self.assertEqual(Article.objects.all()[:0].count(), 0)
msg = "Cannot change a query once a slice has been taken."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[:0].latest("created")
def test_empty_resultset_sql(self):
# ticket #12192
self.assertNumQueries(0, lambda: list(Number.objects.all()[1:1]))
def test_empty_sliced_subquery(self):
self.assertEqual(
Eaten.objects.filter(food__in=Food.objects.all()[0:0]).count(), 0
)
def test_empty_sliced_subquery_exclude(self):
self.assertEqual(
Eaten.objects.exclude(food__in=Food.objects.all()[0:0]).count(), 1
)
def test_zero_length_values_slicing(self):
n = 42
with self.assertNumQueries(0):
self.assertQuerysetEqual(Article.objects.values()[n:n], [])
self.assertQuerysetEqual(Article.objects.values_list()[n:n], [])
class EscapingTests(TestCase):
def test_ticket_7302(self):
# Reserved names are appropriately escaped
r_a = ReservedName.objects.create(name="a", order=42)
r_b = ReservedName.objects.create(name="b", order=37)
self.assertSequenceEqual(
ReservedName.objects.order_by("order"),
[r_b, r_a],
)
self.assertSequenceEqual(
ReservedName.objects.extra(
select={"stuff": "name"}, order_by=("order", "stuff")
),
[r_b, r_a],
)
class ToFieldTests(TestCase):
def test_in_query(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Eaten.objects.filter(food__in=[apple, pear])),
{lunch, dinner},
)
def test_in_subquery(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(
set(Eaten.objects.filter(food__in=Food.objects.filter(name="apple"))),
{lunch},
)
self.assertEqual(
set(
Eaten.objects.filter(
food__in=Food.objects.filter(name="apple").values("eaten__meal")
)
),
set(),
)
self.assertEqual(
set(Food.objects.filter(eaten__in=Eaten.objects.filter(meal="lunch"))),
{apple},
)
def test_nested_in_subquery(self):
extra = ExtraInfo.objects.create()
author = Author.objects.create(num=42, extra=extra)
report = Report.objects.create(creator=author)
comment = ReportComment.objects.create(report=report)
comments = ReportComment.objects.filter(
report__in=Report.objects.filter(
creator__in=extra.author_set.all(),
),
)
self.assertSequenceEqual(comments, [comment])
def test_reverse_in(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch_apple = Eaten.objects.create(food=apple, meal="lunch")
        dinner_pear = Eaten.objects.create(food=pear, meal="dinner")
        self.assertEqual(
            set(Food.objects.filter(eaten__in=[lunch_apple, dinner_pear])),
            {apple, pear},
        )
def test_single_object(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=apple, meal="dinner")
self.assertEqual(set(Eaten.objects.filter(food=apple)), {lunch, dinner})
def test_single_object_reverse(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(set(Food.objects.filter(eaten=lunch)), {apple})
def test_recursive_fk(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(list(Node.objects.filter(parent=node1)), [node2])
def test_recursive_fk_reverse(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(list(Node.objects.filter(node=node2)), [node1])
class IsNullTests(TestCase):
def test_primary_key(self):
custom = CustomPk.objects.create(name="pk")
null = Related.objects.create()
notnull = Related.objects.create(custom=custom)
self.assertSequenceEqual(
Related.objects.filter(custom__isnull=False), [notnull]
)
self.assertSequenceEqual(Related.objects.filter(custom__isnull=True), [null])
def test_to_field(self):
apple = Food.objects.create(name="apple")
e1 = Eaten.objects.create(food=apple, meal="lunch")
e2 = Eaten.objects.create(meal="lunch")
self.assertSequenceEqual(
Eaten.objects.filter(food__isnull=False),
[e1],
)
self.assertSequenceEqual(
Eaten.objects.filter(food__isnull=True),
[e2],
)
class ConditionalTests(TestCase):
"""Tests whose execution depend on different environment conditions like
Python version or DB backend features"""
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
t1 = Tag.objects.create(name="t1", category=generic)
Tag.objects.create(name="t2", parent=t1, category=generic)
t3 = Tag.objects.create(name="t3", parent=t1)
Tag.objects.create(name="t4", parent=t3)
Tag.objects.create(name="t5", parent=t3)
def test_infinite_loop(self):
# If you're not careful, it's possible to introduce infinite loops via
# default ordering on foreign keys in a cycle. We detect that.
with self.assertRaisesMessage(FieldError, "Infinite loop caused by ordering."):
list(LoopX.objects.all()) # Force queryset evaluation with list()
with self.assertRaisesMessage(FieldError, "Infinite loop caused by ordering."):
list(LoopZ.objects.all()) # Force queryset evaluation with list()
# Note that this doesn't cause an infinite loop, since the default
# ordering on the Tag model is empty (and thus defaults to using "id"
# for the related field).
self.assertEqual(len(Tag.objects.order_by("parent")), 5)
# ... but you can still order in a non-recursive fashion among linked
# fields (the previous test failed because the default ordering was
# recursive).
self.assertQuerysetEqual(LoopX.objects.order_by("y__x__y__x__id"), [])
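    # A hedged sketch of the cycle guarded against above: assuming LoopX's
    # default ordering references LoopY and LoopY's references LoopX (the
    # models are defined elsewhere), resolving ORDER BY would recurse
    # X -> Y -> X -> ... indefinitely, so the ORM raises FieldError instead
    # of looping.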
# When grouping without specifying ordering, we add an explicit "ORDER BY NULL"
# portion in MySQL to prevent unnecessary sorting.
@skipUnlessDBFeature("requires_explicit_null_ordering_when_grouping")
def test_null_ordering_added(self):
query = Tag.objects.values_list("parent_id", flat=True).order_by().query
query.group_by = ["parent_id"]
sql = query.get_compiler(DEFAULT_DB_ALIAS).as_sql()[0]
fragment = "ORDER BY "
pos = sql.find(fragment)
self.assertEqual(sql.find(fragment, pos + 1), -1)
self.assertEqual(sql.find("NULL", pos + len(fragment)), pos + len(fragment))
def test_in_list_limit(self):
# The "in" lookup works with lists of 1000 items or more.
        # The number of items is picked to force three different IN batches
        # for Oracle, yet stay below MSSQL's 2100-parameter limit.
numbers = list(range(2050))
max_query_params = connection.features.max_query_params
if max_query_params is None or max_query_params >= len(numbers):
Number.objects.bulk_create(Number(num=num) for num in numbers)
for number in [1000, 1001, 2000, len(numbers)]:
with self.subTest(number=number):
self.assertEqual(
Number.objects.filter(num__in=numbers[:number]).count(), number
)
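    # A minimal sketch (illustrative only) of the batching described above:
    # backends with a parameter cap split a long IN list into chunks, as if
    # the filter were rewritten to OR the chunks together. With an assumed
    # 1000-item batch size:
    #
    #   WHERE num IN (0, ..., 999)
    #      OR num IN (1000, ..., 1999)
    #      OR num IN (2000, ..., 2049)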
class UnionTests(TestCase):
"""
Tests for the union of two querysets. Bug #12252.
"""
@classmethod
def setUpTestData(cls):
objectas = []
objectbs = []
objectcs = []
a_info = ["one", "two", "three"]
for name in a_info:
o = ObjectA(name=name)
o.save()
objectas.append(o)
b_info = [
("un", 1, objectas[0]),
("deux", 2, objectas[0]),
("trois", 3, objectas[2]),
]
for name, number, objecta in b_info:
o = ObjectB(name=name, num=number, objecta=objecta)
o.save()
objectbs.append(o)
c_info = [("ein", objectas[2], objectbs[2]), ("zwei", objectas[1], objectbs[1])]
for name, objecta, objectb in c_info:
o = ObjectC(name=name, objecta=objecta, objectb=objectb)
o.save()
objectcs.append(o)
def check_union(self, model, Q1, Q2):
filter = model.objects.filter
self.assertEqual(set(filter(Q1) | filter(Q2)), set(filter(Q1 | Q2)))
self.assertEqual(set(filter(Q2) | filter(Q1)), set(filter(Q1 | Q2)))
def test_A_AB(self):
Q1 = Q(name="two")
Q2 = Q(objectb__name="deux")
self.check_union(ObjectA, Q1, Q2)
def test_A_AB2(self):
Q1 = Q(name="two")
Q2 = Q(objectb__name="deux", objectb__num=2)
self.check_union(ObjectA, Q1, Q2)
def test_AB_ACB(self):
Q1 = Q(objectb__name="deux")
Q2 = Q(objectc__objectb__name="deux")
self.check_union(ObjectA, Q1, Q2)
def test_BAB_BAC(self):
Q1 = Q(objecta__objectb__name="deux")
Q2 = Q(objecta__objectc__name="ein")
self.check_union(ObjectB, Q1, Q2)
def test_BAB_BACB(self):
Q1 = Q(objecta__objectb__name="deux")
Q2 = Q(objecta__objectc__objectb__name="trois")
self.check_union(ObjectB, Q1, Q2)
def test_BA_BCA__BAB_BAC_BCA(self):
Q1 = Q(objecta__name="one", objectc__objecta__name="two")
Q2 = Q(
objecta__objectc__name="ein",
objectc__objecta__name="three",
objecta__objectb__name="trois",
)
self.check_union(ObjectB, Q1, Q2)
class DefaultValuesInsertTest(TestCase):
def test_no_extra_params(self):
"""
        Can create an instance of a model with only the PK field (#17056).
"""
DumbCategory.objects.create()
class ExcludeTests(TestCase):
@classmethod
def setUpTestData(cls):
f1 = Food.objects.create(name="apples")
cls.f2 = Food.objects.create(name="oranges")
Eaten.objects.create(food=f1, meal="dinner")
cls.j1 = Job.objects.create(name="Manager")
cls.r1 = Responsibility.objects.create(description="Playing golf")
cls.j2 = Job.objects.create(name="Programmer")
cls.r2 = Responsibility.objects.create(description="Programming")
JobResponsibilities.objects.create(job=cls.j1, responsibility=cls.r1)
JobResponsibilities.objects.create(job=cls.j2, responsibility=cls.r2)
def test_to_field(self):
self.assertSequenceEqual(
Food.objects.exclude(eaten__meal="dinner"),
[self.f2],
)
self.assertSequenceEqual(
Job.objects.exclude(responsibilities__description="Playing golf"),
[self.j2],
)
self.assertSequenceEqual(
Responsibility.objects.exclude(jobs__name="Manager"),
[self.r2],
)
def test_ticket14511(self):
alex = Person.objects.get_or_create(name="Alex")[0]
jane = Person.objects.get_or_create(name="Jane")[0]
oracle = Company.objects.get_or_create(name="Oracle")[0]
google = Company.objects.get_or_create(name="Google")[0]
microsoft = Company.objects.get_or_create(name="Microsoft")[0]
intel = Company.objects.get_or_create(name="Intel")[0]
def employ(employer, employee, title):
Employment.objects.get_or_create(
employee=employee, employer=employer, title=title
)
employ(oracle, alex, "Engineer")
employ(oracle, alex, "Developer")
employ(google, alex, "Engineer")
employ(google, alex, "Manager")
employ(microsoft, alex, "Manager")
employ(intel, alex, "Manager")
employ(microsoft, jane, "Developer")
employ(intel, jane, "Manager")
alex_tech_employers = (
alex.employers.filter(employment__title__in=("Engineer", "Developer"))
.distinct()
.order_by("name")
)
self.assertSequenceEqual(alex_tech_employers, [google, oracle])
alex_nontech_employers = (
alex.employers.exclude(employment__title__in=("Engineer", "Developer"))
.distinct()
.order_by("name")
)
self.assertSequenceEqual(alex_nontech_employers, [google, intel, microsoft])
def test_exclude_reverse_fk_field_ref(self):
tag = Tag.objects.create()
Note.objects.create(tag=tag, note="note")
annotation = Annotation.objects.create(name="annotation", tag=tag)
self.assertEqual(
Annotation.objects.exclude(tag__note__note=F("name")).get(), annotation
)
def test_exclude_with_circular_fk_relation(self):
self.assertEqual(
ObjectB.objects.exclude(objecta__objectb__name=F("name")).count(), 0
)
def test_subquery_exclude_outerref(self):
qs = JobResponsibilities.objects.filter(
Exists(Responsibility.objects.exclude(jobs=OuterRef("job"))),
)
self.assertTrue(qs.exists())
self.r1.delete()
self.assertFalse(qs.exists())
def test_exclude_nullable_fields(self):
number = Number.objects.create(num=1, other_num=1)
Number.objects.create(num=2, other_num=2, another_num=2)
self.assertSequenceEqual(
Number.objects.exclude(other_num=F("another_num")),
[number],
)
self.assertSequenceEqual(
Number.objects.exclude(num=F("another_num")),
[number],
)
def test_exclude_multivalued_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertSequenceEqual(
Job.objects.exclude(responsibilities__description="Programming"),
[self.j1],
)
self.assertIn("exists", captured_queries[0]["sql"].lower())
def test_exclude_subquery(self):
subquery = JobResponsibilities.objects.filter(
responsibility__description="bar",
) | JobResponsibilities.objects.exclude(
job__responsibilities__description="foo",
)
self.assertCountEqual(
Job.objects.annotate(
responsibility=subquery.filter(job=OuterRef("name"),).values(
"id"
)[:1]
),
[self.j1, self.j2],
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_exclude_unsaved_o2o_object(self):
jack = Staff.objects.create(name="jack")
jack_staff = StaffUser.objects.create(staff=jack)
unsaved_object = Staff(name="jane")
self.assertIsNone(unsaved_object.pk)
self.assertSequenceEqual(
StaffUser.objects.exclude(staff=unsaved_object), [jack_staff]
)
def test_exclude_unsaved_object(self):
        # These tests will raise ValueError in Django 5.0, when passing
        # unsaved model instances to related filters becomes forbidden.
        # msg = "Model instances passed to related filters must be saved."
company = Company.objects.create(name="Django")
msg = "Passing unsaved model instances to related filters is deprecated."
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.exclude(employer=Company(name="unsaved"))
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
Employment.objects.exclude(employer__in=[company, Company(name="unsaved")])
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
StaffUser.objects.exclude(staff=Staff(name="unsaved"))
class ExcludeTest17600(TestCase):
"""
    Some regression tests for ticket #17600. Some of these likely duplicate
other existing tests.
"""
@classmethod
def setUpTestData(cls):
# Create a few Orders.
cls.o1 = Order.objects.create(pk=1)
cls.o2 = Order.objects.create(pk=2)
cls.o3 = Order.objects.create(pk=3)
# Create some OrderItems for the first order with homogeneous
# status_id values
cls.oi1 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi2 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi3 = OrderItem.objects.create(order=cls.o1, status=1)
# Create some OrderItems for the second order with heterogeneous
# status_id values
cls.oi4 = OrderItem.objects.create(order=cls.o2, status=1)
cls.oi5 = OrderItem.objects.create(order=cls.o2, status=2)
cls.oi6 = OrderItem.objects.create(order=cls.o2, status=3)
        # Create some OrderItems for the third order with heterogeneous
        # status_id values
cls.oi7 = OrderItem.objects.create(order=cls.o3, status=2)
cls.oi8 = OrderItem.objects.create(order=cls.o3, status=3)
cls.oi9 = OrderItem.objects.create(order=cls.o3, status=4)
def test_exclude_plain(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(items__status=1),
[self.o3],
)
def test_exclude_plain_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(items__status=1).distinct(),
[self.o3],
)
def test_exclude_with_q_object_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(Q(items__status=1)).distinct(),
[self.o3],
)
def test_exclude_with_q_object_no_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertSequenceEqual(
Order.objects.exclude(Q(items__status=1)),
[self.o3],
)
def test_exclude_with_q_is_equal_to_plain_exclude(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1).distinct()),
list(Order.objects.exclude(Q(items__status=1)).distinct()),
)
def test_exclude_with_q_is_equal_to_plain_exclude_variation(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1)),
list(Order.objects.exclude(Q(items__status=1)).distinct()),
)
@unittest.expectedFailure
def test_only_orders_with_all_items_having_status_1(self):
"""
This should only return orders having ALL items set to status 1, or
those items not having any orders at all. The correct way to write
this query in SQL seems to be using two nested subqueries.
"""
self.assertQuerysetEqual(
Order.objects.exclude(~Q(items__status=1)).distinct(),
[self.o1],
)
class Exclude15786(TestCase):
"""Regression test for #15786"""
def test_ticket15786(self):
c1 = SimpleCategory.objects.create(name="c1")
c2 = SimpleCategory.objects.create(name="c2")
OneToOneCategory.objects.create(category=c1)
OneToOneCategory.objects.create(category=c2)
rel = CategoryRelationship.objects.create(first=c1, second=c2)
self.assertEqual(
CategoryRelationship.objects.exclude(
first__onetoonecategory=F("second__onetoonecategory")
).get(),
rel,
)
class NullInExcludeTest(TestCase):
@classmethod
def setUpTestData(cls):
NullableName.objects.create(name="i1")
NullableName.objects.create()
def test_null_in_exclude_qs(self):
none_val = "" if connection.features.interprets_empty_strings_as_nulls else None
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[]),
["i1", none_val],
attrgetter("name"),
)
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=["i1"]),
[none_val],
attrgetter("name"),
)
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=["i3"]),
["i1", none_val],
attrgetter("name"),
)
inner_qs = NullableName.objects.filter(name="i1").values_list("name")
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=inner_qs),
[none_val],
attrgetter("name"),
)
        # The inner queryset wasn't executed - it should have been turned
        # into a subquery above.
self.assertIs(inner_qs._result_cache, None)
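    # Illustrative sketch of the laziness asserted above: the unevaluated
    # inner queryset is inlined as a subquery, so only one query runs. An
    # assumed shape (the real SQL also has to account for NULL names):
    #
    #   SELECT ... FROM queries_nullablename
    #   WHERE NOT name IN (
    #       SELECT name FROM queries_nullablename WHERE name = 'i1'
    #   )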
@unittest.expectedFailure
def test_col_not_in_list_containing_null(self):
"""
The following case is not handled properly because
SQL's COL NOT IN (list containing null) handling is too weird to
abstract away.
"""
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[None]), ["i1"], attrgetter("name")
)
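    # A short reasoning sketch for the expected failure above, using SQL's
    # three-valued logic: for a non-null name, name NOT IN (NULL) evaluates
    # to name <> NULL, which is UNKNOWN rather than TRUE, so every row is
    # filtered out and the query returns nothing instead of ["i1"].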
def test_double_exclude(self):
self.assertEqual(
list(NullableName.objects.filter(~~Q(name="i1"))),
list(NullableName.objects.filter(Q(name="i1"))),
)
self.assertNotIn(
"IS NOT NULL", str(NullableName.objects.filter(~~Q(name="i1")).query)
)
class EmptyStringsAsNullTest(TestCase):
"""
Filtering on non-null character fields works as expected.
The reason for these tests is that Oracle treats '' as NULL, and this
can cause problems in query construction. Refs #17957.
"""
@classmethod
def setUpTestData(cls):
cls.nc = NamedCategory.objects.create(name="")
def test_direct_exclude(self):
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name__in=["nonexistent"]),
[self.nc.pk],
attrgetter("pk"),
)
def test_joined_exclude(self):
self.assertQuerysetEqual(
DumbCategory.objects.exclude(namedcategory__name__in=["nonexistent"]),
[self.nc.pk],
attrgetter("pk"),
)
def test_21001(self):
foo = NamedCategory.objects.create(name="foo")
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name=""), [foo.pk], attrgetter("pk")
)
class ProxyQueryCleanupTest(TestCase):
def test_evaluated_proxy_count(self):
"""
Generating the query string doesn't alter the query's state
in irreversible ways. Refs #18248.
"""
ProxyCategory.objects.create()
qs = ProxyCategory.objects.all()
self.assertEqual(qs.count(), 1)
str(qs.query)
self.assertEqual(qs.count(), 1)
class WhereNodeTest(SimpleTestCase):
class DummyNode:
def as_sql(self, compiler, connection):
return "dummy", []
class MockCompiler:
def compile(self, node):
return node.as_sql(self, connection)
def __call__(self, name):
return connection.ops.quote_name(name)
def test_empty_full_handling_conjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w = WhereNode(children=[self.DummyNode(), self.DummyNode()])
self.assertEqual(w.as_sql(compiler, connection), ("(dummy AND dummy)", []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("NOT (dummy AND dummy)", []))
w = WhereNode(children=[NothingNode(), self.DummyNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("", []))
def test_empty_full_handling_disjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()], connector="OR")
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector="OR")
self.assertEqual(w.as_sql(compiler, connection), ("(dummy OR dummy)", []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("NOT (dummy OR dummy)", []))
w = WhereNode(children=[NothingNode(), self.DummyNode()], connector="OR")
self.assertEqual(w.as_sql(compiler, connection), ("dummy", []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("NOT (dummy)", []))
def test_empty_nodes(self):
compiler = WhereNodeTest.MockCompiler()
empty_w = WhereNode()
w = WhereNode(children=[empty_w, empty_w])
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w.negate()
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.connector = "OR"
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w = WhereNode(children=[empty_w, NothingNode()], connector="OR")
self.assertEqual(w.as_sql(compiler, connection), ("", []))
w = WhereNode(children=[empty_w, NothingNode()], connector="AND")
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
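    # Summary sketch of the semantics exercised above (phrased from the
    # assertions, not from the WhereNode source):
    #   - a node that can match nothing raises EmptyResultSet;
    #   - a node that matches everything compiles to ("", []);
    #   - negate() swaps the two, and the AND/OR connector decides whether
    #     a nothing-child short-circuits the whole node or is dropped.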
class QuerySetExceptionTests(SimpleTestCase):
def test_iter_exceptions(self):
qs = ExtraInfo.objects.only("author")
msg = "'ManyToOneRel' object has no attribute 'attname'"
with self.assertRaisesMessage(AttributeError, msg):
list(qs)
def test_invalid_order_by(self):
msg = "Cannot resolve keyword '*' into field. Choices are: created, id, name"
with self.assertRaisesMessage(FieldError, msg):
Article.objects.order_by("*")
def test_invalid_order_by_raw_column_alias(self):
msg = (
"Cannot resolve keyword 'queries_author.name' into field. Choices "
"are: cover, created, creator, creator_id, id, modified, name, "
"note, note_id, tags"
)
with self.assertRaisesMessage(FieldError, msg):
Item.objects.values("creator__name").order_by("queries_author.name")
def test_invalid_queryset_model(self):
msg = 'Cannot use QuerySet for "Article": Use a QuerySet for "ExtraInfo".'
with self.assertRaisesMessage(ValueError, msg):
list(Author.objects.filter(extra=Article.objects.all()))
class NullJoinPromotionOrTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.d1 = ModelD.objects.create(name="foo")
d2 = ModelD.objects.create(name="bar")
cls.a1 = ModelA.objects.create(name="a1", d=cls.d1)
c = ModelC.objects.create(name="c")
b = ModelB.objects.create(name="b", c=c)
cls.a2 = ModelA.objects.create(name="a2", b=b, d=d2)
def test_ticket_17886(self):
# The first Q-object is generating the match, the rest of the filters
# should not remove the match even if they do not match anything. The
        # problem here was that b__name generates a LOUTER JOIN, then
        # b__c__name generates a join to c, which the ORM tried to promote
        # but failed as that join isn't nullable.
q_obj = Q(d__name="foo") | Q(b__name="foo") | Q(b__c__name="foo")
qset = ModelA.objects.filter(q_obj)
self.assertEqual(list(qset), [self.a1])
# We generate one INNER JOIN to D. The join is direct and not nullable
# so we can use INNER JOIN for it. However, we can NOT use INNER JOIN
# for the b->c join, as a->b is nullable.
self.assertEqual(str(qset.query).count("INNER JOIN"), 1)
def test_isnull_filter_promotion(self):
qs = ModelA.objects.filter(Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count("LEFT OUTER"), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count("LEFT OUTER"), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count("LEFT OUTER"), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(list(qs), [self.a2])
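    # Compact restatement of the promotion rules exercised above:
    #
    #   Q(b__name__isnull=True)    -> LEFT OUTER (needs the missing rows)
    #   ~Q(b__name__isnull=True)   -> INNER (a matching row must exist)
    #   Q(b__name__isnull=False)   -> INNER
    #   ~Q(b__name__isnull=False)  -> LEFT OUTER
    #
    # Double negation lands back where the plain filter started.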
def test_null_join_demotion(self):
qs = ModelA.objects.filter(Q(b__name__isnull=False) & Q(b__name__isnull=True))
self.assertIn(" INNER JOIN ", str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) & Q(b__name__isnull=False))
self.assertIn(" INNER JOIN ", str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=False) | Q(b__name__isnull=True))
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) | Q(b__name__isnull=False))
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
def test_ticket_21366(self):
n = Note.objects.create(note="n", misc="m")
e = ExtraInfo.objects.create(info="info", note=n)
a = Author.objects.create(name="Author1", num=1, extra=e)
Ranking.objects.create(rank=1, author=a)
r1 = Report.objects.create(name="Foo", creator=a)
r2 = Report.objects.create(name="Bar")
Report.objects.create(name="Bar", creator=a)
qs = Report.objects.filter(
Q(creator__ranking__isnull=True) | Q(creator__ranking__rank=1, name="Foo")
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
self.assertEqual(str(qs.query).count(" JOIN "), 2)
self.assertSequenceEqual(qs.order_by("name"), [r2, r1])
def test_ticket_21748(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
i3 = Identifier.objects.create(name="i3")
Program.objects.create(identifier=i1)
Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
self.assertSequenceEqual(
Identifier.objects.filter(program=None, channel=None), [i3]
)
self.assertSequenceEqual(
Identifier.objects.exclude(program=None, channel=None).order_by("name"),
[i1, i2],
)
def test_ticket_21748_double_negated_and(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
Identifier.objects.create(name="i3")
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
        # Check that ~~Q() (or equivalently .exclude(~Q())) works like Q()
        # for join promotion.
qs1_doubleneg = Identifier.objects.exclude(
~Q(program__id=p1.id, channel__id=c1.id)
).order_by("pk")
qs1_filter = Identifier.objects.filter(
program__id=p1.id, channel__id=c1.id
).order_by("pk")
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(
str(qs1_filter.query).count("JOIN"), str(qs1_doubleneg.query).count("JOIN")
)
self.assertEqual(2, str(qs1_doubleneg.query).count("INNER JOIN"))
self.assertEqual(
str(qs1_filter.query).count("INNER JOIN"),
str(qs1_doubleneg.query).count("INNER JOIN"),
)
def test_ticket_21748_double_negated_or(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
Identifier.objects.create(name="i3")
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Test OR + double negation. The expected result is that channel is
        # LOUTER joined and program INNER joined.
qs1_filter = Identifier.objects.filter(
Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)
).order_by("pk")
qs1_doubleneg = Identifier.objects.exclude(
~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id))
).order_by("pk")
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(
str(qs1_filter.query).count("JOIN"), str(qs1_doubleneg.query).count("JOIN")
)
self.assertEqual(1, str(qs1_doubleneg.query).count("INNER JOIN"))
self.assertEqual(
str(qs1_filter.query).count("INNER JOIN"),
str(qs1_doubleneg.query).count("INNER JOIN"),
)
def test_ticket_21748_complex_filter(self):
i1 = Identifier.objects.create(name="i1")
i2 = Identifier.objects.create(name="i2")
Identifier.objects.create(name="i3")
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
        # Finally, a more complex case: one query where each NOT is pushed
        # to the lowest level of the boolean tree, and another query where
        # this isn't done.
qs1 = Identifier.objects.filter(
~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id))
).order_by("pk")
qs2 = Identifier.objects.filter(
Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id))
).order_by("pk")
self.assertQuerysetEqual(qs1, qs2, lambda x: x)
self.assertEqual(str(qs1.query).count("JOIN"), str(qs2.query).count("JOIN"))
self.assertEqual(0, str(qs1.query).count("INNER JOIN"))
self.assertEqual(
str(qs1.query).count("INNER JOIN"), str(qs2.query).count("INNER JOIN")
)
class ReverseJoinTrimmingTest(TestCase):
def test_reverse_trimming(self):
# We don't accidentally trim reverse joins - we can't know if there is
# anything on the other side of the join, so trimming reverse joins
# can't be done, ever.
t = Tag.objects.create()
qs = Tag.objects.filter(annotation__tag=t.pk)
self.assertIn("INNER JOIN", str(qs.query))
self.assertEqual(list(qs), [])
class JoinReuseTest(TestCase):
"""
The queries reuse joins sensibly (for example, direct joins
are always reused).
"""
def test_fk_reuse(self):
qs = Annotation.objects.filter(tag__name="foo").filter(tag__name="bar")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_select_related(self):
qs = Annotation.objects.filter(tag__name="foo").select_related("tag")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_annotation(self):
qs = Annotation.objects.filter(tag__name="foo").annotate(cnt=Count("tag__name"))
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_disjunction(self):
qs = Annotation.objects.filter(Q(tag__name="foo") | Q(tag__name="bar"))
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_fk_reuse_order_by(self):
qs = Annotation.objects.filter(tag__name="foo").order_by("tag__name")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_revo2o_reuse(self):
qs = Detail.objects.filter(member__name="foo").filter(member__name="foo")
self.assertEqual(str(qs.query).count("JOIN"), 1)
def test_revfk_noreuse(self):
qs = Author.objects.filter(report__name="r4").filter(report__name="r1")
self.assertEqual(str(qs.query).count("JOIN"), 2)
def test_inverted_q_across_relations(self):
"""
When a trimmable join is specified in the query (here school__), the
        ORM detects it and removes unnecessary joins. The set of reusable
        joins is updated after trimming the query so that other lookups don't
        assume that the outer query's filters are in effect for the subquery
(#26551).
"""
springfield_elementary = School.objects.create()
        hogwarts = School.objects.create()
        Student.objects.create(school=springfield_elementary)
        hp = Student.objects.create(school=hogwarts)
        Classroom.objects.create(school=hogwarts, name="Potion")
Classroom.objects.create(school=springfield_elementary, name="Main")
qs = Student.objects.filter(
~(
Q(school__classroom__name="Main")
& Q(school__classroom__has_blackboard=None)
)
)
self.assertSequenceEqual(qs, [hp])
class DisjunctionPromotionTests(TestCase):
def test_disjunction_promotion_select_related(self):
fk1 = FK1.objects.create(f1="f1", f2="f2")
basea = BaseA.objects.create(a=fk1)
qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2))
self.assertEqual(str(qs.query).count(" JOIN "), 0)
qs = qs.select_related("a", "b")
self.assertEqual(str(qs.query).count(" INNER JOIN "), 0)
self.assertEqual(str(qs.query).count(" LEFT OUTER JOIN "), 2)
with self.assertNumQueries(1):
self.assertSequenceEqual(qs, [basea])
self.assertEqual(qs[0].a, fk1)
self.assertIs(qs[0].b, None)
def test_disjunction_promotion1(self):
# Pre-existing join, add two ORed filters to the same join,
# all joins can be INNER JOINS.
qs = BaseA.objects.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = qs.filter(Q(b__f1="foo") | Q(b__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
# Reverse the order of AND and OR filters.
qs = BaseA.objects.filter(Q(b__f1="foo") | Q(b__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = qs.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
def test_disjunction_promotion2(self):
qs = BaseA.objects.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
# Now we have two different joins in an ORed condition, these
# must be OUTER joins. The pre-existing join should remain INNER.
qs = qs.filter(Q(b__f1="foo") | Q(c__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
# Reverse case.
qs = BaseA.objects.filter(Q(b__f1="foo") | Q(c__f2="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
qs = qs.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
def test_disjunction_promotion3(self):
qs = BaseA.objects.filter(a__f2="bar")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
        # The ANDed a__f2 filter allows us to keep using INNER JOIN even
        # inside the ORed case. If the join to a returns nothing, the ANDed
        # filter for a__f2 can't be true.
qs = qs.filter(Q(a__f1="foo") | Q(b__f2="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
def test_disjunction_promotion3_demote(self):
# This one needs demotion logic: the first filter causes a to be
# outer joined, the second filter makes it inner join again.
qs = BaseA.objects.filter(Q(a__f1="foo") | Q(b__f2="foo")).filter(a__f2="bar")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
def test_disjunction_promotion4_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("JOIN"), 0)
        # Demotion is needed for the "a" join: it is marked as an outer join
        # by the filter above (even if it is trimmed away).
qs = qs.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
def test_disjunction_promotion4(self):
qs = BaseA.objects.filter(a__f1="foo")
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
def test_disjunction_promotion5_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
        # Note that the above filters on a force the join to be an inner
        # join even if it is trimmed.
self.assertEqual(str(qs.query).count("JOIN"), 0)
qs = qs.filter(Q(a__f1="foo") | Q(b__f1="foo"))
# So, now the a__f1 join doesn't need promotion.
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
# But b__f1 does.
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
qs = BaseA.objects.filter(Q(a__f1="foo") | Q(b__f1="foo"))
        # Now the join to a is created as LOUTER.
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
def test_disjunction_promotion6(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("JOIN"), 0)
qs = BaseA.objects.filter(Q(a__f1="foo") & Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 0)
qs = BaseA.objects.filter(Q(a__f1="foo") & Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 0)
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("INNER JOIN"), 2)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 0)
def test_disjunction_promotion7(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count("JOIN"), 0)
qs = BaseA.objects.filter(Q(a__f1="foo") | (Q(b__f1="foo") & Q(a__f1="bar")))
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
qs = BaseA.objects.filter(
(Q(a__f1="foo") | Q(b__f1="foo")) & (Q(a__f1="bar") | Q(c__f1="foo"))
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 3)
self.assertEqual(str(qs.query).count("INNER JOIN"), 0)
qs = BaseA.objects.filter(
Q(a__f1="foo") | Q(a__f1="bar") & (Q(b__f1="bar") | Q(c__f1="foo"))
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
def test_disjunction_promotion_fexpression(self):
qs = BaseA.objects.filter(Q(a__f1=F("b__f1")) | Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 1)
self.assertEqual(str(qs.query).count("INNER JOIN"), 1)
qs = BaseA.objects.filter(Q(a__f1=F("c__f1")) | Q(b__f1="foo"))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 3)
qs = BaseA.objects.filter(
Q(a__f1=F("b__f1")) | Q(a__f2=F("b__f2")) | Q(c__f1="foo")
)
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 3)
qs = BaseA.objects.filter(Q(a__f1=F("c__f1")) | (Q(pk=1) & Q(pk=2)))
self.assertEqual(str(qs.query).count("LEFT OUTER JOIN"), 2)
self.assertEqual(str(qs.query).count("INNER JOIN"), 0)
class ManyToManyExcludeTest(TestCase):
def test_exclude_many_to_many(self):
i_extra = Identifier.objects.create(name="extra")
i_program = Identifier.objects.create(name="program")
program = Program.objects.create(identifier=i_program)
i_channel = Identifier.objects.create(name="channel")
channel = Channel.objects.create(identifier=i_channel)
channel.programs.add(program)
        # channel contains program, so all Identifiers except i_program
        # should be returned.
self.assertSequenceEqual(
Identifier.objects.exclude(program__channel=channel).order_by("name"),
[i_channel, i_extra],
)
self.assertSequenceEqual(
Identifier.objects.exclude(program__channel=None).order_by("name"),
[i_program],
)
def test_ticket_12823(self):
pg3 = Page.objects.create(text="pg3")
pg2 = Page.objects.create(text="pg2")
pg1 = Page.objects.create(text="pg1")
pa1 = Paragraph.objects.create(text="pa1")
pa1.page.set([pg1, pg2])
pa2 = Paragraph.objects.create(text="pa2")
pa2.page.set([pg2, pg3])
pa3 = Paragraph.objects.create(text="pa3")
ch1 = Chapter.objects.create(title="ch1", paragraph=pa1)
ch2 = Chapter.objects.create(title="ch2", paragraph=pa2)
ch3 = Chapter.objects.create(title="ch3", paragraph=pa3)
b1 = Book.objects.create(title="b1", chapter=ch1)
b2 = Book.objects.create(title="b2", chapter=ch2)
b3 = Book.objects.create(title="b3", chapter=ch3)
q = Book.objects.exclude(chapter__paragraph__page__text="pg1")
self.assertNotIn("IS NOT NULL", str(q.query))
self.assertEqual(len(q), 2)
self.assertNotIn(b1, q)
self.assertIn(b2, q)
self.assertIn(b3, q)
class RelabelCloneTest(TestCase):
def test_ticket_19964(self):
my1 = MyObject.objects.create(data="foo")
my1.parent = my1
my1.save()
my2 = MyObject.objects.create(data="bar", parent=my1)
parents = MyObject.objects.filter(parent=F("id"))
children = MyObject.objects.filter(parent__in=parents).exclude(parent=F("id"))
self.assertEqual(list(parents), [my1])
# Evaluating the children query (which has parents as part of it) does
# not change results for the parents query.
self.assertEqual(list(children), [my2])
self.assertEqual(list(parents), [my1])
class Ticket20101Tests(TestCase):
def test_ticket_20101(self):
"""
Tests QuerySet ORed combining in exclude subquery case.
"""
t = Tag.objects.create(name="foo")
a1 = Annotation.objects.create(tag=t, name="a1")
a2 = Annotation.objects.create(tag=t, name="a2")
a3 = Annotation.objects.create(tag=t, name="a3")
n = Note.objects.create(note="foo", misc="bar")
qs1 = Note.objects.exclude(annotation__in=[a1, a2])
qs2 = Note.objects.filter(annotation__in=[a3])
self.assertIn(n, qs1)
self.assertNotIn(n, qs2)
self.assertIn(n, (qs1 | qs2))
class EmptyStringPromotionTests(SimpleTestCase):
def test_empty_string_promotion(self):
qs = RelatedObject.objects.filter(single__name="")
if connection.features.interprets_empty_strings_as_nulls:
self.assertIn("LEFT OUTER JOIN", str(qs.query))
else:
self.assertNotIn("LEFT OUTER JOIN", str(qs.query))
class ValuesSubqueryTests(TestCase):
def test_values_in_subquery(self):
# If a values() queryset is used, then the given values
# will be used instead of forcing use of the relation's field.
o1 = Order.objects.create(id=-2)
o2 = Order.objects.create(id=-1)
oi1 = OrderItem.objects.create(order=o1, status=0)
oi1.status = oi1.pk
oi1.save()
OrderItem.objects.create(order=o2, status=0)
# The query below should match o1 as it has related order_item
# with id == status.
self.assertSequenceEqual(
Order.objects.filter(items__in=OrderItem.objects.values_list("status")),
[o1],
)
class DoubleInSubqueryTests(TestCase):
def test_double_subquery_in(self):
lfa1 = LeafA.objects.create(data="foo")
lfa2 = LeafA.objects.create(data="bar")
lfb1 = LeafB.objects.create(data="lfb1")
lfb2 = LeafB.objects.create(data="lfb2")
Join.objects.create(a=lfa1, b=lfb1)
Join.objects.create(a=lfa2, b=lfb2)
leaf_as = LeafA.objects.filter(data="foo").values_list("pk", flat=True)
joins = Join.objects.filter(a__in=leaf_as).values_list("b__id", flat=True)
qs = LeafB.objects.filter(pk__in=joins)
self.assertSequenceEqual(qs, [lfb1])
class Ticket18785Tests(SimpleTestCase):
def test_ticket_18785(self):
# Test join trimming from ticket18785
qs = (
Item.objects.exclude(note__isnull=False)
.filter(name="something", creator__extra__isnull=True)
.order_by()
)
self.assertEqual(1, str(qs.query).count("INNER JOIN"))
self.assertEqual(0, str(qs.query).count("OUTER JOIN"))
class Ticket20788Tests(TestCase):
def test_ticket_20788(self):
Paragraph.objects.create()
paragraph = Paragraph.objects.create()
page = paragraph.page.create()
chapter = Chapter.objects.create(paragraph=paragraph)
Book.objects.create(chapter=chapter)
paragraph2 = Paragraph.objects.create()
Page.objects.create()
chapter2 = Chapter.objects.create(paragraph=paragraph2)
book2 = Book.objects.create(chapter=chapter2)
sentences_not_in_pub = Book.objects.exclude(chapter__paragraph__page=page)
self.assertSequenceEqual(sentences_not_in_pub, [book2])
class Ticket12807Tests(TestCase):
def test_ticket_12807(self):
p1 = Paragraph.objects.create()
p2 = Paragraph.objects.create()
# The ORed condition below should have no effect on the query - the
# ~Q(pk__in=[]) will always be True.
qs = Paragraph.objects.filter((Q(pk=p2.pk) | ~Q(pk__in=[])) & Q(pk=p1.pk))
self.assertSequenceEqual(qs, [p1])
class RelatedLookupTypeTests(TestCase):
error = 'Cannot query "%s": Must be "%s" instance.'
@classmethod
def setUpTestData(cls):
cls.oa = ObjectA.objects.create(name="oa")
cls.poa = ProxyObjectA.objects.get(name="oa")
cls.coa = ChildObjectA.objects.create(name="coa")
cls.wrong_type = Order.objects.create(id=cls.oa.pk)
cls.ob = ObjectB.objects.create(name="ob", objecta=cls.oa, num=1)
cls.pob1 = ProxyObjectB.objects.create(name="pob", objecta=cls.oa, num=2)
cls.pob = ProxyObjectB.objects.all()
cls.c = ObjectC.objects.create(childobjecta=cls.coa)
def test_wrong_type_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup.
"""
# Passing incorrect object type
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)
):
ObjectB.objects.get(objecta=self.wrong_type)
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)
):
ObjectB.objects.filter(objecta__in=[self.wrong_type])
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)
):
ObjectB.objects.filter(objecta=self.wrong_type)
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)
):
ObjectA.objects.filter(objectb__in=[self.wrong_type, self.ob])
# Passing an object of the class on which query is done.
with self.assertRaisesMessage(
ValueError, self.error % (self.ob, ObjectA._meta.object_name)
):
ObjectB.objects.filter(objecta__in=[self.poa, self.ob])
with self.assertRaisesMessage(
ValueError, self.error % (self.ob, ChildObjectA._meta.object_name)
):
ObjectC.objects.exclude(childobjecta__in=[self.coa, self.ob])
def test_wrong_backward_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup for backward relations.
"""
with self.assertRaisesMessage(
ValueError, self.error % (self.oa, ObjectB._meta.object_name)
):
ObjectA.objects.filter(objectb__in=[self.oa, self.ob])
with self.assertRaisesMessage(
ValueError, self.error % (self.oa, ObjectB._meta.object_name)
):
ObjectA.objects.exclude(objectb=self.oa)
with self.assertRaisesMessage(
ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)
):
ObjectA.objects.get(objectb=self.wrong_type)
def test_correct_lookup(self):
"""
When passing proxy model objects, child objects, or parent objects,
lookups work fine.
"""
out_a = [self.oa]
out_b = [self.ob, self.pob1]
out_c = [self.c]
# proxy model objects
self.assertSequenceEqual(
ObjectB.objects.filter(objecta=self.poa).order_by("name"), out_b
)
self.assertSequenceEqual(
ObjectA.objects.filter(objectb__in=self.pob).order_by("pk"), out_a * 2
)
# child objects
self.assertSequenceEqual(ObjectB.objects.filter(objecta__in=[self.coa]), [])
self.assertSequenceEqual(
ObjectB.objects.filter(objecta__in=[self.poa, self.coa]).order_by("name"),
out_b,
)
self.assertSequenceEqual(
ObjectB.objects.filter(objecta__in=iter([self.poa, self.coa])).order_by(
"name"
),
out_b,
)
# parent objects
self.assertSequenceEqual(ObjectC.objects.exclude(childobjecta=self.oa), out_c)
# QuerySet related object type checking shouldn't issue queries
# (the querysets aren't evaluated here, hence zero queries) (#23266).
with self.assertNumQueries(0):
ObjectB.objects.filter(objecta__in=ObjectA.objects.all())
def test_values_queryset_lookup(self):
"""
        values() querysets aren't checked for compatibility with the lookup field.
"""
# Make sure the num and objecta field values match.
ob = ObjectB.objects.get(name="ob")
ob.num = ob.objecta.pk
ob.save()
pob = ObjectB.objects.get(name="pob")
pob.num = pob.objecta.pk
pob.save()
self.assertSequenceEqual(
ObjectB.objects.filter(
objecta__in=ObjectB.objects.values_list("num")
).order_by("pk"),
[ob, pob],
)
class Ticket14056Tests(TestCase):
def test_ticket_14056(self):
s1 = SharedConnection.objects.create(data="s1")
s2 = SharedConnection.objects.create(data="s2")
s3 = SharedConnection.objects.create(data="s3")
PointerA.objects.create(connection=s2)
expected_ordering = (
[s1, s3, s2] if connection.features.nulls_order_largest else [s2, s1, s3]
)
self.assertSequenceEqual(
SharedConnection.objects.order_by("-pointera__connection", "pk"),
expected_ordering,
)
class Ticket20955Tests(TestCase):
def test_ticket_20955(self):
jack = Staff.objects.create(name="jackstaff")
jackstaff = StaffUser.objects.create(staff=jack)
jill = Staff.objects.create(name="jillstaff")
jillstaff = StaffUser.objects.create(staff=jill)
task = Task.objects.create(creator=jackstaff, owner=jillstaff, title="task")
task_get = Task.objects.get(pk=task.pk)
# Load data so that assertNumQueries doesn't complain about the get
# version's queries.
task_get.creator.staffuser.staff
task_get.owner.staffuser.staff
qs = Task.objects.select_related(
"creator__staffuser__staff", "owner__staffuser__staff"
)
self.assertEqual(str(qs.query).count(" JOIN "), 6)
task_select_related = qs.get(pk=task.pk)
with self.assertNumQueries(0):
self.assertEqual(
task_select_related.creator.staffuser.staff,
task_get.creator.staffuser.staff,
)
self.assertEqual(
task_select_related.owner.staffuser.staff,
task_get.owner.staffuser.staff,
)
class Ticket21203Tests(TestCase):
def test_ticket_21203(self):
p = Ticket21203Parent.objects.create(parent_bool=True)
c = Ticket21203Child.objects.create(parent=p)
qs = Ticket21203Child.objects.select_related("parent").defer("parent__created")
self.assertSequenceEqual(qs, [c])
self.assertIs(qs[0].parent.parent_bool, True)
class ValuesJoinPromotionTests(TestCase):
def test_values_no_promotion_for_existing(self):
qs = Node.objects.filter(parent__parent__isnull=False)
self.assertIn(" INNER JOIN ", str(qs.query))
qs = qs.values("parent__parent__id")
self.assertIn(" INNER JOIN ", str(qs.query))
# Make sure there is a left outer join without the filter.
qs = Node.objects.values("parent__parent__id")
self.assertIn(" LEFT OUTER JOIN ", str(qs.query))
def test_non_nullable_fk_not_promoted(self):
qs = ObjectB.objects.values("objecta__name")
self.assertIn(" INNER JOIN ", str(qs.query))
def test_ticket_21376(self):
a = ObjectA.objects.create()
ObjectC.objects.create(objecta=a)
qs = ObjectC.objects.filter(
Q(objecta=a) | Q(objectb__objecta=a),
)
qs = qs.filter(
Q(objectb=1) | Q(objecta=a),
)
self.assertEqual(qs.count(), 1)
tblname = connection.ops.quote_name(ObjectB._meta.db_table)
self.assertIn(" LEFT OUTER JOIN %s" % tblname, str(qs.query))
class ForeignKeyToBaseExcludeTests(TestCase):
def test_ticket_21787(self):
sc1 = SpecialCategory.objects.create(special_name="sc1", name="sc1")
sc2 = SpecialCategory.objects.create(special_name="sc2", name="sc2")
sc3 = SpecialCategory.objects.create(special_name="sc3", name="sc3")
c1 = CategoryItem.objects.create(category=sc1)
CategoryItem.objects.create(category=sc2)
self.assertSequenceEqual(
SpecialCategory.objects.exclude(categoryitem__id=c1.pk).order_by("name"),
[sc2, sc3],
)
self.assertSequenceEqual(
SpecialCategory.objects.filter(categoryitem__id=c1.pk), [sc1]
)
class ReverseM2MCustomPkTests(TestCase):
def test_ticket_21879(self):
cpt1 = CustomPkTag.objects.create(id="cpt1", tag="cpt1")
cp1 = CustomPk.objects.create(name="cp1", extra="extra")
cp1.custompktag_set.add(cpt1)
self.assertSequenceEqual(CustomPk.objects.filter(custompktag=cpt1), [cp1])
self.assertSequenceEqual(CustomPkTag.objects.filter(custom_pk=cp1), [cpt1])
class Ticket22429Tests(TestCase):
def test_ticket_22429(self):
sc1 = School.objects.create()
st1 = Student.objects.create(school=sc1)
sc2 = School.objects.create()
st2 = Student.objects.create(school=sc2)
cr = Classroom.objects.create(school=sc1)
cr.students.add(st1)
queryset = Student.objects.filter(~Q(classroom__school=F("school")))
self.assertSequenceEqual(queryset, [st2])
class Ticket23605Tests(TestCase):
def test_ticket_23605(self):
# Test filtering on a complicated q-object from ticket's report.
# The query structure is such that we have multiple nested subqueries.
# The original problem was that the inner queries weren't relabeled
# correctly.
# See also #24090.
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=10000.0)
Ticket23605B.objects.create(
field_b0=10000.0, field_b1=True, modelc_fk=c1, modela_fk=a1
)
complex_q = Q(
pk__in=Ticket23605A.objects.filter(
Q(
# True for a1 as field_b0 = 10000, field_c0=10000
# False for a2 as no ticket23605b found
ticket23605b__field_b0__gte=1000000
/ F("ticket23605b__modelc_fk__field_c0")
)
&
# True for a1 (field_b1=True)
Q(ticket23605b__field_b1=True)
& ~Q(
ticket23605b__pk__in=Ticket23605B.objects.filter(
~(
                            # Same filters as the commented filters above,
                            # but double-negated (once for the ~Q(), once
                            # for the parentheses). So, again, a1 matches
                            # and a2 doesn't.
Q(field_b1=True)
& Q(field_b0__gte=1000000 / F("modelc_fk__field_c0"))
)
)
)
).filter(ticket23605b__field_b1=True)
)
qs1 = Ticket23605A.objects.filter(complex_q)
self.assertSequenceEqual(qs1, [a1])
qs2 = Ticket23605A.objects.exclude(complex_q)
self.assertSequenceEqual(qs2, [a2])
class TestTicket24279(TestCase):
    def test_ticket_24279(self):
School.objects.create()
qs = School.objects.filter(Q(pk__in=()) | Q())
        self.assertSequenceEqual(qs, [])
class TestInvalidValuesRelation(SimpleTestCase):
def test_invalid_values(self):
msg = "Field 'id' expected a number but got 'abc'."
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag="abc")
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag__in=[123, "abc"])
class TestTicket24605(TestCase):
def test_ticket_24605(self):
"""
Subquery table names should be quoted.
"""
i1 = Individual.objects.create(alive=True)
RelatedIndividual.objects.create(related=i1)
i2 = Individual.objects.create(alive=False)
RelatedIndividual.objects.create(related=i2)
i3 = Individual.objects.create(alive=True)
i4 = Individual.objects.create(alive=False)
self.assertSequenceEqual(
Individual.objects.filter(
Q(alive=False), Q(related_individual__isnull=True)
),
[i4],
)
self.assertSequenceEqual(
Individual.objects.exclude(
Q(alive=False), Q(related_individual__isnull=True)
).order_by("pk"),
[i1, i2, i3],
)
class Ticket23622Tests(TestCase):
@skipUnlessDBFeature("can_distinct_on_fields")
    def test_ticket_23622(self):
        """
        __pk__in and __in should behave the same for related fields when
        using a DISTINCT ON subquery.
        """
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=0.0)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=123,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=23,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=234,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1,
field_b0=12,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=567,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=76,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=7,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2,
field_b0=56,
field_b1=True,
modelc_fk=c1,
)
qx = Q(
ticket23605b__pk__in=Ticket23605B.objects.order_by(
"modela_fk", "-field_b1"
).distinct("modela_fk")
) & Q(ticket23605b__field_b0__gte=300)
qy = Q(
ticket23605b__in=Ticket23605B.objects.order_by(
"modela_fk", "-field_b1"
).distinct("modela_fk")
) & Q(ticket23605b__field_b0__gte=300)
self.assertEqual(
set(Ticket23605A.objects.filter(qx).values_list("pk", flat=True)),
set(Ticket23605A.objects.filter(qy).values_list("pk", flat=True)),
)
self.assertSequenceEqual(Ticket23605A.objects.filter(qx), [a2])
|
f0542cb9c0f149fb2997464bbce58d9c8eda09d961f937804369f39a98397827 | import json
import unittest
import xml.etree.ElementTree
from django.db import NotSupportedError, connection, transaction
from django.db.models import Count
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from .models import Tag
@skipUnlessDBFeature("supports_explaining_query_execution")
class ExplainTests(TestCase):
def test_basic(self):
querysets = [
Tag.objects.filter(name="test"),
Tag.objects.filter(name="test").select_related("parent"),
Tag.objects.filter(name="test").prefetch_related("children"),
Tag.objects.filter(name="test").annotate(Count("children")),
Tag.objects.filter(name="test").values_list("name"),
Tag.objects.order_by().union(Tag.objects.order_by().filter(name="test")),
]
if connection.features.has_select_for_update:
querysets.append(Tag.objects.select_for_update().filter(name="test"))
supported_formats = connection.features.supported_explain_formats
all_formats = (
(None,)
+ tuple(supported_formats)
+ tuple(f.lower() for f in supported_formats)
)
for idx, queryset in enumerate(querysets):
for format in all_formats:
with self.subTest(format=format, queryset=idx):
with self.assertNumQueries(1) as captured_queries:
result = queryset.explain(format=format)
self.assertTrue(
captured_queries[0]["sql"].startswith(
connection.ops.explain_prefix
)
)
self.assertIsInstance(result, str)
self.assertTrue(result)
if not format:
continue
if format.lower() == "xml":
try:
xml.etree.ElementTree.fromstring(result)
except xml.etree.ElementTree.ParseError as e:
self.fail(
f"QuerySet.explain() result is not valid XML: {e}"
)
elif format.lower() == "json":
try:
json.loads(result)
except json.JSONDecodeError as e:
self.fail(
f"QuerySet.explain() result is not valid JSON: {e}"
)
def test_unknown_options(self):
with self.assertRaisesMessage(ValueError, "Unknown options: TEST, TEST2"):
Tag.objects.explain(**{"TEST": 1, "TEST2": 1})
def test_unknown_format(self):
msg = "DOES NOT EXIST is not a recognized format."
if connection.features.supported_explain_formats:
msg += " Allowed formats: %s" % ", ".join(
sorted(connection.features.supported_explain_formats)
)
else:
msg += f" {connection.display_name} does not support any formats."
with self.assertRaisesMessage(ValueError, msg):
Tag.objects.explain(format="does not exist")
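    # A minimal sketch of the API under test (output is backend-specific;
    # the PostgreSQL-style line below is illustrative only):
    #
    #   >>> Tag.objects.filter(name="test").explain()
    #   'Seq Scan on queries_tag  (cost=0.00..1.05 rows=1 width=...)'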
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_postgres_options(self):
qs = Tag.objects.filter(name="test")
test_options = [
{"COSTS": False, "BUFFERS": True, "ANALYZE": True},
{"costs": False, "buffers": True, "analyze": True},
{"verbose": True, "timing": True, "analyze": True},
{"verbose": False, "timing": False, "analyze": True},
{"summary": True},
]
if connection.features.is_postgresql_12:
test_options.append({"settings": True})
if connection.features.is_postgresql_13:
test_options.append({"analyze": True, "wal": True})
for options in test_options:
with self.subTest(**options), transaction.atomic():
with CaptureQueriesContext(connection) as captured_queries:
qs.explain(format="text", **options)
self.assertEqual(len(captured_queries), 1)
for name, value in options.items():
option = "{} {}".format(name.upper(), "true" if value else "false")
self.assertIn(option, captured_queries[0]["sql"])
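    # The options above are folded into the EXPLAIN prefix, so e.g.
    # {"costs": False, "analyze": True} produces a statement roughly like:
    #
    #   EXPLAIN (COSTS false, ANALYZE true) SELECT ... FROM "queries_tag" ...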
def test_option_sql_injection(self):
qs = Tag.objects.filter(name="test")
options = {"SUMMARY true) SELECT 1; --": True}
msg = "Invalid option name: 'SUMMARY true) SELECT 1; --'"
with self.assertRaisesMessage(ValueError, msg):
qs.explain(**options)
def test_invalid_option_names(self):
qs = Tag.objects.filter(name="test")
tests = [
'opt"ion',
"o'ption",
"op`tion",
"opti on",
"option--",
"optio\tn",
"o\nption",
"option;",
"你 好",
# [] are used by MSSQL.
"option[",
"option]",
]
for invalid_option in tests:
with self.subTest(invalid_option):
msg = f"Invalid option name: {invalid_option!r}"
with self.assertRaisesMessage(ValueError, msg):
qs.explain(**{invalid_option: True})
@unittest.skipUnless(connection.vendor == "mysql", "MySQL specific")
def test_mysql_text_to_traditional(self):
        # Ensure these cached properties are initialized, to prevent
        # queries for the MariaDB or MySQL version during QuerySet
        # evaluation.
connection.features.supported_explain_formats
with CaptureQueriesContext(connection) as captured_queries:
Tag.objects.filter(name="test").explain(format="text")
self.assertEqual(len(captured_queries), 1)
self.assertIn("FORMAT=TRADITIONAL", captured_queries[0]["sql"])
@unittest.skipUnless(
connection.vendor == "mysql", "MariaDB and MySQL >= 8.0.18 specific."
)
def test_mysql_analyze(self):
qs = Tag.objects.filter(name="test")
with CaptureQueriesContext(connection) as captured_queries:
qs.explain(analyze=True)
self.assertEqual(len(captured_queries), 1)
prefix = "ANALYZE " if connection.mysql_is_mariadb else "EXPLAIN ANALYZE "
self.assertTrue(captured_queries[0]["sql"].startswith(prefix))
with CaptureQueriesContext(connection) as captured_queries:
qs.explain(analyze=True, format="JSON")
self.assertEqual(len(captured_queries), 1)
if connection.mysql_is_mariadb:
self.assertIn("FORMAT=JSON", captured_queries[0]["sql"])
else:
self.assertNotIn("FORMAT=JSON", captured_queries[0]["sql"])
@skipIfDBFeature("supports_explaining_query_execution")
class ExplainUnsupportedTests(TestCase):
def test_message(self):
msg = "This backend does not support explaining query execution."
with self.assertRaisesMessage(NotSupportedError, msg):
Tag.objects.filter(name="test").explain()
|
f859e00dfacd3ff1bb0f052b8078bd94975355023b5406bf8b7cc4a0916ec9da | import datetime
from unittest import skipUnless
from django.db import connection
from django.db.models import CASCADE, ForeignKey, Index, Q
from django.db.models.functions import Lower
from django.test import (
TestCase,
TransactionTestCase,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import override_settings
from django.utils import timezone
from .models import (
Article,
ArticleTranslation,
IndexedArticle2,
IndexTogetherSingleList,
)
class SchemaIndexesTests(TestCase):
"""
Test index handling by the db.backends.schema infrastructure.
"""
def test_index_name_hash(self):
"""
Index names should be deterministic.
"""
editor = connection.schema_editor()
index_name = editor._create_index_name(
table_name=Article._meta.db_table,
column_names=("c1",),
suffix="123",
)
self.assertEqual(index_name, "indexes_article_c1_a52bd80b123")
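        # The generated name decomposes as <table>_<columns>_<hash><suffix>:
        # "indexes_article" + "c1" + "a52bd80b" + "123".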
def test_index_name(self):
"""
Index names on the built-in database backends::
* Are truncated as needed.
* Include all the column names.
* Include a deterministic hash.
"""
long_name = "l%sng" % ("o" * 100)
editor = connection.schema_editor()
index_name = editor._create_index_name(
table_name=Article._meta.db_table,
column_names=("c1", "c2", long_name),
suffix="ix",
)
expected = {
"mysql": "indexes_article_c1_c2_looooooooooooooooooo_255179b2ix",
"oracle": "indexes_a_c1_c2_loo_255179b2ix",
"postgresql": "indexes_article_c1_c2_loooooooooooooooooo_255179b2ix",
"sqlite": "indexes_article_c1_c2_l%sng_255179b2ix" % ("o" * 100),
}
if connection.vendor not in expected:
self.skipTest(
"This test is only supported on the built-in database backends."
)
self.assertEqual(index_name, expected[connection.vendor])
def test_index_together(self):
editor = connection.schema_editor()
index_sql = [str(statement) for statement in editor._model_indexes_sql(Article)]
self.assertEqual(len(index_sql), 1)
# Ensure the index name is properly quoted
self.assertIn(
connection.ops.quote_name(
editor._create_index_name(
Article._meta.db_table, ["headline", "pub_date"], suffix="_idx"
)
),
index_sql[0],
)
def test_index_together_single_list(self):
# Test for using index_together with a single list (#22172)
index_sql = connection.schema_editor()._model_indexes_sql(
IndexTogetherSingleList
)
self.assertEqual(len(index_sql), 1)
def test_columns_list_sql(self):
index = Index(fields=["headline"], name="whitespace_idx")
editor = connection.schema_editor()
self.assertIn(
"(%s)" % editor.quote_name("headline"),
str(index.create_sql(Article, editor)),
)
@skipUnlessDBFeature("supports_index_column_ordering")
def test_descending_columns_list_sql(self):
index = Index(fields=["-headline"], name="whitespace_idx")
editor = connection.schema_editor()
self.assertIn(
"(%s DESC)" % editor.quote_name("headline"),
str(index.create_sql(Article, editor)),
)
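    # For reference, the declaration above renders roughly as (quoting is
    # backend-specific):
    #
    #   CREATE INDEX "whitespace_idx" ON "indexes_article" ("headline" DESC)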
class SchemaIndexesNotPostgreSQLTests(TransactionTestCase):
available_apps = ["indexes"]
def test_create_index_ignores_opclasses(self):
index = Index(
name="test_ops_class",
fields=["headline"],
opclasses=["varchar_pattern_ops"],
)
with connection.schema_editor() as editor:
# This would error if opclasses weren't ignored.
editor.add_index(IndexedArticle2, index)
# The `condition` parameter is ignored by databases that don't support partial
# indexes.
@skipIfDBFeature("supports_partial_indexes")
class PartialIndexConditionIgnoredTests(TransactionTestCase):
available_apps = ["indexes"]
def test_condition_ignored(self):
index = Index(
name="test_condition_ignored",
fields=["published"],
condition=Q(published=True),
)
with connection.schema_editor() as editor:
# This would error if condition weren't ignored.
editor.add_index(Article, index)
self.assertNotIn(
"WHERE %s" % editor.quote_name("published"),
str(index.create_sql(Article, editor)),
)
@skipUnless(connection.vendor == "postgresql", "PostgreSQL tests")
class SchemaIndexesPostgreSQLTests(TransactionTestCase):
available_apps = ["indexes"]
get_opclass_query = """
SELECT opcname, c.relname FROM pg_opclass AS oc
JOIN pg_index as i on oc.oid = ANY(i.indclass)
JOIN pg_class as c on c.oid = i.indexrelid
WHERE c.relname = '%s'
"""
def test_text_indexes(self):
"""Test creation of PostgreSQL-specific text indexes (#12234)"""
from .models import IndexedArticle
index_sql = [
str(statement)
for statement in connection.schema_editor()._model_indexes_sql(
IndexedArticle
)
]
self.assertEqual(len(index_sql), 5)
self.assertIn('("headline" varchar_pattern_ops)', index_sql[1])
self.assertIn('("body" text_pattern_ops)', index_sql[3])
# unique=True and db_index=True should only create the varchar-specific
# index (#19441).
self.assertIn('("slug" varchar_pattern_ops)', index_sql[4])
def test_virtual_relation_indexes(self):
"""Test indexes are not created for related objects"""
index_sql = connection.schema_editor()._model_indexes_sql(Article)
self.assertEqual(len(index_sql), 1)
def test_ops_class(self):
index = Index(
name="test_ops_class",
fields=["headline"],
opclasses=["varchar_pattern_ops"],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % "test_ops_class")
self.assertEqual(
cursor.fetchall(), [("varchar_pattern_ops", "test_ops_class")]
)
def test_ops_class_multiple_columns(self):
index = Index(
name="test_ops_class_multiple",
fields=["headline", "body"],
opclasses=["varchar_pattern_ops", "text_pattern_ops"],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % "test_ops_class_multiple")
expected_ops_classes = (
("varchar_pattern_ops", "test_ops_class_multiple"),
("text_pattern_ops", "test_ops_class_multiple"),
)
self.assertCountEqual(cursor.fetchall(), expected_ops_classes)
def test_ops_class_partial(self):
index = Index(
name="test_ops_class_partial",
fields=["body"],
opclasses=["text_pattern_ops"],
condition=Q(headline__contains="China"),
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % "test_ops_class_partial")
self.assertCountEqual(
cursor.fetchall(), [("text_pattern_ops", "test_ops_class_partial")]
)
def test_ops_class_partial_tablespace(self):
indexname = "test_ops_class_tblspace"
index = Index(
name=indexname,
fields=["body"],
opclasses=["text_pattern_ops"],
condition=Q(headline__contains="China"),
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
self.assertIn(
'TABLESPACE "pg_default" ',
str(index.create_sql(IndexedArticle2, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % indexname)
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", indexname)])
def test_ops_class_descending(self):
indexname = "test_ops_class_ordered"
index = Index(
name=indexname,
fields=["-body"],
opclasses=["text_pattern_ops"],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % indexname)
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", indexname)])
def test_ops_class_descending_partial(self):
indexname = "test_ops_class_ordered_partial"
index = Index(
name=indexname,
fields=["-body"],
opclasses=["text_pattern_ops"],
condition=Q(headline__contains="China"),
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % indexname)
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", indexname)])
@skipUnlessDBFeature("supports_covering_indexes")
def test_ops_class_include(self):
index_name = "test_ops_class_include"
index = Index(
name=index_name,
fields=["body"],
opclasses=["text_pattern_ops"],
include=["headline"],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % index_name)
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
@skipUnlessDBFeature("supports_covering_indexes")
def test_ops_class_include_tablespace(self):
index_name = "test_ops_class_include_tblspace"
index = Index(
name=index_name,
fields=["body"],
opclasses=["text_pattern_ops"],
include=["headline"],
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
self.assertIn(
'TABLESPACE "pg_default"',
str(index.create_sql(IndexedArticle2, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % index_name)
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
def test_ops_class_columns_lists_sql(self):
index = Index(
fields=["headline"],
name="whitespace_idx",
opclasses=["text_pattern_ops"],
)
with connection.schema_editor() as editor:
self.assertIn(
"(%s text_pattern_ops)" % editor.quote_name("headline"),
str(index.create_sql(Article, editor)),
)
def test_ops_class_descending_columns_list_sql(self):
index = Index(
fields=["-headline"],
name="whitespace_idx",
opclasses=["text_pattern_ops"],
)
with connection.schema_editor() as editor:
self.assertIn(
"(%s text_pattern_ops DESC)" % editor.quote_name("headline"),
str(index.create_sql(Article, editor)),
)
@skipUnless(connection.vendor == "mysql", "MySQL tests")
class SchemaIndexesMySQLTests(TransactionTestCase):
available_apps = ["indexes"]
    def test_no_index_for_foreignkey(self):
        """
        MySQL on InnoDB already creates indexes automatically for foreign
        keys (#14180). An index should still be created if
        db_constraint=False (#26171).
        """
with connection.cursor() as cursor:
storage = connection.introspection.get_storage_engine(
cursor,
ArticleTranslation._meta.db_table,
)
if storage != "InnoDB":
self.skipTest("This test only applies to the InnoDB storage engine")
index_sql = [
str(statement)
for statement in connection.schema_editor()._model_indexes_sql(
ArticleTranslation
)
]
self.assertEqual(
index_sql,
[
"CREATE INDEX "
"`indexes_articletranslation_article_no_constraint_id_d6c0806b` "
"ON `indexes_articletranslation` (`article_no_constraint_id`)"
],
)
# The index also shouldn't be created if the ForeignKey is added after
# the model was created.
field_created = False
try:
with connection.schema_editor() as editor:
new_field = ForeignKey(Article, CASCADE)
new_field.set_attributes_from_name("new_foreign_key")
editor.add_field(ArticleTranslation, new_field)
field_created = True
# No deferred SQL. The FK constraint is included in the
# statement to add the field.
self.assertFalse(editor.deferred_sql)
finally:
if field_created:
with connection.schema_editor() as editor:
editor.remove_field(ArticleTranslation, new_field)
@skipUnlessDBFeature("supports_partial_indexes")
# SQLite doesn't support timezone-aware datetimes when USE_TZ is False.
@override_settings(USE_TZ=True)
class PartialIndexTests(TransactionTestCase):
# Schema editor is used to create the index to test that it works.
available_apps = ["indexes"]
def test_partial_index(self):
with connection.schema_editor() as editor:
index = Index(
name="recent_article_idx",
fields=["pub_date"],
condition=Q(
pub_date__gt=datetime.datetime(
year=2015,
month=1,
day=1,
# PostgreSQL would otherwise complain about the lookup
# being converted to a mutable function (by removing
# the timezone in the cast) which is forbidden.
tzinfo=timezone.get_current_timezone(),
),
),
)
self.assertIn(
"WHERE %s" % editor.quote_name("pub_date"),
str(index.create_sql(Article, schema_editor=editor)),
)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(
index.name,
connection.introspection.get_constraints(
cursor=cursor,
table_name=Article._meta.db_table,
),
)
editor.remove_index(index=index, model=Article)
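            # The statement built above looks roughly like this on
            # PostgreSQL (datetime literal abbreviated):
            #
            #   CREATE INDEX "recent_article_idx" ON "indexes_article"
            #   ("pub_date") WHERE "pub_date" > '2015-01-01 00:00:00+00:00'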
def test_integer_restriction_partial(self):
with connection.schema_editor() as editor:
index = Index(
name="recent_article_idx",
fields=["id"],
condition=Q(pk__gt=1),
)
self.assertIn(
"WHERE %s" % editor.quote_name("id"),
str(index.create_sql(Article, schema_editor=editor)),
)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(
index.name,
connection.introspection.get_constraints(
cursor=cursor,
table_name=Article._meta.db_table,
),
)
editor.remove_index(index=index, model=Article)
def test_boolean_restriction_partial(self):
with connection.schema_editor() as editor:
index = Index(
name="published_index",
fields=["published"],
condition=Q(published=True),
)
self.assertIn(
"WHERE %s" % editor.quote_name("published"),
str(index.create_sql(Article, schema_editor=editor)),
)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(
index.name,
connection.introspection.get_constraints(
cursor=cursor,
table_name=Article._meta.db_table,
),
)
editor.remove_index(index=index, model=Article)
@skipUnlessDBFeature("supports_functions_in_partial_indexes")
def test_multiple_conditions(self):
with connection.schema_editor() as editor:
index = Index(
name="recent_article_idx",
fields=["pub_date", "headline"],
condition=(
Q(
pub_date__gt=datetime.datetime(
year=2015,
month=1,
day=1,
tzinfo=timezone.get_current_timezone(),
)
)
& Q(headline__contains="China")
),
)
sql = str(index.create_sql(Article, schema_editor=editor))
where = sql.find("WHERE")
self.assertIn("WHERE (%s" % editor.quote_name("pub_date"), sql)
# Because each backend has different syntax for the operators,
# check ONLY the occurrence of headline in the SQL.
self.assertGreater(sql.rfind("headline"), where)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(
index.name,
connection.introspection.get_constraints(
cursor=cursor,
table_name=Article._meta.db_table,
),
)
editor.remove_index(index=index, model=Article)
def test_is_null_condition(self):
with connection.schema_editor() as editor:
index = Index(
name="recent_article_idx",
fields=["pub_date"],
condition=Q(pub_date__isnull=False),
)
self.assertIn(
"WHERE %s IS NOT NULL" % editor.quote_name("pub_date"),
str(index.create_sql(Article, schema_editor=editor)),
)
editor.add_index(index=index, model=Article)
with connection.cursor() as cursor:
self.assertIn(
index.name,
connection.introspection.get_constraints(
cursor=cursor,
table_name=Article._meta.db_table,
),
)
editor.remove_index(index=index, model=Article)
@skipUnlessDBFeature("supports_expression_indexes")
def test_partial_func_index(self):
index_name = "partial_func_idx"
index = Index(
Lower("headline").desc(),
name=index_name,
condition=Q(pub_date__isnull=False),
)
with connection.schema_editor() as editor:
editor.add_index(index=index, model=Article)
sql = index.create_sql(Article, schema_editor=editor)
table = Article._meta.db_table
self.assertIs(sql.references_column(table, "headline"), True)
sql = str(sql)
self.assertIn("LOWER(%s)" % editor.quote_name("headline"), sql)
self.assertIn(
"WHERE %s IS NOT NULL" % editor.quote_name("pub_date"),
sql,
)
self.assertGreater(sql.find("WHERE"), sql.find("LOWER"))
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor=cursor,
table_name=table,
)
self.assertIn(index_name, constraints)
if connection.features.supports_index_column_ordering:
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
with connection.schema_editor() as editor:
editor.remove_index(Article, index)
with connection.cursor() as cursor:
self.assertNotIn(
index_name,
connection.introspection.get_constraints(
cursor=cursor,
table_name=table,
),
)
@skipUnlessDBFeature("supports_covering_indexes")
class CoveringIndexTests(TransactionTestCase):
available_apps = ["indexes"]
def test_covering_index(self):
index = Index(
name="covering_headline_idx",
fields=["headline"],
include=["pub_date", "published"],
)
with connection.schema_editor() as editor:
self.assertIn(
"(%s) INCLUDE (%s, %s)"
% (
editor.quote_name("headline"),
editor.quote_name("pub_date"),
editor.quote_name("published"),
),
str(index.create_sql(Article, editor)),
)
editor.add_index(Article, index)
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor=cursor,
table_name=Article._meta.db_table,
)
self.assertIn(index.name, constraints)
self.assertEqual(
constraints[index.name]["columns"],
["headline", "pub_date", "published"],
)
editor.remove_index(Article, index)
with connection.cursor() as cursor:
self.assertNotIn(
index.name,
connection.introspection.get_constraints(
cursor=cursor,
table_name=Article._meta.db_table,
),
)
def test_covering_partial_index(self):
index = Index(
name="covering_partial_headline_idx",
fields=["headline"],
include=["pub_date"],
condition=Q(pub_date__isnull=False),
)
with connection.schema_editor() as editor:
self.assertIn(
"(%s) INCLUDE (%s) WHERE %s "
% (
editor.quote_name("headline"),
editor.quote_name("pub_date"),
editor.quote_name("pub_date"),
),
str(index.create_sql(Article, editor)),
)
editor.add_index(Article, index)
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor=cursor,
table_name=Article._meta.db_table,
)
self.assertIn(index.name, constraints)
self.assertEqual(
constraints[index.name]["columns"],
["headline", "pub_date"],
)
editor.remove_index(Article, index)
with connection.cursor() as cursor:
self.assertNotIn(
index.name,
connection.introspection.get_constraints(
cursor=cursor,
table_name=Article._meta.db_table,
),
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_covering_func_index(self):
index_name = "covering_func_headline_idx"
index = Index(Lower("headline"), name=index_name, include=["pub_date"])
with connection.schema_editor() as editor:
editor.add_index(index=index, model=Article)
sql = index.create_sql(Article, schema_editor=editor)
table = Article._meta.db_table
self.assertIs(sql.references_column(table, "headline"), True)
sql = str(sql)
self.assertIn("LOWER(%s)" % editor.quote_name("headline"), sql)
self.assertIn("INCLUDE (%s)" % editor.quote_name("pub_date"), sql)
self.assertGreater(sql.find("INCLUDE"), sql.find("LOWER"))
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(
cursor=cursor,
table_name=table,
)
self.assertIn(index_name, constraints)
self.assertIn("pub_date", constraints[index_name]["columns"])
with connection.schema_editor() as editor:
editor.remove_index(Article, index)
with connection.cursor() as cursor:
self.assertNotIn(
index_name,
connection.introspection.get_constraints(
cursor=cursor,
table_name=table,
),
)
@skipIfDBFeature("supports_covering_indexes")
class CoveringIndexIgnoredTests(TransactionTestCase):
available_apps = ["indexes"]
def test_covering_ignored(self):
index = Index(
name="test_covering_ignored",
fields=["headline"],
include=["pub_date"],
)
with connection.schema_editor() as editor:
editor.add_index(Article, index)
            self.assertNotIn(
                "INCLUDE (%s)" % editor.quote_name("pub_date"),
                str(index.create_sql(Article, editor)),
)
|
6f54001b63e3538a424e83e979d8e3eec75bd69db8d25b94bdd36442d9189038 | import base64
import os
import shutil
import string
import tempfile
import unittest
from datetime import timedelta
from http import cookies
from pathlib import Path
from unittest import mock
from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.contrib.sessions.backends.cached_db import SessionStore as CacheDBSession
from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
from django.contrib.sessions.backends.file import SessionStore as FileSession
from django.contrib.sessions.backends.signed_cookies import (
SessionStore as CookieSession,
)
from django.contrib.sessions.exceptions import InvalidSessionKey, SessionInterrupted
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sessions.models import Session
from django.contrib.sessions.serializers import JSONSerializer
from django.core import management
from django.core.cache import caches
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured
from django.core.signing import TimestampSigner
from django.http import HttpResponse
from django.test import (
RequestFactory,
SimpleTestCase,
TestCase,
ignore_warnings,
override_settings,
)
from django.utils import timezone
from .models import SessionStore as CustomDatabaseSession
class SessionTestsMixin:
# This does not inherit from TestCase to avoid any tests being run with this
# class, which wouldn't work, and to allow different TestCase subclasses to
# be used.
backend = None # subclasses must specify
def setUp(self):
self.session = self.backend()
def tearDown(self):
        # NB: be careful to delete any sessions created; stale sessions
        # fill up /tmp (with some backends) and eventually overwhelm it
        # after many runs (think buildbots).
self.session.delete()
def test_new_session(self):
self.assertIs(self.session.modified, False)
self.assertIs(self.session.accessed, False)
def test_get_empty(self):
self.assertIsNone(self.session.get("cat"))
def test_store(self):
self.session["cat"] = "dog"
self.assertIs(self.session.modified, True)
self.assertEqual(self.session.pop("cat"), "dog")
def test_pop(self):
self.session["some key"] = "exists"
        # Reset these to pretend the key hasn't been accessed or modified:
        self.session.accessed = False
        self.session.modified = False
self.assertEqual(self.session.pop("some key"), "exists")
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
self.assertIsNone(self.session.get("some key"))
def test_pop_default(self):
self.assertEqual(
self.session.pop("some key", "does not exist"), "does not exist"
)
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_pop_default_named_argument(self):
self.assertEqual(
self.session.pop("some key", default="does not exist"), "does not exist"
)
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_pop_no_default_keyerror_raised(self):
with self.assertRaises(KeyError):
self.session.pop("some key")
def test_setdefault(self):
self.assertEqual(self.session.setdefault("foo", "bar"), "bar")
self.assertEqual(self.session.setdefault("foo", "baz"), "bar")
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
def test_update(self):
self.session.update({"update key": 1})
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
self.assertEqual(self.session.get("update key", None), 1)
def test_has_key(self):
self.session["some key"] = 1
self.session.modified = False
self.session.accessed = False
self.assertIn("some key", self.session)
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_values(self):
self.assertEqual(list(self.session.values()), [])
self.assertIs(self.session.accessed, True)
self.session["some key"] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.values()), [1])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_keys(self):
self.session["x"] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.keys()), ["x"])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_items(self):
self.session["x"] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [("x", 1)])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_clear(self):
self.session["x"] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [("x", 1)])
self.session.clear()
self.assertEqual(list(self.session.items()), [])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
def test_save(self):
self.session.save()
self.assertIs(self.session.exists(self.session.session_key), True)
def test_delete(self):
self.session.save()
self.session.delete(self.session.session_key)
self.assertIs(self.session.exists(self.session.session_key), False)
def test_flush(self):
self.session["foo"] = "bar"
self.session.save()
prev_key = self.session.session_key
self.session.flush()
self.assertIs(self.session.exists(prev_key), False)
self.assertNotEqual(self.session.session_key, prev_key)
self.assertIsNone(self.session.session_key)
self.assertIs(self.session.modified, True)
self.assertIs(self.session.accessed, True)
def test_cycle(self):
self.session["a"], self.session["b"] = "c", "d"
self.session.save()
prev_key = self.session.session_key
prev_data = list(self.session.items())
self.session.cycle_key()
self.assertIs(self.session.exists(prev_key), False)
self.assertNotEqual(self.session.session_key, prev_key)
self.assertEqual(list(self.session.items()), prev_data)
def test_cycle_with_no_session_cache(self):
self.session["a"], self.session["b"] = "c", "d"
self.session.save()
prev_data = self.session.items()
self.session = self.backend(self.session.session_key)
self.assertIs(hasattr(self.session, "_session_cache"), False)
self.session.cycle_key()
self.assertCountEqual(self.session.items(), prev_data)
def test_save_doesnt_clear_data(self):
self.session["a"] = "b"
self.session.save()
self.assertEqual(self.session["a"], "b")
def test_invalid_key(self):
# Submitting an invalid session key (either by guessing, or if the db has
# removed the key) results in a new key being generated.
try:
session = self.backend("1")
session.save()
self.assertNotEqual(session.session_key, "1")
self.assertIsNone(session.get("cat"))
session.delete()
finally:
# Some backends leave a stale cache entry for the invalid
# session key; make sure that entry is manually deleted
session.delete("1")
    def test_session_key_empty_string_invalid(self):
        """Falsey values (such as an empty string) are rejected."""
self.session._session_key = ""
self.assertIsNone(self.session.session_key)
def test_session_key_too_short_invalid(self):
"""Strings shorter than 8 characters are rejected."""
self.session._session_key = "1234567"
self.assertIsNone(self.session.session_key)
def test_session_key_valid_string_saved(self):
"""Strings of length 8 and up are accepted and stored."""
self.session._session_key = "12345678"
self.assertEqual(self.session.session_key, "12345678")
def test_session_key_is_read_only(self):
def set_session_key(session):
session.session_key = session._get_new_session_key()
with self.assertRaises(AttributeError):
set_session_key(self.session)
# Custom session expiry
def test_default_expiry(self):
        # A normal session has a max age equal to SESSION_COOKIE_AGE.
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
# So does a custom session with an idle expiration time of 0 (but it'll
# expire at browser close)
self.session.set_expiry(0)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_custom_expiry_seconds(self):
modification = timezone.now()
self.session.set_expiry(10)
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_timedelta(self):
modification = timezone.now()
# Mock timezone.now, because set_expiry calls it on this code path.
original_now = timezone.now
try:
timezone.now = lambda: modification
self.session.set_expiry(timedelta(seconds=10))
finally:
timezone.now = original_now
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_datetime(self):
modification = timezone.now()
self.session.set_expiry(modification + timedelta(seconds=10))
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_reset(self):
self.session.set_expiry(None)
self.session.set_expiry(10)
self.session.set_expiry(None)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_get_expire_at_browser_close(self):
# Tests get_expire_at_browser_close with different settings and different
# set_expiry calls
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
self.session.set_expiry(10)
self.assertIs(self.session.get_expire_at_browser_close(), False)
self.session.set_expiry(0)
self.assertIs(self.session.get_expire_at_browser_close(), True)
self.session.set_expiry(None)
self.assertIs(self.session.get_expire_at_browser_close(), False)
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
self.session.set_expiry(10)
self.assertIs(self.session.get_expire_at_browser_close(), False)
self.session.set_expiry(0)
self.assertIs(self.session.get_expire_at_browser_close(), True)
self.session.set_expiry(None)
self.assertIs(self.session.get_expire_at_browser_close(), True)
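    # For reference, set_expiry() accepts several value types:
    #   0                    -> expire when the browser closes
    #   None                 -> fall back to the global session policy
    #   int                  -> expire after that many seconds of inactivity
    #   timedelta / datetime -> expire after that interval / at that moment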
def test_decode(self):
# Ensure we can decode what we encode
data = {"a test key": "a test value"}
encoded = self.session.encode(data)
self.assertEqual(self.session.decode(encoded), data)
def test_decode_failure_logged_to_security(self):
tests = [
base64.b64encode(b"flaskdj:alkdjf").decode("ascii"),
"bad:encoded:value",
]
for encoded in tests:
with self.subTest(encoded=encoded):
with self.assertLogs(
"django.security.SuspiciousSession", "WARNING"
) as cm:
self.assertEqual(self.session.decode(encoded), {})
# The failed decode is logged.
self.assertIn("Session data corrupted", cm.output[0])
def test_decode_serializer_exception(self):
signer = TimestampSigner(salt=self.session.key_salt)
encoded = signer.sign(b"invalid data")
self.assertEqual(self.session.decode(encoded), {})
def test_actual_expiry(self):
old_session_key = None
new_session_key = None
try:
self.session["foo"] = "bar"
self.session.set_expiry(-timedelta(seconds=10))
self.session.save()
old_session_key = self.session.session_key
# With an expiry date in the past, the session expires instantly.
new_session = self.backend(self.session.session_key)
new_session_key = new_session.session_key
self.assertNotIn("foo", new_session)
finally:
self.session.delete(old_session_key)
self.session.delete(new_session_key)
def test_session_load_does_not_create_record(self):
"""
        Loading an unknown session key does not create a session record.
        Creating session records on load is a DoS vulnerability.
"""
session = self.backend("someunknownkey")
session.load()
self.assertIsNone(session.session_key)
self.assertIs(session.exists(session.session_key), False)
        # The provided unknown key was cycled, not reused.
self.assertNotEqual(session.session_key, "someunknownkey")
def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
"""
Sessions shouldn't be resurrected by a concurrent request.
"""
# Create new session.
s1 = self.backend()
s1["test_data"] = "value1"
s1.save(must_create=True)
# Logout in another context.
s2 = self.backend(s1.session_key)
s2.delete()
# Modify session in first context.
s1["test_data"] = "value2"
with self.assertRaises(UpdateError):
# This should throw an exception as the session is deleted, not
# resurrect the session.
s1.save()
self.assertEqual(s1.load(), {})
class DatabaseSessionTests(SessionTestsMixin, TestCase):
backend = DatabaseSession
session_engine = "django.contrib.sessions.backends.db"
@property
def model(self):
return self.backend.get_model_class()
    def test_session_str(self):
        "str() of a session should return the session key."
self.session["x"] = 1
self.session.save()
session_key = self.session.session_key
s = self.model.objects.get(session_key=session_key)
self.assertEqual(str(s), session_key)
    def test_session_get_decoded(self):
        """
        Session.get_decoded() retrieves data stored in the normal way.
        """
self.session["x"] = 1
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
self.assertEqual(s.get_decoded(), {"x": 1})
def test_sessionmanager_save(self):
"""
Test SessionManager.save method
"""
# Create a session
self.session["y"] = 1
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
# Change it
self.model.objects.save(s.session_key, {"y": 2}, s.expire_date)
# Clear cache, so that it will be retrieved from DB
del self.session._session_cache
self.assertEqual(self.session["y"], 2)
def test_clearsessions_command(self):
"""
Test clearsessions command for clearing expired sessions.
"""
self.assertEqual(0, self.model.objects.count())
# One object in the future
self.session["foo"] = "bar"
self.session.set_expiry(3600)
self.session.save()
# One object in the past
other_session = self.backend()
other_session["foo"] = "bar"
other_session.set_expiry(-3600)
other_session.save()
# Two sessions are in the database before clearsessions...
self.assertEqual(2, self.model.objects.count())
with override_settings(SESSION_ENGINE=self.session_engine):
management.call_command("clearsessions")
# ... and one is deleted.
self.assertEqual(1, self.model.objects.count())
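        # The management command invoked above is the programmatic
        # equivalent of running:
        #
        #   $ django-admin clearsessions
        #
        # which delegates to the session engine's clear_expired() method.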
@override_settings(USE_TZ=True)
class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests):
pass
class CustomDatabaseSessionTests(DatabaseSessionTests):
backend = CustomDatabaseSession
session_engine = "sessions_tests.models"
custom_session_cookie_age = 60 * 60 * 24 # One day.
def test_extra_session_field(self):
# Set the account ID to be picked up by a custom session storage
# and saved to a custom session model database column.
self.session["_auth_user_id"] = 42
self.session.save()
# Make sure that the customized create_model_instance() was called.
s = self.model.objects.get(session_key=self.session.session_key)
self.assertEqual(s.account_id, 42)
# Make the session "anonymous".
self.session.pop("_auth_user_id")
self.session.save()
# Make sure that save() on an existing session did the right job.
s = self.model.objects.get(session_key=self.session.session_key)
self.assertIsNone(s.account_id)
def test_custom_expiry_reset(self):
self.session.set_expiry(None)
self.session.set_expiry(10)
self.session.set_expiry(None)
self.assertEqual(self.session.get_expiry_age(), self.custom_session_cookie_age)
def test_default_expiry(self):
self.assertEqual(self.session.get_expiry_age(), self.custom_session_cookie_age)
self.session.set_expiry(0)
self.assertEqual(self.session.get_expiry_age(), self.custom_session_cookie_age)
class CacheDBSessionTests(SessionTestsMixin, TestCase):
backend = CacheDBSession
def test_exists_searches_cache_first(self):
self.session.save()
with self.assertNumQueries(0):
self.assertIs(self.session.exists(self.session.session_key), True)
# Some backends might issue a warning
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
@override_settings(SESSION_CACHE_ALIAS="sessions")
def test_non_default_cache(self):
        # #21000 - the cached_db backend should respect SESSION_CACHE_ALIAS.
with self.assertRaises(InvalidCacheBackendError):
self.backend()
@override_settings(USE_TZ=True)
class CacheDBSessionWithTimeZoneTests(CacheDBSessionTests):
pass
class FileSessionTests(SessionTestsMixin, SimpleTestCase):
backend = FileSession
def setUp(self):
# Do file session tests in an isolated directory, and kill it after we're done.
self.original_session_file_path = settings.SESSION_FILE_PATH
self.temp_session_store = settings.SESSION_FILE_PATH = self.mkdtemp()
# Reset the file session backend's internal caches
if hasattr(self.backend, "_storage_path"):
del self.backend._storage_path
super().setUp()
def tearDown(self):
super().tearDown()
settings.SESSION_FILE_PATH = self.original_session_file_path
shutil.rmtree(self.temp_session_store)
def mkdtemp(self):
return tempfile.mkdtemp()
@override_settings(
SESSION_FILE_PATH="/if/this/directory/exists/you/have/a/weird/computer",
)
def test_configuration_check(self):
del self.backend._storage_path
# Make sure the file backend checks for a good storage dir
with self.assertRaises(ImproperlyConfigured):
self.backend()
def test_invalid_key_backslash(self):
# Ensure we don't allow directory-traversal.
# This is tested directly on _key_to_file, as load() will swallow
# a SuspiciousOperation in the same way as an OSError - by creating
# a new session, making it unclear whether the slashes were detected.
with self.assertRaises(InvalidSessionKey):
self.backend()._key_to_file("a\\b\\c")
def test_invalid_key_forwardslash(self):
# Ensure we don't allow directory-traversal
with self.assertRaises(InvalidSessionKey):
self.backend()._key_to_file("a/b/c")
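    # _key_to_file() maps a session key onto a path inside the storage
    # directory, so keys containing path separators are rejected with
    # InvalidSessionKey rather than being allowed to escape it.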
@override_settings(
SESSION_ENGINE="django.contrib.sessions.backends.file",
SESSION_COOKIE_AGE=0,
)
def test_clearsessions_command(self):
"""
Test clearsessions command for clearing expired sessions.
"""
storage_path = self.backend._get_storage_path()
file_prefix = settings.SESSION_COOKIE_NAME
def count_sessions():
return len(
[
session_file
for session_file in os.listdir(storage_path)
if session_file.startswith(file_prefix)
]
)
self.assertEqual(0, count_sessions())
# One object in the future
self.session["foo"] = "bar"
self.session.set_expiry(3600)
self.session.save()
# One object in the past
other_session = self.backend()
other_session["foo"] = "bar"
other_session.set_expiry(-3600)
other_session.save()
# One object in the present without an expiry (should be deleted since
# its modification time + SESSION_COOKIE_AGE will be in the past when
# clearsessions runs).
other_session2 = self.backend()
other_session2["foo"] = "bar"
other_session2.save()
# Three sessions are in the filesystem before clearsessions...
self.assertEqual(3, count_sessions())
management.call_command("clearsessions")
# ... and two are deleted.
self.assertEqual(1, count_sessions())
class FileSessionPathLibTests(FileSessionTests):
def mkdtemp(self):
tmp_dir = super().mkdtemp()
return Path(tmp_dir)
class CacheSessionTests(SessionTestsMixin, SimpleTestCase):
backend = CacheSession
# Some backends might issue a warning
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
def test_default_cache(self):
self.session.save()
self.assertIsNotNone(caches["default"].get(self.session.cache_key))
@override_settings(
CACHES={
"default": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache",
},
"sessions": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "session",
},
},
SESSION_CACHE_ALIAS="sessions",
)
def test_non_default_cache(self):
# Re-initialize the session backend to make use of overridden settings.
self.session = self.backend()
self.session.save()
self.assertIsNone(caches["default"].get(self.session.cache_key))
self.assertIsNotNone(caches["sessions"].get(self.session.cache_key))
def test_create_and_save(self):
self.session = self.backend()
self.session.create()
self.session.save()
self.assertIsNotNone(caches["default"].get(self.session.cache_key))
class SessionMiddlewareTests(TestCase):
request_factory = RequestFactory()
@staticmethod
def get_response_touching_session(request):
request.session["hello"] = "world"
return HttpResponse("Session test")
@override_settings(SESSION_COOKIE_SECURE=True)
def test_secure_session_cookie(self):
request = self.request_factory.get("/")
middleware = SessionMiddleware(self.get_response_touching_session)
# Handle the response through the middleware
response = middleware(request)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]["secure"], True)
@override_settings(SESSION_COOKIE_HTTPONLY=True)
def test_httponly_session_cookie(self):
request = self.request_factory.get("/")
middleware = SessionMiddleware(self.get_response_touching_session)
# Handle the response through the middleware
response = middleware(request)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]["httponly"], True)
self.assertIn(
cookies.Morsel._reserved["httponly"],
str(response.cookies[settings.SESSION_COOKIE_NAME]),
)
@override_settings(SESSION_COOKIE_SAMESITE="Strict")
def test_samesite_session_cookie(self):
request = self.request_factory.get("/")
middleware = SessionMiddleware(self.get_response_touching_session)
response = middleware(request)
self.assertEqual(
response.cookies[settings.SESSION_COOKIE_NAME]["samesite"], "Strict"
)
@override_settings(SESSION_COOKIE_HTTPONLY=False)
def test_no_httponly_session_cookie(self):
request = self.request_factory.get("/")
middleware = SessionMiddleware(self.get_response_touching_session)
response = middleware(request)
self.assertEqual(response.cookies[settings.SESSION_COOKIE_NAME]["httponly"], "")
self.assertNotIn(
cookies.Morsel._reserved["httponly"],
str(response.cookies[settings.SESSION_COOKIE_NAME]),
)
def test_session_save_on_500(self):
        def response_500(request):
response = HttpResponse("Horrible error")
response.status_code = 500
request.session["hello"] = "world"
return response
request = self.request_factory.get("/")
SessionMiddleware(response_500)(request)
# The value wasn't saved above.
self.assertNotIn("hello", request.session.load())
def test_session_update_error_redirect(self):
def response_delete_session(request):
request.session = DatabaseSession()
request.session.save(must_create=True)
request.session.delete()
return HttpResponse()
request = self.request_factory.get("/foo/")
middleware = SessionMiddleware(response_delete_session)
msg = (
"The request's session was deleted before the request completed. "
"The user may have logged out in a concurrent request, for example."
)
with self.assertRaisesMessage(SessionInterrupted, msg):
# Handle the response through the middleware. It will try to save
# the deleted session which will cause an UpdateError that's caught
# and raised as a SessionInterrupted.
middleware(request)
def test_session_delete_on_end(self):
def response_ending_session(request):
request.session.flush()
return HttpResponse("Session test")
request = self.request_factory.get("/")
middleware = SessionMiddleware(response_ending_session)
# Before deleting, there has to be an existing cookie
request.COOKIES[settings.SESSION_COOKIE_NAME] = "abc"
# Handle the response through the middleware
response = middleware(request)
# The cookie was deleted, not recreated.
# A deleted cookie header looks like:
# "Set-Cookie: sessionid=; expires=Thu, 01 Jan 1970 00:00:00 GMT; "
# "Max-Age=0; Path=/"
self.assertEqual(
'Set-Cookie: {}=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; '
"Max-Age=0; Path=/; SameSite={}".format(
settings.SESSION_COOKIE_NAME,
settings.SESSION_COOKIE_SAMESITE,
),
str(response.cookies[settings.SESSION_COOKIE_NAME]),
)
# SessionMiddleware sets 'Vary: Cookie' to prevent the 'Set-Cookie'
# from being cached.
self.assertEqual(response.headers["Vary"], "Cookie")
@override_settings(
SESSION_COOKIE_DOMAIN=".example.local", SESSION_COOKIE_PATH="/example/"
)
def test_session_delete_on_end_with_custom_domain_and_path(self):
def response_ending_session(request):
request.session.flush()
return HttpResponse("Session test")
request = self.request_factory.get("/")
middleware = SessionMiddleware(response_ending_session)
# Before deleting, there has to be an existing cookie
request.COOKIES[settings.SESSION_COOKIE_NAME] = "abc"
# Handle the response through the middleware
response = middleware(request)
# The cookie was deleted, not recreated.
# A deleted cookie header with a custom domain and path looks like:
# Set-Cookie: sessionid=; Domain=.example.local;
# expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0;
# Path=/example/
self.assertEqual(
'Set-Cookie: {}=""; Domain=.example.local; expires=Thu, '
"01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/example/; SameSite={}".format(
settings.SESSION_COOKIE_NAME,
settings.SESSION_COOKIE_SAMESITE,
),
str(response.cookies[settings.SESSION_COOKIE_NAME]),
)
def test_flush_empty_without_session_cookie_doesnt_set_cookie(self):
def response_ending_session(request):
request.session.flush()
return HttpResponse("Session test")
request = self.request_factory.get("/")
middleware = SessionMiddleware(response_ending_session)
# Handle the response through the middleware
response = middleware(request)
# A cookie should not be set.
self.assertEqual(response.cookies, {})
# The session is accessed so "Vary: Cookie" should be set.
self.assertEqual(response.headers["Vary"], "Cookie")
def test_empty_session_saved(self):
"""
If a session is emptied of data but still has a key, it should still
be updated.
"""
def response_set_session(request):
# Set a session key and some data.
request.session["foo"] = "bar"
return HttpResponse("Session test")
request = self.request_factory.get("/")
middleware = SessionMiddleware(response_set_session)
# Handle the response through the middleware.
response = middleware(request)
self.assertEqual(tuple(request.session.items()), (("foo", "bar"),))
# A cookie should be set, along with Vary: Cookie.
self.assertIn(
"Set-Cookie: sessionid=%s" % request.session.session_key,
str(response.cookies),
)
self.assertEqual(response.headers["Vary"], "Cookie")
# Empty the session data.
del request.session["foo"]
# Handle the response through the middleware.
response = HttpResponse("Session test")
response = middleware.process_response(request, response)
self.assertEqual(dict(request.session.values()), {})
session = Session.objects.get(session_key=request.session.session_key)
self.assertEqual(session.get_decoded(), {})
# While the session is empty, it hasn't been flushed so a cookie should
# still be set, along with Vary: Cookie.
self.assertGreater(len(request.session.session_key), 8)
self.assertIn(
"Set-Cookie: sessionid=%s" % request.session.session_key,
str(response.cookies),
)
self.assertEqual(response.headers["Vary"], "Cookie")
class CookieSessionTests(SessionTestsMixin, SimpleTestCase):
backend = CookieSession
def test_save(self):
"""
This test tested exists() in the other session backends, but that
doesn't make sense for us.
"""
pass
def test_cycle(self):
"""
        In other backends this test exercises cycle_key(), which creates a
        new session key for the same session data. Previously signed cookies
        can't be invalidated (other than by letting them expire naturally),
        so testing this behavior here is meaningless.
"""
pass
@unittest.expectedFailure
def test_actual_expiry(self):
# The cookie backend doesn't handle non-default expiry dates, see #19201
super().test_actual_expiry()
def test_unpickling_exception(self):
# signed_cookies backend should handle unpickle exceptions gracefully
# by creating a new session
self.assertEqual(self.session.serializer, JSONSerializer)
self.session.save()
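        # Simulate a tampered or undecodable cookie: load() should fail
        # gracefully and start a fresh, empty session instead of raising.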
with mock.patch("django.core.signing.loads", side_effect=ValueError):
self.session.load()
@unittest.skip(
"Cookie backend doesn't have an external store to create records in."
)
def test_session_load_does_not_create_record(self):
pass
@unittest.skip(
"CookieSession is stored in the client and there is no way to query it."
)
def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
pass
class ClearSessionsCommandTests(SimpleTestCase):
def test_clearsessions_unsupported(self):
msg = (
"Session engine 'sessions_tests.no_clear_expired' doesn't "
"support clearing expired sessions."
)
with self.settings(SESSION_ENGINE="sessions_tests.no_clear_expired"):
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command("clearsessions")
|
f500c7f32cdbc15b56cc9d26f5bf5dd3418731b6802f28e2306c6fff75971204 | """
Testing using the Test Client
The test client is a class that can act like a simple
browser for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
``Client`` objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the ``Client`` instance.
This is not intended as a replacement for Twill, Selenium, or
other browser automation frameworks - it is here to allow
testing against the contexts and templates produced by a view,
rather than the HTML rendered to the end-user.
"""
import itertools
import tempfile
from unittest import mock
from django.contrib.auth.models import User
from django.core import mail
from django.http import HttpResponse, HttpResponseNotAllowed
from django.test import (
AsyncRequestFactory,
Client,
RequestFactory,
SimpleTestCase,
TestCase,
modify_settings,
override_settings,
)
from django.urls import reverse_lazy
from django.utils.decorators import async_only_middleware
from django.views.generic import RedirectView
from .views import TwoArgException, get_view, post_view, trace_view
def middleware_urlconf(get_response):
def middleware(request):
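        # Setting request.urlconf overrides ROOT_URLCONF for this request only.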
request.urlconf = "test_client.urls_middleware_urlconf"
return get_response(request)
return middleware
@async_only_middleware
def async_middleware_urlconf(get_response):
async def middleware(request):
request.urlconf = "test_client.urls_middleware_urlconf"
return await get_response(request)
return middleware
@override_settings(ROOT_URLCONF="test_client.urls")
class ClientTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(username="testclient", password="password")
cls.u2 = User.objects.create_user(
username="inactive", password="password", is_active=False
)
def test_get_view(self):
"GET a view"
# The data is ignored, but let's check it doesn't crash the system
# anyway.
data = {"var": "\xf2"}
response = self.client.get("/get_view/", data)
# Check some response details
self.assertContains(response, "This is a test")
self.assertEqual(response.context["var"], "\xf2")
self.assertEqual(response.templates[0].name, "GET Template")
def test_query_string_encoding(self):
# WSGI requires latin-1 encoded strings.
response = self.client.get("/get_view/?var=1\ufffd")
self.assertEqual(response.context["var"], "1\ufffd")
def test_get_data_none(self):
msg = (
"Cannot encode None for key 'value' in a query string. Did you "
"mean to pass an empty string or omit the value?"
)
with self.assertRaisesMessage(TypeError, msg):
self.client.get("/get_view/", {"value": None})
def test_get_post_view(self):
"GET a view that normally expects POSTs"
response = self.client.get("/post_view/", {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "Empty GET Template")
self.assertTemplateUsed(response, "Empty GET Template")
self.assertTemplateNotUsed(response, "Empty POST Template")
def test_empty_post(self):
"POST an empty dictionary to a view"
response = self.client.post("/post_view/", {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "Empty POST Template")
self.assertTemplateNotUsed(response, "Empty GET Template")
self.assertTemplateUsed(response, "Empty POST Template")
def test_post(self):
"POST some data to a view"
post_data = {"value": 37}
response = self.client.post("/post_view/", post_data)
# Check some response details
self.assertContains(response, "Data received")
self.assertEqual(response.context["data"], "37")
self.assertEqual(response.templates[0].name, "POST Template")
def test_post_data_none(self):
msg = (
"Cannot encode None for key 'value' as POST data. Did you mean "
"to pass an empty string or omit the value?"
)
with self.assertRaisesMessage(TypeError, msg):
self.client.post("/post_view/", {"value": None})
def test_json_serialization(self):
"""The test client serializes JSON data."""
methods = ("post", "put", "patch", "delete")
tests = (
({"value": 37}, {"value": 37}),
([37, True], [37, True]),
((37, False), [37, False]),
)
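        # Tuples are serialized as JSON arrays, so they come back as lists.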
for method in methods:
with self.subTest(method=method):
for data, expected in tests:
with self.subTest(data):
client_method = getattr(self.client, method)
method_name = method.upper()
response = client_method(
"/json_view/", data, content_type="application/json"
)
self.assertContains(response, "Viewing %s page." % method_name)
self.assertEqual(response.context["data"], expected)
def test_json_encoder_argument(self):
"""The test Client accepts a json_encoder."""
mock_encoder = mock.MagicMock()
mock_encoding = mock.MagicMock()
mock_encoder.return_value = mock_encoding
mock_encoding.encode.return_value = '{"value": 37}'
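        # The client should instantiate json_encoder and use its encode()
        # result when serializing the POST payload.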
client = self.client_class(json_encoder=mock_encoder)
        # Vendor tree JSON content types (application/vnd.*+json) are accepted.
client.post(
"/json_view/", {"value": 37}, content_type="application/vnd.api+json"
)
self.assertTrue(mock_encoder.called)
self.assertTrue(mock_encoding.encode.called)
def test_put(self):
response = self.client.put("/put_view/", {"foo": "bar"})
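        # With the default application/octet-stream content type, the dict is
        # serialized with str(), giving the 14-byte body the view echoes back.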
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "PUT Template")
self.assertEqual(response.context["data"], "{'foo': 'bar'}")
self.assertEqual(response.context["Content-Length"], "14")
def test_trace(self):
"""TRACE a view"""
response = self.client.trace("/trace_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["method"], "TRACE")
self.assertEqual(response.templates[0].name, "TRACE Template")
def test_response_headers(self):
"Check the value of HTTP headers returned in a response"
response = self.client.get("/header_view/")
self.assertEqual(response.headers["X-DJANGO-TEST"], "Slartibartfast")
def test_response_attached_request(self):
"""
The returned response has a ``request`` attribute with the originating
environ dict and a ``wsgi_request`` with the originating WSGIRequest.
"""
response = self.client.get("/header_view/")
self.assertTrue(hasattr(response, "request"))
self.assertTrue(hasattr(response, "wsgi_request"))
for key, value in response.request.items():
self.assertIn(key, response.wsgi_request.environ)
self.assertEqual(response.wsgi_request.environ[key], value)
def test_response_resolver_match(self):
"""
The response contains a ResolverMatch instance.
"""
response = self.client.get("/header_view/")
self.assertTrue(hasattr(response, "resolver_match"))
def test_response_resolver_match_redirect_follow(self):
"""
The response ResolverMatch instance contains the correct
information when following redirects.
"""
response = self.client.get("/redirect_view/", follow=True)
self.assertEqual(response.resolver_match.url_name, "get_view")
def test_response_resolver_match_regular_view(self):
"""
The response ResolverMatch instance contains the correct
information when accessing a regular view.
"""
response = self.client.get("/get_view/")
self.assertEqual(response.resolver_match.url_name, "get_view")
def test_response_resolver_match_class_based_view(self):
"""
The response ResolverMatch instance can be used to access the CBV view
class.
"""
response = self.client.get("/accounts/")
self.assertIs(response.resolver_match.func.view_class, RedirectView)
@modify_settings(MIDDLEWARE={"prepend": "test_client.tests.middleware_urlconf"})
def test_response_resolver_match_middleware_urlconf(self):
response = self.client.get("/middleware_urlconf_view/")
self.assertEqual(response.resolver_match.url_name, "middleware_urlconf_view")
def test_raw_post(self):
"POST raw data (with a content type) to a view"
test_doc = """<?xml version="1.0" encoding="utf-8"?>
<library><book><title>Blink</title><author>Malcolm Gladwell</author></book>
</library>
"""
response = self.client.post(
"/raw_post_view/", test_doc, content_type="text/xml"
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "Book template")
self.assertEqual(response.content, b"Blink - Malcolm Gladwell")
def test_insecure(self):
"GET a URL through http"
response = self.client.get("/secure_view/", secure=False)
self.assertFalse(response.test_was_secure_request)
self.assertEqual(response.test_server_port, "80")
def test_secure(self):
"GET a URL through https"
response = self.client.get("/secure_view/", secure=True)
self.assertTrue(response.test_was_secure_request)
self.assertEqual(response.test_server_port, "443")
def test_redirect(self):
"GET a URL that redirects elsewhere"
response = self.client.get("/redirect_view/")
self.assertRedirects(response, "/get_view/")
def test_redirect_with_query(self):
"GET a URL that redirects with given GET parameters"
response = self.client.get("/redirect_view/", {"var": "value"})
self.assertRedirects(response, "/get_view/?var=value")
def test_redirect_with_query_ordering(self):
"""assertRedirects() ignores the order of query string parameters."""
response = self.client.get("/redirect_view/", {"var": "value", "foo": "bar"})
self.assertRedirects(response, "/get_view/?var=value&foo=bar")
self.assertRedirects(response, "/get_view/?foo=bar&var=value")
def test_permanent_redirect(self):
"GET a URL that redirects permanently elsewhere"
response = self.client.get("/permanent_redirect_view/")
self.assertRedirects(response, "/get_view/", status_code=301)
def test_temporary_redirect(self):
"GET a URL that does a non-permanent redirect"
response = self.client.get("/temporary_redirect_view/")
self.assertRedirects(response, "/get_view/", status_code=302)
def test_redirect_to_strange_location(self):
"GET a URL that redirects to a non-200 page"
response = self.client.get("/double_redirect_view/")
        # The response was a 302, and the attempt to get the redirect
        # location returned a 301.
self.assertRedirects(
response, "/permanent_redirect_view/", target_status_code=301
)
def test_follow_redirect(self):
"A URL that redirects can be followed to termination."
response = self.client.get("/double_redirect_view/", follow=True)
self.assertRedirects(
response, "/get_view/", status_code=302, target_status_code=200
)
self.assertEqual(len(response.redirect_chain), 2)
def test_follow_relative_redirect(self):
"A URL with a relative redirect can be followed."
response = self.client.get("/accounts/", follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request["PATH_INFO"], "/accounts/login/")
def test_follow_relative_redirect_no_trailing_slash(self):
"A URL with a relative redirect with no trailing slash can be followed."
response = self.client.get("/accounts/no_trailing_slash", follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request["PATH_INFO"], "/accounts/login/")
def test_redirect_to_querystring_only(self):
"""A URL that consists of a querystring only can be followed"""
response = self.client.post("/post_then_get_view/", follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request["PATH_INFO"], "/post_then_get_view/")
self.assertEqual(response.content, b"The value of success is true.")
def test_follow_307_and_308_redirect(self):
"""
A 307 or 308 redirect preserves the request method after the redirect.
"""
methods = ("get", "post", "head", "options", "put", "patch", "delete", "trace")
codes = (307, 308)
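        # Per RFC 7231 (307) and RFC 7538 (308), these redirects must
        # preserve the request method and body.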
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method(
"/redirect_view_%s/" % code, data={"value": "test"}, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request["PATH_INFO"], "/post_view/")
self.assertEqual(response.request["REQUEST_METHOD"], method.upper())
def test_follow_307_and_308_preserves_query_string(self):
methods = ("post", "options", "put", "patch", "delete", "trace")
codes = (307, 308)
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method(
"/redirect_view_%s_query_string/" % code,
data={"value": "test"},
follow=True,
)
self.assertRedirects(
response, "/post_view/?hello=world", status_code=code
)
self.assertEqual(response.request["QUERY_STRING"], "hello=world")
def test_follow_307_and_308_get_head_query_string(self):
methods = ("get", "head")
codes = (307, 308)
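        # For GET and HEAD, the original request data is re-sent with the
        # redirected request, replacing the query string from the Location
        # header.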
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method(
"/redirect_view_%s_query_string/" % code,
data={"value": "test"},
follow=True,
)
self.assertRedirects(
response, "/post_view/?hello=world", status_code=code
)
self.assertEqual(response.request["QUERY_STRING"], "value=test")
def test_follow_307_and_308_preserves_post_data(self):
for code in (307, 308):
with self.subTest(code=code):
response = self.client.post(
"/redirect_view_%s/" % code, data={"value": "test"}, follow=True
)
self.assertContains(response, "test is the value")
def test_follow_307_and_308_preserves_put_body(self):
for code in (307, 308):
with self.subTest(code=code):
response = self.client.put(
"/redirect_view_%s/?to=/put_view/" % code, data="a=b", follow=True
)
self.assertContains(response, "a=b is the body")
def test_follow_307_and_308_preserves_get_params(self):
data = {"var": 30, "to": "/get_view/"}
for code in (307, 308):
with self.subTest(code=code):
response = self.client.get(
"/redirect_view_%s/" % code, data=data, follow=True
)
self.assertContains(response, "30 is the value")
def test_redirect_http(self):
"""GET a URL that redirects to an HTTP URI."""
response = self.client.get("/http_redirect_view/", follow=True)
self.assertFalse(response.test_was_secure_request)
def test_redirect_https(self):
"""GET a URL that redirects to an HTTPS URI."""
response = self.client.get("/https_redirect_view/", follow=True)
self.assertTrue(response.test_was_secure_request)
def test_notfound_response(self):
"GET a URL that responds as '404:Not Found'"
response = self.client.get("/bad_view/")
self.assertContains(response, "MAGIC", status_code=404)
def test_valid_form(self):
"POST valid data to a form"
post_data = {
"text": "Hello World",
"email": "[email protected]",
"value": 37,
"single": "b",
"multi": ("b", "c", "e"),
}
response = self.client.post("/form_view/", post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Valid POST Template")
def test_valid_form_with_hints(self):
"GET a form, providing hints in the GET data"
hints = {"text": "Hello World", "multi": ("b", "c", "e")}
response = self.client.get("/form_view/", data=hints)
        # The multi-value data has been rolled out OK (no invalid-choice errors).
self.assertContains(response, "Select a valid choice.", 0)
self.assertTemplateUsed(response, "Form GET Template")
def test_incomplete_data_form(self):
"POST incomplete data to a form"
post_data = {"text": "Hello World", "value": 37}
response = self.client.post("/form_view/", post_data)
self.assertContains(response, "This field is required.", 3)
self.assertTemplateUsed(response, "Invalid POST Template")
form = response.context["form"]
self.assertFormError(form, "email", "This field is required.")
self.assertFormError(form, "single", "This field is required.")
self.assertFormError(form, "multi", "This field is required.")
def test_form_error(self):
"POST erroneous data to a form"
post_data = {
"text": "Hello World",
"email": "not an email address",
"value": 37,
"single": "b",
"multi": ("b", "c", "e"),
}
response = self.client.post("/form_view/", post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
self.assertFormError(
response.context["form"], "email", "Enter a valid email address."
)
def test_valid_form_with_template(self):
"POST valid data to a form using multiple templates"
post_data = {
"text": "Hello World",
"email": "[email protected]",
"value": 37,
"single": "b",
"multi": ("b", "c", "e"),
}
response = self.client.post("/form_view_with_template/", post_data)
self.assertContains(response, "POST data OK")
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, "base.html")
self.assertTemplateNotUsed(response, "Valid POST Template")
def test_incomplete_data_form_with_template(self):
"POST incomplete data to a form using multiple templates"
post_data = {"text": "Hello World", "value": 37}
response = self.client.post("/form_view_with_template/", post_data)
self.assertContains(response, "POST data has errors")
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, "base.html")
self.assertTemplateNotUsed(response, "Invalid POST Template")
form = response.context["form"]
self.assertFormError(form, "email", "This field is required.")
self.assertFormError(form, "single", "This field is required.")
self.assertFormError(form, "multi", "This field is required.")
def test_form_error_with_template(self):
"POST erroneous data to a form using multiple templates"
post_data = {
"text": "Hello World",
"email": "not an email address",
"value": 37,
"single": "b",
"multi": ("b", "c", "e"),
}
response = self.client.post("/form_view_with_template/", post_data)
self.assertContains(response, "POST data has errors")
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, "base.html")
self.assertTemplateNotUsed(response, "Invalid POST Template")
self.assertFormError(
response.context["form"], "email", "Enter a valid email address."
)
def test_unknown_page(self):
"GET an invalid URL"
response = self.client.get("/unknown_view/")
# The response was a 404
self.assertEqual(response.status_code, 404)
def test_url_parameters(self):
"Make sure that URL ;-parameters are not stripped."
response = self.client.get("/unknown_view/;some-parameter")
# The path in the response includes it (ignore that it's a 404)
self.assertEqual(response.request["PATH_INFO"], "/unknown_view/;some-parameter")
def test_view_with_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
@override_settings(
INSTALLED_APPS=["django.contrib.auth"],
SESSION_ENGINE="django.contrib.sessions.backends.file",
)
def test_view_with_login_when_sessions_app_is_not_installed(self):
self.test_view_with_login()
def test_view_with_force_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_method_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_method_view/")
self.assertRedirects(
response, "/accounts/login/?next=/login_protected_method_view/"
)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Request a page that requires a login
response = self.client.get("/login_protected_method_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_method_force_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_method_view/")
self.assertRedirects(
response, "/accounts/login/?next=/login_protected_method_view/"
)
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get("/login_protected_method_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_login_and_custom_redirect(self):
"""
Request a page that is protected with
@login_required(redirect_field_name='redirect_to')
"""
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view_custom_redirect/")
self.assertRedirects(
response,
"/accounts/login/?redirect_to=/login_protected_view_custom_redirect/",
)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Request a page that requires a login
response = self.client.get("/login_protected_view_custom_redirect/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_force_login_and_custom_redirect(self):
"""
Request a page that is protected with
@login_required(redirect_field_name='redirect_to')
"""
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view_custom_redirect/")
self.assertRedirects(
response,
"/accounts/login/?redirect_to=/login_protected_view_custom_redirect/",
)
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get("/login_protected_view_custom_redirect/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
def test_view_with_bad_login(self):
"Request a page that is protected with @login, but use bad credentials"
login = self.client.login(username="otheruser", password="nopassword")
self.assertFalse(login)
def test_view_with_inactive_login(self):
"""
        An inactive user may log in if the authentication backend allows it.
"""
credentials = {"username": "inactive", "password": "password"}
self.assertFalse(self.client.login(**credentials))
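        # The default ModelBackend rejects inactive users;
        # AllowAllUsersModelBackend authenticates them regardless of is_active.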
with self.settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.AllowAllUsersModelBackend"
]
):
self.assertTrue(self.client.login(**credentials))
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.ModelBackend",
"django.contrib.auth.backends.AllowAllUsersModelBackend",
]
)
def test_view_with_inactive_force_login(self):
"Request a page that is protected with @login, but use an inactive login"
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
# Log in
self.client.force_login(
self.u2, backend="django.contrib.auth.backends.AllowAllUsersModelBackend"
)
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "inactive")
def test_logout(self):
"Request a logout after logging in"
# Log in
self.client.login(username="testclient", password="password")
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
def test_logout_with_force_login(self):
"Request a logout after logging in"
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.ModelBackend",
"test_client.auth_backends.TestClientBackend",
],
)
def test_force_login_with_backend(self):
"""
Request a page that is protected with @login_required when using
force_login() and passing a backend.
"""
# Get the page without logging in. Should result in 302.
response = self.client.get("/login_protected_view/")
self.assertRedirects(response, "/accounts/login/?next=/login_protected_view/")
# Log in
self.client.force_login(
self.u1, backend="test_client.auth_backends.TestClientBackend"
)
self.assertEqual(self.u1.backend, "test_client.auth_backends.TestClientBackend")
# Request a page that requires a login
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.ModelBackend",
"test_client.auth_backends.TestClientBackend",
],
)
def test_force_login_without_backend(self):
"""
force_login() without passing a backend and with multiple backends
configured should automatically use the first backend.
"""
self.client.force_login(self.u1)
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["user"].username, "testclient")
self.assertEqual(self.u1.backend, "django.contrib.auth.backends.ModelBackend")
@override_settings(
AUTHENTICATION_BACKENDS=[
"test_client.auth_backends.BackendWithoutGetUserMethod",
"django.contrib.auth.backends.ModelBackend",
]
)
def test_force_login_with_backend_missing_get_user(self):
"""
force_login() skips auth backends without a get_user() method.
"""
self.client.force_login(self.u1)
self.assertEqual(self.u1.backend, "django.contrib.auth.backends.ModelBackend")
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.signed_cookies")
def test_logout_cookie_sessions(self):
self.test_logout()
def test_view_with_permissions(self):
"Request a page that is protected with @permission_required"
# Get the page without logging in. Should result in 302.
response = self.client.get("/permission_protected_view/")
self.assertRedirects(
response, "/accounts/login/?next=/permission_protected_view/"
)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Log in with wrong permissions. Should result in 302.
response = self.client.get("/permission_protected_view/")
self.assertRedirects(
response, "/accounts/login/?next=/permission_protected_view/"
)
# TODO: Log in with right permissions and request the page again
def test_view_with_permissions_exception(self):
"""
        Request a page that is protected with @permission_required and
        configured to raise an exception instead of redirecting.
"""
# Get the page without logging in. Should result in 403.
response = self.client.get("/permission_protected_view_exception/")
self.assertEqual(response.status_code, 403)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Log in with wrong permissions. Should result in 403.
response = self.client.get("/permission_protected_view_exception/")
self.assertEqual(response.status_code, 403)
def test_view_with_method_permissions(self):
"Request a page that is protected with a @permission_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get("/permission_protected_method_view/")
self.assertRedirects(
response, "/accounts/login/?next=/permission_protected_method_view/"
)
# Log in
login = self.client.login(username="testclient", password="password")
self.assertTrue(login, "Could not log in")
# Log in with wrong permissions. Should result in 302.
response = self.client.get("/permission_protected_method_view/")
self.assertRedirects(
response, "/accounts/login/?next=/permission_protected_method_view/"
)
# TODO: Log in with right permissions and request the page again
def test_external_redirect(self):
response = self.client.get("/django_project_redirect/")
self.assertRedirects(
response, "https://www.djangoproject.com/", fetch_redirect_response=False
)
def test_external_redirect_without_trailing_slash(self):
"""
Client._handle_redirects() with an empty path.
"""
response = self.client.get("/no_trailing_slash_external_redirect/", follow=True)
self.assertRedirects(response, "https://testserver")
def test_external_redirect_with_fetch_error_msg(self):
"""
assertRedirects without fetch_redirect_response=False raises
        a relevant ValueError rather than a nondescript AssertionError.
"""
response = self.client.get("/django_project_redirect/")
msg = (
"The test client is unable to fetch remote URLs (got "
"https://www.djangoproject.com/). If the host is served by Django, "
"add 'www.djangoproject.com' to ALLOWED_HOSTS. "
"Otherwise, use assertRedirects(..., fetch_redirect_response=False)."
)
with self.assertRaisesMessage(ValueError, msg):
self.assertRedirects(response, "https://www.djangoproject.com/")
def test_session_modifying_view(self):
"Request a page that modifies the session"
# Session value isn't set initially
with self.assertRaises(KeyError):
self.client.session["tobacconist"]
self.client.post("/session_view/")
# The session was modified
self.assertEqual(self.client.session["tobacconist"], "hovercraft")
@override_settings(
INSTALLED_APPS=[],
SESSION_ENGINE="django.contrib.sessions.backends.file",
)
def test_sessions_app_is_not_installed(self):
self.test_session_modifying_view()
@override_settings(
INSTALLED_APPS=[],
SESSION_ENGINE="django.contrib.sessions.backends.nonexistent",
)
def test_session_engine_is_invalid(self):
with self.assertRaisesMessage(ImportError, "nonexistent"):
self.test_session_modifying_view()
def test_view_with_exception(self):
"Request a page that is known to throw an error"
with self.assertRaises(KeyError):
self.client.get("/broken_view/")
def test_exc_info(self):
client = Client(raise_request_exception=False)
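        # With raise_request_exception=False, the exception is stored on
        # response.exc_info instead of being re-raised.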
response = client.get("/broken_view/")
self.assertEqual(response.status_code, 500)
exc_type, exc_value, exc_traceback = response.exc_info
self.assertIs(exc_type, KeyError)
self.assertIsInstance(exc_value, KeyError)
self.assertEqual(str(exc_value), "'Oops! Looks like you wrote some bad code.'")
self.assertIsNotNone(exc_traceback)
def test_exc_info_none(self):
response = self.client.get("/get_view/")
self.assertIsNone(response.exc_info)
def test_mail_sending(self):
"Mail is redirected to a dummy outbox during test setup"
response = self.client.get("/mail_sending_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Test message")
self.assertEqual(mail.outbox[0].body, "This is a test email")
self.assertEqual(mail.outbox[0].from_email, "[email protected]")
self.assertEqual(mail.outbox[0].to[0], "[email protected]")
self.assertEqual(mail.outbox[0].to[1], "[email protected]")
def test_reverse_lazy_decodes(self):
"reverse_lazy() works in the test client"
data = {"var": "data"}
response = self.client.get(reverse_lazy("get_view"), data)
# Check some response details
self.assertContains(response, "This is a test")
def test_relative_redirect(self):
response = self.client.get("/accounts/")
self.assertRedirects(response, "/accounts/login/")
def test_relative_redirect_no_trailing_slash(self):
response = self.client.get("/accounts/no_trailing_slash")
self.assertRedirects(response, "/accounts/login/")
def test_mass_mail_sending(self):
"Mass mail is redirected to a dummy outbox during test setup"
response = self.client.get("/mass_mail_sending_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, "First Test message")
self.assertEqual(mail.outbox[0].body, "This is the first test email")
self.assertEqual(mail.outbox[0].from_email, "[email protected]")
self.assertEqual(mail.outbox[0].to[0], "[email protected]")
self.assertEqual(mail.outbox[0].to[1], "[email protected]")
self.assertEqual(mail.outbox[1].subject, "Second Test message")
self.assertEqual(mail.outbox[1].body, "This is the second test email")
self.assertEqual(mail.outbox[1].from_email, "[email protected]")
self.assertEqual(mail.outbox[1].to[0], "[email protected]")
self.assertEqual(mail.outbox[1].to[1], "[email protected]")
def test_exception_following_nested_client_request(self):
"""
A nested test client request shouldn't clobber exception signals from
the outer client request.
"""
with self.assertRaisesMessage(Exception, "exception message"):
self.client.get("/nesting_exception_view/")
def test_response_raises_multi_arg_exception(self):
"""A request may raise an exception with more than one required arg."""
with self.assertRaises(TwoArgException) as cm:
self.client.get("/two_arg_exception/")
self.assertEqual(cm.exception.args, ("one", "two"))
def test_uploading_temp_file(self):
with tempfile.TemporaryFile() as test_file:
response = self.client.post("/upload_view/", data={"temp_file": test_file})
self.assertEqual(response.content, b"temp_file")
def test_uploading_named_temp_file(self):
with tempfile.NamedTemporaryFile() as test_file:
response = self.client.post(
"/upload_view/",
data={"named_temp_file": test_file},
)
self.assertEqual(response.content, b"named_temp_file")
@override_settings(
MIDDLEWARE=["django.middleware.csrf.CsrfViewMiddleware"],
ROOT_URLCONF="test_client.urls",
)
class CSRFEnabledClientTests(SimpleTestCase):
def test_csrf_enabled_client(self):
"A client can be instantiated with CSRF checks enabled"
csrf_client = Client(enforce_csrf_checks=True)
# The normal client allows the post
response = self.client.post("/post_view/", {})
self.assertEqual(response.status_code, 200)
# The CSRF-enabled client rejects it
response = csrf_client.post("/post_view/", {})
self.assertEqual(response.status_code, 403)
class CustomTestClient(Client):
i_am_customized = "Yes"
class CustomTestClientTest(SimpleTestCase):
client_class = CustomTestClient
def test_custom_test_client(self):
"""A test case can specify a custom class for self.client."""
self.assertIs(hasattr(self.client, "i_am_customized"), True)
def _generic_view(request):
return HttpResponse(status=200)
@override_settings(ROOT_URLCONF="test_client.urls")
class RequestFactoryTest(SimpleTestCase):
"""Tests for the request factory."""
# A mapping between names of HTTP/1.1 methods and their test views.
http_methods_and_views = (
("get", get_view),
("post", post_view),
("put", _generic_view),
("patch", _generic_view),
("delete", _generic_view),
("head", _generic_view),
("options", _generic_view),
("trace", trace_view),
)
request_factory = RequestFactory()
def test_request_factory(self):
"""The request factory implements all the HTTP/1.1 methods."""
for method_name, view in self.http_methods_and_views:
method = getattr(self.request_factory, method_name)
request = method("/somewhere/")
response = view(request)
self.assertEqual(response.status_code, 200)
def test_get_request_from_factory(self):
"""
        A GET request built by the factory returns a templated response when
        passed to the view.
"""
request = self.request_factory.get("/somewhere/")
response = get_view(request)
self.assertContains(response, "This is a test")
def test_trace_request_from_factory(self):
"""The request factory returns an echo response for a TRACE request."""
url_path = "/somewhere/"
request = self.request_factory.trace(url_path)
response = trace_view(request)
protocol = request.META["SERVER_PROTOCOL"]
echoed_request_line = "TRACE {} {}".format(url_path, protocol)
self.assertContains(response, echoed_request_line)
@override_settings(ROOT_URLCONF="test_client.urls")
class AsyncClientTest(TestCase):
async def test_response_resolver_match(self):
response = await self.async_client.get("/async_get_view/")
self.assertTrue(hasattr(response, "resolver_match"))
self.assertEqual(response.resolver_match.url_name, "async_get_view")
@modify_settings(
MIDDLEWARE={"prepend": "test_client.tests.async_middleware_urlconf"},
)
async def test_response_resolver_match_middleware_urlconf(self):
response = await self.async_client.get("/middleware_urlconf_view/")
self.assertEqual(response.resolver_match.url_name, "middleware_urlconf_view")
async def test_follow_parameter_not_implemented(self):
msg = "AsyncClient request methods do not accept the follow parameter."
tests = (
"get",
"post",
"put",
"patch",
"delete",
"head",
"options",
"trace",
)
for method_name in tests:
with self.subTest(method=method_name):
method = getattr(self.async_client, method_name)
with self.assertRaisesMessage(NotImplementedError, msg):
await method("/redirect_view/", follow=True)
async def test_get_data(self):
response = await self.async_client.get("/get_view/", {"var": "val"})
self.assertContains(response, "This is a test. val is the value.")
@override_settings(ROOT_URLCONF="test_client.urls")
class AsyncRequestFactoryTest(SimpleTestCase):
request_factory = AsyncRequestFactory()
async def test_request_factory(self):
tests = (
"get",
"post",
"put",
"patch",
"delete",
"head",
"options",
"trace",
)
for method_name in tests:
with self.subTest(method=method_name):
async def async_generic_view(request):
if request.method.lower() != method_name:
return HttpResponseNotAllowed(method_name)
return HttpResponse(status=200)
method = getattr(self.request_factory, method_name)
request = method("/somewhere/")
response = await async_generic_view(request)
self.assertEqual(response.status_code, 200)
async def test_request_factory_data(self):
async def async_generic_view(request):
return HttpResponse(status=200, content=request.body)
request = self.request_factory.post(
"/somewhere/",
data={"example": "data"},
content_type="application/json",
)
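        # len('{"example": "data"}') == 19, hence the Content-Length below.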
self.assertEqual(request.headers["content-length"], "19")
self.assertEqual(request.headers["content-type"], "application/json")
response = await async_generic_view(request)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'{"example": "data"}')
def test_request_factory_sets_headers(self):
request = self.request_factory.get(
"/somewhere/",
AUTHORIZATION="Bearer faketoken",
X_ANOTHER_HEADER="some other value",
)
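        # The extra keyword arguments become HTTP_* entries in request.META
        # and are exposed through the case-insensitive request.headers.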
self.assertEqual(request.headers["authorization"], "Bearer faketoken")
self.assertIn("HTTP_AUTHORIZATION", request.META)
self.assertEqual(request.headers["x-another-header"], "some other value")
self.assertIn("HTTP_X_ANOTHER_HEADER", request.META)
def test_request_factory_query_string(self):
request = self.request_factory.get("/somewhere/", {"example": "data"})
self.assertNotIn("Query-String", request.headers)
self.assertEqual(request.GET["example"], "data")
|