hash: 0c49c49052f14d849114ccf3628d31d634648dc18fc5a72986c3ce4792e0f1fd

from django.test import modify_settings
from . import PostgreSQLTestCase
from .models import CharFieldModel, TextFieldModel
try:
from django.contrib.postgres.search import (
TrigramDistance,
TrigramSimilarity,
TrigramWordDistance,
TrigramWordSimilarity,
)
except ImportError:
pass
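# The trigram_similar/trigram_word_similar lookups and the Trigram* database
# functions require the pg_trgm extension to be installed on the database.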
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class TrigramTest(PostgreSQLTestCase):
Model = CharFieldModel
@classmethod
def setUpTestData(cls):
cls.Model.objects.bulk_create(
[
cls.Model(field="Matthew"),
cls.Model(field="Cat sat on mat."),
cls.Model(field="Dog sat on rug."),
]
)
def test_trigram_search(self):
self.assertQuerysetEqual(
self.Model.objects.filter(field__trigram_similar="Mathew"),
["Matthew"],
transform=lambda instance: instance.field,
)
def test_trigram_word_search(self):
obj = self.Model.objects.create(
field="Gumby rides on the path of Middlesbrough",
)
self.assertSequenceEqual(
self.Model.objects.filter(field__trigram_word_similar="Middlesborough"),
[obj],
)
def test_trigram_similarity(self):
search = "Bat sat on cat."
# Round result of similarity because PostgreSQL 12+ uses greater
# precision.
self.assertQuerysetEqual(
self.Model.objects.filter(
field__trigram_similar=search,
)
.annotate(similarity=TrigramSimilarity("field", search))
.order_by("-similarity"),
[("Cat sat on mat.", 0.625), ("Dog sat on rug.", 0.333333)],
transform=lambda instance: (instance.field, round(instance.similarity, 6)),
ordered=True,
)
def test_trigram_word_similarity(self):
search = "mat"
self.assertSequenceEqual(
self.Model.objects.filter(
field__trigram_word_similar=search,
)
.annotate(
word_similarity=TrigramWordSimilarity(search, "field"),
)
.values("field", "word_similarity")
.order_by("-word_similarity"),
[
{"field": "Cat sat on mat.", "word_similarity": 1.0},
{"field": "Matthew", "word_similarity": 0.75},
],
)
def test_trigram_similarity_alternate(self):
# Round result of distance because PostgreSQL 12+ uses greater
# precision.
self.assertQuerysetEqual(
self.Model.objects.annotate(
distance=TrigramDistance("field", "Bat sat on cat."),
)
.filter(distance__lte=0.7)
.order_by("distance"),
[("Cat sat on mat.", 0.375), ("Dog sat on rug.", 0.666667)],
transform=lambda instance: (instance.field, round(instance.distance, 6)),
ordered=True,
)
def test_trigram_word_similarity_alternate(self):
self.assertSequenceEqual(
self.Model.objects.annotate(
word_distance=TrigramWordDistance("mat", "field"),
)
.filter(
word_distance__lte=0.7,
)
.values("field", "word_distance")
.order_by("word_distance"),
[
{"field": "Cat sat on mat.", "word_distance": 0},
{"field": "Matthew", "word_distance": 0.25},
],
)
class TrigramTextFieldTest(TrigramTest):
"""
TextField has the same behavior as CharField regarding trigram lookups.
"""
Model = TextFieldModel

hash: f555f4efa8c3b0e30cd70ca87421938c5a5cdff90b33e18ba1798a5258f67d5d

from django.db import connection
from django.db.models import (
CharField,
F,
Func,
IntegerField,
OuterRef,
Q,
Subquery,
Value,
)
from django.db.models.fields.json import KeyTextTransform, KeyTransform
from django.db.models.functions import Cast, Concat, Substr
from django.test import skipUnlessDBFeature
from django.test.utils import Approximate, ignore_warnings
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango50Warning
from . import PostgreSQLTestCase
from .models import AggregateTestModel, HotelReservation, Room, StatTestModel
try:
from django.contrib.postgres.aggregates import (
ArrayAgg,
BitAnd,
BitOr,
BitXor,
BoolAnd,
BoolOr,
Corr,
CovarPop,
JSONBAgg,
RegrAvgX,
RegrAvgY,
RegrCount,
RegrIntercept,
RegrR2,
RegrSlope,
RegrSXX,
RegrSXY,
RegrSYY,
StatAggregate,
StringAgg,
)
from django.contrib.postgres.fields import ArrayField
except ImportError:
pass # psycopg2 is not installed
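# General-purpose aggregates: ArrayAgg, BitAnd/BitOr/BitXor, BoolAnd/BoolOr,
# JSONBAgg, and StringAgg.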
class TestGeneralAggregate(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.aggs = AggregateTestModel.objects.bulk_create(
[
AggregateTestModel(
boolean_field=True,
char_field="Foo1",
text_field="Text1",
integer_field=0,
),
AggregateTestModel(
boolean_field=False,
char_field="Foo2",
text_field="Text2",
integer_field=1,
json_field={"lang": "pl"},
),
AggregateTestModel(
boolean_field=False,
char_field="Foo4",
text_field="Text4",
integer_field=2,
json_field={"lang": "en"},
),
AggregateTestModel(
boolean_field=True,
char_field="Foo3",
text_field="Text3",
integer_field=0,
json_field={"breed": "collie"},
),
]
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_empty_result_set(self):
AggregateTestModel.objects.all().delete()
tests = [
(ArrayAgg("char_field"), []),
(ArrayAgg("integer_field"), []),
(ArrayAgg("boolean_field"), []),
(BitAnd("integer_field"), None),
(BitOr("integer_field"), None),
(BoolAnd("boolean_field"), None),
(BoolOr("boolean_field"), None),
(JSONBAgg("integer_field"), []),
(StringAgg("char_field", delimiter=";"), ""),
]
if connection.features.has_bit_xor:
tests.append((BitXor("integer_field"), None))
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = AggregateTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = AggregateTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
def test_default_argument(self):
AggregateTestModel.objects.all().delete()
tests = [
(ArrayAgg("char_field", default=["<empty>"]), ["<empty>"]),
(ArrayAgg("integer_field", default=[0]), [0]),
(ArrayAgg("boolean_field", default=[False]), [False]),
(BitAnd("integer_field", default=0), 0),
(BitOr("integer_field", default=0), 0),
(BoolAnd("boolean_field", default=False), False),
(BoolOr("boolean_field", default=False), False),
(JSONBAgg("integer_field", default=Value('["<empty>"]')), ["<empty>"]),
(
StringAgg("char_field", delimiter=";", default=Value("<empty>")),
"<empty>",
),
]
if connection.features.has_bit_xor:
tests.append((BitXor("integer_field", default=0), 0))
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = AggregateTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = AggregateTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
def test_convert_value_deprecation(self):
AggregateTestModel.objects.all().delete()
queryset = AggregateTestModel.objects.all()
with self.assertWarnsMessage(
RemovedInDjango50Warning, ArrayAgg.deprecation_msg
):
queryset.aggregate(aggregation=ArrayAgg("boolean_field"))
with self.assertWarnsMessage(
RemovedInDjango50Warning, JSONBAgg.deprecation_msg
):
queryset.aggregate(aggregation=JSONBAgg("integer_field"))
with self.assertWarnsMessage(
RemovedInDjango50Warning, StringAgg.deprecation_msg
):
queryset.aggregate(aggregation=StringAgg("char_field", delimiter=";"))
# No warnings raised if default argument provided.
self.assertEqual(
queryset.aggregate(aggregation=ArrayAgg("boolean_field", default=None)),
{"aggregation": None},
)
self.assertEqual(
queryset.aggregate(aggregation=JSONBAgg("integer_field", default=None)),
{"aggregation": None},
)
self.assertEqual(
queryset.aggregate(
aggregation=StringAgg("char_field", delimiter=";", default=None),
),
{"aggregation": None},
)
self.assertEqual(
queryset.aggregate(
aggregation=ArrayAgg("boolean_field", default=Value([]))
),
{"aggregation": []},
)
self.assertEqual(
queryset.aggregate(
aggregation=JSONBAgg("integer_field", default=Value("[]"))
),
{"aggregation": []},
)
self.assertEqual(
queryset.aggregate(
aggregation=StringAgg("char_field", delimiter=";", default=Value("")),
),
{"aggregation": ""},
)
def test_array_agg_charfield(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg("char_field"))
self.assertEqual(values, {"arrayagg": ["Foo1", "Foo2", "Foo4", "Foo3"]})
def test_array_agg_charfield_ordering(self):
ordering_test_cases = (
(F("char_field").desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
(F("char_field").asc(), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(F("char_field"), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(
[F("boolean_field"), F("char_field").desc()],
["Foo4", "Foo2", "Foo3", "Foo1"],
),
(
(F("boolean_field"), F("char_field").desc()),
["Foo4", "Foo2", "Foo3", "Foo1"],
),
("char_field", ["Foo1", "Foo2", "Foo3", "Foo4"]),
("-char_field", ["Foo4", "Foo3", "Foo2", "Foo1"]),
(Concat("char_field", Value("@")), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(Concat("char_field", Value("@")).desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
(
(
Substr("char_field", 1, 1),
F("integer_field"),
Substr("char_field", 4, 1).desc(),
),
["Foo3", "Foo1", "Foo2", "Foo4"],
),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("char_field", ordering=ordering)
)
self.assertEqual(values, {"arrayagg": expected_output})
def test_array_agg_integerfield(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("integer_field")
)
self.assertEqual(values, {"arrayagg": [0, 1, 2, 0]})
def test_array_agg_integerfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("integer_field", ordering=F("integer_field").desc())
)
self.assertEqual(values, {"arrayagg": [2, 1, 0, 0]})
def test_array_agg_booleanfield(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("boolean_field")
)
self.assertEqual(values, {"arrayagg": [True, False, False, True]})
def test_array_agg_booleanfield_ordering(self):
ordering_test_cases = (
(F("boolean_field").asc(), [False, False, True, True]),
(F("boolean_field").desc(), [True, True, False, False]),
(F("boolean_field"), [False, False, True, True]),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("boolean_field", ordering=ordering)
)
self.assertEqual(values, {"arrayagg": expected_output})
def test_array_agg_jsonfield(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg(
KeyTransform("lang", "json_field"),
filter=Q(json_field__lang__isnull=False),
),
)
self.assertEqual(values, {"arrayagg": ["pl", "en"]})
def test_array_agg_jsonfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg(
KeyTransform("lang", "json_field"),
filter=Q(json_field__lang__isnull=False),
ordering=KeyTransform("lang", "json_field"),
),
)
self.assertEqual(values, {"arrayagg": ["en", "pl"]})
def test_array_agg_filter(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("integer_field", filter=Q(integer_field__gt=0)),
)
self.assertEqual(values, {"arrayagg": [1, 2]})
def test_array_agg_lookups(self):
aggr1 = AggregateTestModel.objects.create()
aggr2 = AggregateTestModel.objects.create()
StatTestModel.objects.create(related_field=aggr1, int1=1, int2=0)
StatTestModel.objects.create(related_field=aggr1, int1=2, int2=0)
StatTestModel.objects.create(related_field=aggr2, int1=3, int2=0)
StatTestModel.objects.create(related_field=aggr2, int1=4, int2=0)
qs = (
StatTestModel.objects.values("related_field")
.annotate(array=ArrayAgg("int1"))
.filter(array__overlap=[2])
.values_list("array", flat=True)
)
self.assertCountEqual(qs.get(), [1, 2])
def test_bit_and_general(self):
values = AggregateTestModel.objects.filter(integer_field__in=[0, 1]).aggregate(
bitand=BitAnd("integer_field")
)
self.assertEqual(values, {"bitand": 0})
def test_bit_and_on_only_true_values(self):
values = AggregateTestModel.objects.filter(integer_field=1).aggregate(
bitand=BitAnd("integer_field")
)
self.assertEqual(values, {"bitand": 1})
def test_bit_and_on_only_false_values(self):
values = AggregateTestModel.objects.filter(integer_field=0).aggregate(
bitand=BitAnd("integer_field")
)
self.assertEqual(values, {"bitand": 0})
def test_bit_or_general(self):
values = AggregateTestModel.objects.filter(integer_field__in=[0, 1]).aggregate(
bitor=BitOr("integer_field")
)
self.assertEqual(values, {"bitor": 1})
def test_bit_or_on_only_true_values(self):
values = AggregateTestModel.objects.filter(integer_field=1).aggregate(
bitor=BitOr("integer_field")
)
self.assertEqual(values, {"bitor": 1})
def test_bit_or_on_only_false_values(self):
values = AggregateTestModel.objects.filter(integer_field=0).aggregate(
bitor=BitOr("integer_field")
)
self.assertEqual(values, {"bitor": 0})
@skipUnlessDBFeature("has_bit_xor")
def test_bit_xor_general(self):
AggregateTestModel.objects.create(integer_field=3)
values = AggregateTestModel.objects.filter(
integer_field__in=[1, 3],
).aggregate(bitxor=BitXor("integer_field"))
self.assertEqual(values, {"bitxor": 2})
@skipUnlessDBFeature("has_bit_xor")
def test_bit_xor_on_only_true_values(self):
values = AggregateTestModel.objects.filter(
integer_field=1,
).aggregate(bitxor=BitXor("integer_field"))
self.assertEqual(values, {"bitxor": 1})
@skipUnlessDBFeature("has_bit_xor")
def test_bit_xor_on_only_false_values(self):
values = AggregateTestModel.objects.filter(
integer_field=0,
).aggregate(bitxor=BitXor("integer_field"))
self.assertEqual(values, {"bitxor": 0})
def test_bool_and_general(self):
values = AggregateTestModel.objects.aggregate(booland=BoolAnd("boolean_field"))
self.assertEqual(values, {"booland": False})
def test_bool_and_q_object(self):
values = AggregateTestModel.objects.aggregate(
booland=BoolAnd(Q(integer_field__gt=2)),
)
self.assertEqual(values, {"booland": False})
def test_bool_or_general(self):
values = AggregateTestModel.objects.aggregate(boolor=BoolOr("boolean_field"))
self.assertEqual(values, {"boolor": True})
def test_bool_or_q_object(self):
values = AggregateTestModel.objects.aggregate(
boolor=BoolOr(Q(integer_field__gt=2)),
)
self.assertEqual(values, {"boolor": False})
def test_string_agg_requires_delimiter(self):
with self.assertRaises(TypeError):
AggregateTestModel.objects.aggregate(stringagg=StringAgg("char_field"))
def test_string_agg_delimiter_escaping(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter="'")
)
self.assertEqual(values, {"stringagg": "Foo1'Foo2'Foo4'Foo3"})
def test_string_agg_charfield(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter=";")
)
self.assertEqual(values, {"stringagg": "Foo1;Foo2;Foo4;Foo3"})
def test_string_agg_default_output_field(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("text_field", delimiter=";"),
)
self.assertEqual(values, {"stringagg": "Text1;Text2;Text4;Text3"})
def test_string_agg_charfield_ordering(self):
ordering_test_cases = (
(F("char_field").desc(), "Foo4;Foo3;Foo2;Foo1"),
(F("char_field").asc(), "Foo1;Foo2;Foo3;Foo4"),
(F("char_field"), "Foo1;Foo2;Foo3;Foo4"),
("char_field", "Foo1;Foo2;Foo3;Foo4"),
("-char_field", "Foo4;Foo3;Foo2;Foo1"),
(Concat("char_field", Value("@")), "Foo1;Foo2;Foo3;Foo4"),
(Concat("char_field", Value("@")).desc(), "Foo4;Foo3;Foo2;Foo1"),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter=";", ordering=ordering)
)
self.assertEqual(values, {"stringagg": expected_output})
def test_string_agg_jsonfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg(
KeyTextTransform("lang", "json_field"),
delimiter=";",
ordering=KeyTextTransform("lang", "json_field"),
output_field=CharField(),
),
)
self.assertEqual(values, {"stringagg": "en;pl"})
def test_string_agg_filter(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg(
"char_field",
delimiter=";",
filter=Q(char_field__endswith="3") | Q(char_field__endswith="1"),
)
)
self.assertEqual(values, {"stringagg": "Foo1;Foo3"})
def test_orderable_agg_alternative_fields(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("integer_field", ordering=F("char_field").asc())
)
self.assertEqual(values, {"arrayagg": [0, 1, 0, 2]})
def test_jsonb_agg(self):
values = AggregateTestModel.objects.aggregate(jsonbagg=JSONBAgg("char_field"))
self.assertEqual(values, {"jsonbagg": ["Foo1", "Foo2", "Foo4", "Foo3"]})
def test_jsonb_agg_charfield_ordering(self):
ordering_test_cases = (
(F("char_field").desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
(F("char_field").asc(), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(F("char_field"), ["Foo1", "Foo2", "Foo3", "Foo4"]),
("char_field", ["Foo1", "Foo2", "Foo3", "Foo4"]),
("-char_field", ["Foo4", "Foo3", "Foo2", "Foo1"]),
(Concat("char_field", Value("@")), ["Foo1", "Foo2", "Foo3", "Foo4"]),
(Concat("char_field", Value("@")).desc(), ["Foo4", "Foo3", "Foo2", "Foo1"]),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("char_field", ordering=ordering),
)
self.assertEqual(values, {"jsonbagg": expected_output})
def test_jsonb_agg_integerfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("integer_field", ordering=F("integer_field").desc()),
)
self.assertEqual(values, {"jsonbagg": [2, 1, 0, 0]})
def test_jsonb_agg_booleanfield_ordering(self):
ordering_test_cases = (
(F("boolean_field").asc(), [False, False, True, True]),
(F("boolean_field").desc(), [True, True, False, False]),
(F("boolean_field"), [False, False, True, True]),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("boolean_field", ordering=ordering),
)
self.assertEqual(values, {"jsonbagg": expected_output})
def test_jsonb_agg_jsonfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg(
KeyTransform("lang", "json_field"),
filter=Q(json_field__lang__isnull=False),
ordering=KeyTransform("lang", "json_field"),
),
)
self.assertEqual(values, {"jsonbagg": ["en", "pl"]})
def test_jsonb_agg_key_index_transforms(self):
room101 = Room.objects.create(number=101)
room102 = Room.objects.create(number=102)
datetimes = [
timezone.datetime(2018, 6, 20),
timezone.datetime(2018, 6, 24),
timezone.datetime(2018, 6, 28),
]
HotelReservation.objects.create(
datespan=(datetimes[0].date(), datetimes[1].date()),
start=datetimes[0],
end=datetimes[1],
room=room102,
requirements={"double_bed": True, "parking": True},
)
HotelReservation.objects.create(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
requirements={"double_bed": False, "sea_view": True, "parking": False},
)
HotelReservation.objects.create(
datespan=(datetimes[0].date(), datetimes[2].date()),
start=datetimes[0],
end=datetimes[2],
room=room101,
requirements={"sea_view": False},
)
values = (
Room.objects.annotate(
requirements=JSONBAgg(
"hotelreservation__requirements",
ordering="-hotelreservation__start",
)
)
.filter(requirements__0__sea_view=True)
.values("number", "requirements")
)
self.assertSequenceEqual(
values,
[
{
"number": 102,
"requirements": [
{"double_bed": False, "sea_view": True, "parking": False},
{"double_bed": True, "parking": True},
],
},
],
)
def test_string_agg_array_agg_ordering_in_subquery(self):
stats = []
for i, agg in enumerate(AggregateTestModel.objects.order_by("char_field")):
stats.append(StatTestModel(related_field=agg, int1=i, int2=i + 1))
stats.append(StatTestModel(related_field=agg, int1=i + 1, int2=i))
StatTestModel.objects.bulk_create(stats)
for aggregate, expected_result in (
(
ArrayAgg("stattestmodel__int1", ordering="-stattestmodel__int2"),
[
("Foo1", [0, 1]),
("Foo2", [1, 2]),
("Foo3", [2, 3]),
("Foo4", [3, 4]),
],
),
(
StringAgg(
Cast("stattestmodel__int1", CharField()),
delimiter=";",
ordering="-stattestmodel__int2",
),
[("Foo1", "0;1"), ("Foo2", "1;2"), ("Foo3", "2;3"), ("Foo4", "3;4")],
),
):
with self.subTest(aggregate=aggregate.__class__.__name__):
subquery = (
AggregateTestModel.objects.filter(
pk=OuterRef("pk"),
)
.annotate(agg=aggregate)
.values("agg")
)
values = (
AggregateTestModel.objects.annotate(
agg=Subquery(subquery),
)
.order_by("char_field")
.values_list("char_field", "agg")
)
self.assertEqual(list(values), expected_result)
def test_string_agg_array_agg_filter_in_subquery(self):
StatTestModel.objects.bulk_create(
[
StatTestModel(related_field=self.aggs[0], int1=0, int2=5),
StatTestModel(related_field=self.aggs[0], int1=1, int2=4),
StatTestModel(related_field=self.aggs[0], int1=2, int2=3),
]
)
for aggregate, expected_result in (
(
ArrayAgg("stattestmodel__int1", filter=Q(stattestmodel__int2__gt=3)),
[("Foo1", [0, 1]), ("Foo2", None)],
),
(
StringAgg(
Cast("stattestmodel__int2", CharField()),
delimiter=";",
filter=Q(stattestmodel__int1__lt=2),
),
[("Foo1", "5;4"), ("Foo2", None)],
),
):
with self.subTest(aggregate=aggregate.__class__.__name__):
subquery = (
AggregateTestModel.objects.filter(
pk=OuterRef("pk"),
)
.annotate(agg=aggregate)
.values("agg")
)
values = (
AggregateTestModel.objects.annotate(
agg=Subquery(subquery),
)
.filter(
char_field__in=["Foo1", "Foo2"],
)
.order_by("char_field")
.values_list("char_field", "agg")
)
self.assertEqual(list(values), expected_result)
def test_string_agg_filter_in_subquery_with_exclude(self):
subquery = (
AggregateTestModel.objects.annotate(
stringagg=StringAgg(
"char_field",
delimiter=";",
filter=Q(char_field__endswith="1"),
)
)
.exclude(stringagg="")
.values("id")
)
self.assertSequenceEqual(
AggregateTestModel.objects.filter(id__in=Subquery(subquery)),
[self.aggs[0]],
)
def test_ordering_isnt_cleared_for_array_subquery(self):
inner_qs = AggregateTestModel.objects.order_by("-integer_field")
qs = AggregateTestModel.objects.annotate(
integers=Func(
Subquery(inner_qs.values("integer_field")),
function="ARRAY",
output_field=ArrayField(base_field=IntegerField()),
),
)
self.assertSequenceEqual(
qs.first().integers,
inner_qs.values_list("integer_field", flat=True),
)
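# distinct=True makes these aggregates drop duplicate values before aggregating.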
class TestAggregateDistinct(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
AggregateTestModel.objects.create(char_field="Foo")
AggregateTestModel.objects.create(char_field="Foo")
AggregateTestModel.objects.create(char_field="Bar")
def test_string_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter=" ", distinct=False)
)
self.assertEqual(values["stringagg"].count("Foo"), 2)
self.assertEqual(values["stringagg"].count("Bar"), 1)
def test_string_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg("char_field", delimiter=" ", distinct=True)
)
self.assertEqual(values["stringagg"].count("Foo"), 1)
self.assertEqual(values["stringagg"].count("Bar"), 1)
def test_array_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("char_field", distinct=False)
)
self.assertEqual(sorted(values["arrayagg"]), ["Bar", "Foo", "Foo"])
def test_array_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg("char_field", distinct=True)
)
self.assertEqual(sorted(values["arrayagg"]), ["Bar", "Foo"])
def test_jsonb_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("char_field", distinct=False),
)
self.assertEqual(sorted(values["jsonbagg"]), ["Bar", "Foo", "Foo"])
def test_jsonb_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(
jsonbagg=JSONBAgg("char_field", distinct=True),
)
self.assertEqual(sorted(values["jsonbagg"]), ["Bar", "Foo"])
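# Statistical aggregates (Corr, CovarPop, and the Regr* family), all built on
# the StatAggregate base class.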
class TestStatisticsAggregate(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
StatTestModel.objects.create(
int1=1,
int2=3,
related_field=AggregateTestModel.objects.create(integer_field=0),
)
StatTestModel.objects.create(
int1=2,
int2=2,
related_field=AggregateTestModel.objects.create(integer_field=1),
)
StatTestModel.objects.create(
int1=3,
int2=1,
related_field=AggregateTestModel.objects.create(integer_field=2),
)
# Tests for base class (StatAggregate)
def test_missing_arguments_raises_exception(self):
with self.assertRaisesMessage(ValueError, "Both y and x must be provided."):
StatAggregate(x=None, y=None)
def test_correct_source_expressions(self):
func = StatAggregate(x="test", y=13)
self.assertIsInstance(func.source_expressions[0], Value)
self.assertIsInstance(func.source_expressions[1], F)
def test_alias_is_required(self):
class SomeFunc(StatAggregate):
function = "TEST"
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
StatTestModel.objects.aggregate(SomeFunc(y="int2", x="int1"))
# Test aggregates
def test_empty_result_set(self):
StatTestModel.objects.all().delete()
tests = [
(Corr(y="int2", x="int1"), None),
(CovarPop(y="int2", x="int1"), None),
(CovarPop(y="int2", x="int1", sample=True), None),
(RegrAvgX(y="int2", x="int1"), None),
(RegrAvgY(y="int2", x="int1"), None),
(RegrCount(y="int2", x="int1"), 0),
(RegrIntercept(y="int2", x="int1"), None),
(RegrR2(y="int2", x="int1"), None),
(RegrSlope(y="int2", x="int1"), None),
(RegrSXX(y="int2", x="int1"), None),
(RegrSXY(y="int2", x="int1"), None),
(RegrSYY(y="int2", x="int1"), None),
]
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = StatTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = StatTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
def test_default_argument(self):
StatTestModel.objects.all().delete()
tests = [
(Corr(y="int2", x="int1", default=0), 0),
(CovarPop(y="int2", x="int1", default=0), 0),
(CovarPop(y="int2", x="int1", sample=True, default=0), 0),
(RegrAvgX(y="int2", x="int1", default=0), 0),
(RegrAvgY(y="int2", x="int1", default=0), 0),
# RegrCount() doesn't support the default argument.
(RegrIntercept(y="int2", x="int1", default=0), 0),
(RegrR2(y="int2", x="int1", default=0), 0),
(RegrSlope(y="int2", x="int1", default=0), 0),
(RegrSXX(y="int2", x="int1", default=0), 0),
(RegrSXY(y="int2", x="int1", default=0), 0),
(RegrSYY(y="int2", x="int1", default=0), 0),
]
for aggregation, expected_result in tests:
with self.subTest(aggregation=aggregation):
# Empty result with non-execution optimization.
with self.assertNumQueries(0):
values = StatTestModel.objects.none().aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
# Empty result when query must be executed.
with self.assertNumQueries(1):
values = StatTestModel.objects.aggregate(
aggregation=aggregation,
)
self.assertEqual(values, {"aggregation": expected_result})
def test_corr_general(self):
values = StatTestModel.objects.aggregate(corr=Corr(y="int2", x="int1"))
self.assertEqual(values, {"corr": -1.0})
def test_covar_pop_general(self):
values = StatTestModel.objects.aggregate(covarpop=CovarPop(y="int2", x="int1"))
self.assertEqual(values, {"covarpop": Approximate(-0.66, places=1)})
def test_covar_pop_sample(self):
values = StatTestModel.objects.aggregate(
covarpop=CovarPop(y="int2", x="int1", sample=True)
)
self.assertEqual(values, {"covarpop": -1.0})
def test_regr_avgx_general(self):
values = StatTestModel.objects.aggregate(regravgx=RegrAvgX(y="int2", x="int1"))
self.assertEqual(values, {"regravgx": 2.0})
def test_regr_avgy_general(self):
values = StatTestModel.objects.aggregate(regravgy=RegrAvgY(y="int2", x="int1"))
self.assertEqual(values, {"regravgy": 2.0})
def test_regr_count_general(self):
values = StatTestModel.objects.aggregate(
regrcount=RegrCount(y="int2", x="int1")
)
self.assertEqual(values, {"regrcount": 3})
def test_regr_count_default(self):
msg = "RegrCount does not allow default."
with self.assertRaisesMessage(TypeError, msg):
RegrCount(y="int2", x="int1", default=0)
def test_regr_intercept_general(self):
values = StatTestModel.objects.aggregate(
regrintercept=RegrIntercept(y="int2", x="int1")
)
self.assertEqual(values, {"regrintercept": 4})
def test_regr_r2_general(self):
values = StatTestModel.objects.aggregate(regrr2=RegrR2(y="int2", x="int1"))
self.assertEqual(values, {"regrr2": 1})
def test_regr_slope_general(self):
values = StatTestModel.objects.aggregate(
regrslope=RegrSlope(y="int2", x="int1")
)
self.assertEqual(values, {"regrslope": -1})
def test_regr_sxx_general(self):
values = StatTestModel.objects.aggregate(regrsxx=RegrSXX(y="int2", x="int1"))
self.assertEqual(values, {"regrsxx": 2.0})
def test_regr_sxy_general(self):
values = StatTestModel.objects.aggregate(regrsxy=RegrSXY(y="int2", x="int1"))
self.assertEqual(values, {"regrsxy": -2.0})
def test_regr_syy_general(self):
values = StatTestModel.objects.aggregate(regrsyy=RegrSYY(y="int2", x="int1"))
self.assertEqual(values, {"regrsyy": 2.0})
def test_regr_avgx_with_related_obj_and_number_as_argument(self):
"""
A more complex test to check that a JOIN on a field combined with a
number as an argument works as expected.
"""
values = StatTestModel.objects.aggregate(
complex_regravgx=RegrAvgX(y=5, x="related_field__integer_field")
)
self.assertEqual(values, {"complex_regravgx": 1.0})

hash: 63cae087a9a517a27d18b45293f2843eb6cfeaa5f1ba8108687d8ee5b1d20693

"""
Test PostgreSQL full text search.
These tests use dialogue from the 1975 film Monty Python and the Holy Grail.
All text copyright Python (Monty) Pictures. Thanks to sacred-texts.com for the
transcript.
"""
from django.db import connection
from django.db.models import F, Value
from django.test import modify_settings, skipUnlessDBFeature
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import Character, Line, LineSavedSearch, Scene
try:
from django.contrib.postgres.search import (
SearchConfig,
SearchHeadline,
SearchQuery,
SearchRank,
SearchVector,
)
except ImportError:
pass
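# Shared fixture data: scenes, characters, and lines of dialogue reused by the
# search test cases below.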
class GrailTestData:
@classmethod
def setUpTestData(cls):
cls.robin = Scene.objects.create(
scene="Scene 10", setting="The dark forest of Ewing"
)
cls.minstrel = Character.objects.create(name="Minstrel")
verses = [
(
"Bravely bold Sir Robin, rode forth from Camelot. "
"He was not afraid to die, o Brave Sir Robin. "
"He was not at all afraid to be killed in nasty ways. "
"Brave, brave, brave, brave Sir Robin"
),
(
"He was not in the least bit scared to be mashed into a pulp, "
"Or to have his eyes gouged out, and his elbows broken. "
"To have his kneecaps split, and his body burned away, "
"And his limbs all hacked and mangled, brave Sir Robin!"
),
(
"His head smashed in and his heart cut out, "
"And his liver removed and his bowels unplugged, "
"And his nostrils ripped and his bottom burned off,"
"And his --"
),
]
cls.verses = [
Line.objects.create(
scene=cls.robin,
character=cls.minstrel,
dialogue=verse,
)
for verse in verses
]
cls.verse0, cls.verse1, cls.verse2 = cls.verses
cls.witch_scene = Scene.objects.create(
scene="Scene 5", setting="Sir Bedemir's Castle"
)
bedemir = Character.objects.create(name="Bedemir")
crowd = Character.objects.create(name="Crowd")
witch = Character.objects.create(name="Witch")
duck = Character.objects.create(name="Duck")
cls.bedemir0 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue="We shall use my larger scales!",
dialogue_config="english",
)
cls.bedemir1 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue="Right, remove the supports!",
dialogue_config="english",
)
cls.duck = Line.objects.create(
scene=cls.witch_scene, character=duck, dialogue=None
)
cls.crowd = Line.objects.create(
scene=cls.witch_scene, character=crowd, dialogue="A witch! A witch!"
)
cls.witch = Line.objects.create(
scene=cls.witch_scene, character=witch, dialogue="It's a fair cop."
)
trojan_rabbit = Scene.objects.create(
scene="Scene 8", setting="The castle of Our Master Ruiz' de lu la Ramper"
)
guards = Character.objects.create(name="French Guards")
cls.french = Line.objects.create(
scene=trojan_rabbit,
character=guards,
dialogue="Oh. Un beau cadeau. Oui oui.",
dialogue_config="french",
)
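# The __search lookup matches a to_tsvector() of the column against a tsquery
# built from the right-hand side.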
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class SimpleSearchTest(GrailTestData, PostgreSQLTestCase):
def test_simple(self):
searched = Line.objects.filter(dialogue__search="elbows")
self.assertSequenceEqual(searched, [self.verse1])
def test_non_exact_match(self):
searched = Line.objects.filter(dialogue__search="hearts")
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.filter(dialogue__search="heart bowel")
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms_with_partial_match(self):
searched = Line.objects.filter(dialogue__search="Robin killed")
self.assertSequenceEqual(searched, [self.verse0])
def test_search_query_config(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery("nostrils", config="simple"),
)
self.assertSequenceEqual(searched, [self.verse2])
def test_search_with_F_expression(self):
# Non-matching query.
LineSavedSearch.objects.create(line=self.verse1, query="hearts")
# Matching query.
match = LineSavedSearch.objects.create(line=self.verse1, query="elbows")
for query_expression in [F("query"), SearchQuery(F("query"))]:
with self.subTest(query_expression):
searched = LineSavedSearch.objects.filter(
line__dialogue__search=query_expression,
)
self.assertSequenceEqual(searched, [match])
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class SearchVectorFieldTest(GrailTestData, PostgreSQLTestCase):
def test_existing_vector(self):
Line.objects.update(dialogue_search_vector=SearchVector("dialogue"))
searched = Line.objects.filter(
dialogue_search_vector=SearchQuery("Robin killed")
)
self.assertSequenceEqual(searched, [self.verse0])
def test_existing_vector_config_explicit(self):
Line.objects.update(dialogue_search_vector=SearchVector("dialogue"))
searched = Line.objects.filter(
dialogue_search_vector=SearchQuery("cadeaux", config="french")
)
self.assertSequenceEqual(searched, [self.french])
def test_single_coalesce_expression(self):
searched = Line.objects.annotate(search=SearchVector("dialogue")).filter(
search="cadeaux"
)
self.assertNotIn("COALESCE(COALESCE", str(searched.query))
class SearchConfigTests(PostgreSQLSimpleTestCase):
def test_from_parameter(self):
self.assertIsNone(SearchConfig.from_parameter(None))
self.assertEqual(SearchConfig.from_parameter("foo"), SearchConfig("foo"))
self.assertEqual(
SearchConfig.from_parameter(SearchConfig("bar")), SearchConfig("bar")
)
class MultipleFieldsTest(GrailTestData, PostgreSQLTestCase):
def test_simple_on_dialogue(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="elbows")
self.assertSequenceEqual(searched, [self.verse1])
def test_simple_on_scene(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="Forest")
self.assertCountEqual(searched, self.verses)
def test_non_exact_match(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="heart")
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="heart forest")
self.assertSequenceEqual(searched, [self.verse2])
def test_terms_adjacent(self):
searched = Line.objects.annotate(
search=SearchVector("character__name", "dialogue"),
).filter(search="minstrel")
self.assertCountEqual(searched, self.verses)
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="minstrelbravely")
self.assertSequenceEqual(searched, [])
def test_search_with_null(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search="bedemir")
self.assertCountEqual(
searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck]
)
def test_search_with_non_text(self):
searched = Line.objects.annotate(
search=SearchVector("id"),
).filter(search=str(self.crowd.id))
self.assertSequenceEqual(searched, [self.crowd])
def test_phrase_search(self):
line_qs = Line.objects.annotate(search=SearchVector("dialogue"))
searched = line_qs.filter(
search=SearchQuery("burned body his away", search_type="phrase")
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery("his body burned away", search_type="phrase")
)
self.assertSequenceEqual(searched, [self.verse1])
def test_phrase_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
)
searched = line_qs.filter(
search=SearchQuery("cadeau beau un", search_type="phrase", config="french"),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery("un beau cadeau", search_type="phrase", config="french"),
)
self.assertSequenceEqual(searched, [self.french])
def test_raw_search(self):
line_qs = Line.objects.annotate(search=SearchVector("dialogue"))
searched = line_qs.filter(search=SearchQuery("Robin", search_type="raw"))
self.assertCountEqual(searched, [self.verse0, self.verse1])
searched = line_qs.filter(
search=SearchQuery("Robin & !'Camelot'", search_type="raw")
)
self.assertSequenceEqual(searched, [self.verse1])
def test_raw_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector("dialogue", config="french")
)
searched = line_qs.filter(
search=SearchQuery(
"'cadeaux' & 'beaux'", search_type="raw", config="french"
),
)
self.assertSequenceEqual(searched, [self.french])
@skipUnlessDBFeature("has_websearch_to_tsquery")
def test_web_search(self):
line_qs = Line.objects.annotate(search=SearchVector("dialogue"))
searched = line_qs.filter(
search=SearchQuery(
'"burned body" "split kneecaps"',
search_type="websearch",
),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery(
'"body burned" "kneecaps split" -"nostrils"',
search_type="websearch",
),
)
self.assertSequenceEqual(searched, [self.verse1])
searched = line_qs.filter(
search=SearchQuery(
'"Sir Robin" ("kneecaps" OR "Camelot")',
search_type="websearch",
),
)
self.assertSequenceEqual(searched, [self.verse0, self.verse1])
@skipUnlessDBFeature("has_websearch_to_tsquery")
def test_web_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
)
searched = line_qs.filter(
search=SearchQuery(
"cadeau -beau", search_type="websearch", config="french"
),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery("beau cadeau", search_type="websearch", config="french"),
)
self.assertSequenceEqual(searched, [self.french])
def test_bad_search_type(self):
with self.assertRaisesMessage(
ValueError, "Unknown search_type argument 'foo'."
):
SearchQuery("kneecaps", search_type="foo")
def test_config_query_explicit(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
).filter(search=SearchQuery("cadeaux", config="french"))
self.assertSequenceEqual(searched, [self.french])
def test_config_query_implicit(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue", config="french"),
).filter(search="cadeaux")
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_explicit(self):
searched = Line.objects.annotate(
search=SearchVector(
"scene__setting", "dialogue", config=F("dialogue_config")
),
).filter(search=SearchQuery("cadeaux", config=F("dialogue_config")))
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_implicit(self):
searched = Line.objects.annotate(
search=SearchVector(
"scene__setting", "dialogue", config=F("dialogue_config")
),
).filter(search="cadeaux")
self.assertSequenceEqual(searched, [self.french])
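# SearchVector instances are combined with +; SearchQuery instances are
# combined with &, |, and ~.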
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class TestCombinations(GrailTestData, PostgreSQLTestCase):
def test_vector_add(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting") + SearchVector("character__name"),
).filter(search="bedemir")
self.assertCountEqual(
searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck]
)
def test_vector_add_multi(self):
searched = Line.objects.annotate(
search=(
SearchVector("scene__setting")
+ SearchVector("character__name")
+ SearchVector("dialogue")
),
).filter(search="bedemir")
self.assertCountEqual(
searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck]
)
def test_vector_combined_mismatch(self):
msg = (
"SearchVector can only be combined with other SearchVector "
"instances, got NoneType."
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None + SearchVector("character__name"))
def test_combine_different_vector_configs(self):
searched = Line.objects.annotate(
search=(
SearchVector("dialogue", config="english")
+ SearchVector("dialogue", config="french")
),
).filter(
search=SearchQuery("cadeaux", config="french") | SearchQuery("nostrils")
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_query_and(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(search=SearchQuery("bedemir") & SearchQuery("scales"))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_multiple_and(self):
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(
search=SearchQuery("bedemir")
& SearchQuery("scales")
& SearchQuery("nostrils")
)
self.assertSequenceEqual(searched, [])
searched = Line.objects.annotate(
search=SearchVector("scene__setting", "dialogue"),
).filter(
search=SearchQuery("shall") & SearchQuery("use") & SearchQuery("larger")
)
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_or(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery("kneecaps") | SearchQuery("nostrils")
)
self.assertCountEqual(searched, [self.verse1, self.verse2])
def test_query_multiple_or(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery("kneecaps")
| SearchQuery("nostrils")
| SearchQuery("Sir Robin")
)
self.assertCountEqual(searched, [self.verse1, self.verse2, self.verse0])
def test_query_invert(self):
searched = Line.objects.filter(
character=self.minstrel, dialogue__search=~SearchQuery("kneecaps")
)
self.assertCountEqual(searched, [self.verse0, self.verse2])
def test_combine_different_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery("cadeau", config="french")
| SearchQuery("nostrils", config="english")
)
)
self.assertCountEqual(searched, [self.french, self.verse2])
def test_combined_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery("nostrils", config="simple")
& SearchQuery("bowels", config="simple")
),
)
self.assertSequenceEqual(searched, [self.verse2])
def test_combine_raw_phrase(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery("burn:*", search_type="raw", config="simple")
| SearchQuery("rode forth from Camelot", search_type="phrase")
)
)
self.assertCountEqual(searched, [self.verse0, self.verse1, self.verse2])
def test_query_combined_mismatch(self):
msg = (
"SearchQuery can only be combined with other SearchQuery "
"instances, got NoneType."
)
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None | SearchQuery("kneecaps"))
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None & SearchQuery("kneecaps"))
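# SearchRank scoring, including per-field weights, cover_density, and
# normalization options.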
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class TestRankingAndWeights(GrailTestData, PostgreSQLTestCase):
def test_ranking(self):
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"), SearchQuery("brave sir robin")
),
)
.order_by("rank")
)
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_rank_passing_untyped_args(self):
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank("dialogue", "brave sir robin"),
)
.order_by("rank")
)
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_weights_in_vector(self):
vector = SearchVector("dialogue", weight="A") + SearchVector(
"character__name", weight="D"
)
searched = (
Line.objects.filter(scene=self.witch_scene)
.annotate(
rank=SearchRank(vector, SearchQuery("witch")),
)
.order_by("-rank")[:2]
)
self.assertSequenceEqual(searched, [self.crowd, self.witch])
vector = SearchVector("dialogue", weight="D") + SearchVector(
"character__name", weight="A"
)
searched = (
Line.objects.filter(scene=self.witch_scene)
.annotate(
rank=SearchRank(vector, SearchQuery("witch")),
)
.order_by("-rank")[:2]
)
self.assertSequenceEqual(searched, [self.witch, self.crowd])
def test_ranked_custom_weights(self):
vector = SearchVector("dialogue", weight="D") + SearchVector(
"character__name", weight="A"
)
searched = (
Line.objects.filter(scene=self.witch_scene)
.annotate(
rank=SearchRank(vector, SearchQuery("witch"), weights=[1, 0, 0, 0.5]),
)
.order_by("-rank")[:2]
)
self.assertSequenceEqual(searched, [self.crowd, self.witch])
def test_ranking_chaining(self):
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"), SearchQuery("brave sir robin")
),
)
.filter(rank__gt=0.3)
)
self.assertSequenceEqual(searched, [self.verse0])
def test_cover_density_ranking(self):
not_dense_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue=(
"Bravely taking to his feet, he beat a very brave retreat. "
"A brave retreat brave Sir Robin."
),
)
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"),
SearchQuery("brave robin"),
cover_density=True,
),
)
.order_by("rank", "-pk")
)
self.assertSequenceEqual(
searched,
[self.verse2, not_dense_verse, self.verse1, self.verse0],
)
def test_ranking_with_normalization(self):
short_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue="A brave retreat brave Sir Robin.",
)
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"),
SearchQuery("brave sir robin"),
# Divide the rank by the document length.
normalization=2,
),
)
.order_by("rank")
)
self.assertSequenceEqual(
searched,
[self.verse2, self.verse1, self.verse0, short_verse],
)
def test_ranking_with_masked_normalization(self):
short_verse = Line.objects.create(
scene=self.robin,
character=self.minstrel,
dialogue="A brave retreat brave Sir Robin.",
)
searched = (
Line.objects.filter(character=self.minstrel)
.annotate(
rank=SearchRank(
SearchVector("dialogue"),
SearchQuery("brave sir robin"),
# Divide the rank by the document length and by the number of
# unique words in document.
normalization=Value(2).bitor(Value(8)),
),
)
.order_by("rank")
)
self.assertSequenceEqual(
searched,
[self.verse2, self.verse1, self.verse0, short_verse],
)
class SearchVectorIndexTests(PostgreSQLTestCase):
def test_search_vector_index(self):
"""SearchVector generates IMMUTABLE SQL in order to be indexable."""
# This test should be moved to test_indexes and use a functional
# index instead once support lands (see #26167).
query = Line.objects.all().query
resolved = SearchVector("id", "dialogue", config="english").resolve_expression(
query
)
compiler = query.get_compiler(connection.alias)
sql, params = resolved.as_sql(compiler, connection)
# Indexed function must be IMMUTABLE.
with connection.cursor() as cursor:
cursor.execute(
"CREATE INDEX search_vector_index ON %s USING GIN (%s)"
% (Line._meta.db_table, sql),
params,
)
class SearchQueryTests(PostgreSQLSimpleTestCase):
def test_str(self):
tests = (
(~SearchQuery("a"), "~SearchQuery(Value('a'))"),
(
(SearchQuery("a") | SearchQuery("b"))
& (SearchQuery("c") | SearchQuery("d")),
"((SearchQuery(Value('a')) || SearchQuery(Value('b'))) && "
"(SearchQuery(Value('c')) || SearchQuery(Value('d'))))",
),
(
SearchQuery("a") & (SearchQuery("b") | SearchQuery("c")),
"(SearchQuery(Value('a')) && (SearchQuery(Value('b')) || "
"SearchQuery(Value('c'))))",
),
(
(SearchQuery("a") | SearchQuery("b")) & SearchQuery("c"),
"((SearchQuery(Value('a')) || SearchQuery(Value('b'))) && "
"SearchQuery(Value('c')))",
),
(
SearchQuery("a")
& (SearchQuery("b") & (SearchQuery("c") | SearchQuery("d"))),
"(SearchQuery(Value('a')) && (SearchQuery(Value('b')) && "
"(SearchQuery(Value('c')) || SearchQuery(Value('d')))))",
),
)
for query, expected_str in tests:
with self.subTest(query=query):
self.assertEqual(str(query), expected_str)
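# SearchHeadline wraps PostgreSQL's ts_headline() to highlight query matches.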
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class SearchHeadlineTests(GrailTestData, PostgreSQLTestCase):
def test_headline(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
F("dialogue"),
SearchQuery("brave sir robin"),
config=SearchConfig("english"),
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"<b>Robin</b>. He was not at all afraid to be killed in nasty "
"ways. <b>Brave</b>, <b>brave</b>, <b>brave</b>, <b>brave</b> "
"<b>Sir</b> <b>Robin</b>",
)
def test_headline_untyped_args(self):
searched = Line.objects.annotate(
headline=SearchHeadline("dialogue", "killed", config="english"),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"Robin. He was not at all afraid to be <b>killed</b> in nasty "
"ways. Brave, brave, brave, brave Sir Robin",
)
def test_headline_with_config(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("cadeaux", config="french"),
config="french",
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
"Oh. Un beau <b>cadeau</b>. Oui oui.",
)
def test_headline_with_config_from_field(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("cadeaux", config=F("dialogue_config")),
config=F("dialogue_config"),
),
).get(pk=self.french.pk)
self.assertEqual(
searched.headline,
"Oh. Un beau <b>cadeau</b>. Oui oui.",
)
def test_headline_separator_options(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
"brave sir robin",
start_sel="<span>",
stop_sel="</span>",
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"<span>Robin</span>. He was not at all afraid to be killed in "
"nasty ways. <span>Brave</span>, <span>brave</span>, <span>brave"
"</span>, <span>brave</span> <span>Sir</span> <span>Robin</span>",
)
def test_headline_highlight_all_option(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("brave sir robin", config="english"),
highlight_all=True,
),
).get(pk=self.verse0.pk)
self.assertIn(
"<b>Bravely</b> bold <b>Sir</b> <b>Robin</b>, rode forth from "
"Camelot. He was not afraid to die, o ",
searched.headline,
)
def test_headline_short_word_option(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("Camelot", config="english"),
short_word=5,
min_words=8,
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
(
"<b>Camelot</b>. He was not afraid to die, o Brave Sir Robin. He "
"was not at all afraid"
),
)
def test_headline_fragments_words_options(self):
searched = Line.objects.annotate(
headline=SearchHeadline(
"dialogue",
SearchQuery("brave sir robin", config="english"),
fragment_delimiter="...<br>",
max_fragments=4,
max_words=3,
min_words=1,
),
).get(pk=self.verse0.pk)
self.assertEqual(
searched.headline,
"<b>Sir</b> <b>Robin</b>, rode...<br>"
"<b>Brave</b> <b>Sir</b> <b>Robin</b>...<br>"
"<b>Brave</b>, <b>brave</b>, <b>brave</b>...<br>"
"<b>brave</b> <b>Sir</b> <b>Robin</b>",
)

hash: c1ec5d0c69d722edcd99beb4e316e4a3f80f146d75db991532e546435d4ab22c

import datetime
import json
from decimal import Decimal
from django import forms
from django.core import exceptions, serializers
from django.db.models import DateField, DateTimeField, F, Func, Value
from django.http import QueryDict
from django.test import override_settings
from django.test.utils import isolate_apps
from django.utils import timezone
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import (
BigAutoFieldModel,
PostgreSQLModel,
RangeLookupsModel,
RangesModel,
SmallAutoFieldModel,
)
try:
from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange
from django.contrib.postgres import fields as pg_fields
from django.contrib.postgres import forms as pg_forms
from django.contrib.postgres.validators import (
RangeMaxValueValidator,
RangeMinValueValidator,
)
except ImportError:
pass  # psycopg2 is not installed
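# Field-level behavior of range fields: get_field_display() with nested
# (grouped) choices, default_bounds handling, and deconstruct().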
@isolate_apps("postgres_tests")
class BasicTests(PostgreSQLSimpleTestCase):
def test_get_field_display(self):
class Model(PostgreSQLModel):
field = pg_fields.IntegerRangeField(
choices=[
["1-50", [((1, 25), "1-25"), ([26, 50], "26-50")]],
((51, 100), "51-100"),
],
)
tests = (
((1, 25), "1-25"),
([26, 50], "26-50"),
((51, 100), "51-100"),
((1, 2), "(1, 2)"),
([1, 2], "[1, 2]"),
)
for value, display in tests:
with self.subTest(value=value, display=display):
instance = Model(field=value)
self.assertEqual(instance.get_field_display(), display)
def test_discrete_range_fields_unsupported_default_bounds(self):
discrete_range_types = [
pg_fields.BigIntegerRangeField,
pg_fields.IntegerRangeField,
pg_fields.DateRangeField,
]
for field_type in discrete_range_types:
msg = f"Cannot use 'default_bounds' with {field_type.__name__}."
with self.assertRaisesMessage(TypeError, msg):
field_type(choices=[((51, 100), "51-100")], default_bounds="[]")
def test_continuous_range_fields_default_bounds(self):
continuous_range_types = [
pg_fields.DecimalRangeField,
pg_fields.DateTimeRangeField,
]
for field_type in continuous_range_types:
field = field_type(choices=[((51, 100), "51-100")], default_bounds="[]")
self.assertEqual(field.default_bounds, "[]")
def test_invalid_default_bounds(self):
tests = [")]", ")[", "](", "])", "([", "[(", "x", "", None]
msg = "default_bounds must be one of '[)', '(]', '()', or '[]'."
for invalid_bounds in tests:
with self.assertRaisesMessage(ValueError, msg):
pg_fields.DecimalRangeField(default_bounds=invalid_bounds)
def test_deconstruct(self):
field = pg_fields.DecimalRangeField()
*_, kwargs = field.deconstruct()
self.assertEqual(kwargs, {})
field = pg_fields.DecimalRangeField(default_bounds="[]")
*_, kwargs = field.deconstruct()
self.assertEqual(kwargs, {"default_bounds": "[]"})
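# Round-trip save/load of range values: Range objects, tuples, custom bounds,
# unbounded, empty, and None.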
class TestSaveLoad(PostgreSQLTestCase):
def test_all_fields(self):
now = timezone.now()
instance = RangesModel(
ints=NumericRange(0, 10),
bigints=NumericRange(10, 20),
decimals=NumericRange(20, 30),
timestamps=DateTimeTZRange(now - datetime.timedelta(hours=1), now),
dates=DateRange(now.date() - datetime.timedelta(days=1), now.date()),
)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(instance.ints, loaded.ints)
self.assertEqual(instance.bigints, loaded.bigints)
self.assertEqual(instance.decimals, loaded.decimals)
self.assertEqual(instance.timestamps, loaded.timestamps)
self.assertEqual(instance.dates, loaded.dates)
def test_range_object(self):
r = NumericRange(0, 10)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_tuple(self):
instance = RangesModel(ints=(0, 10))
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(NumericRange(0, 10), loaded.ints)
def test_tuple_range_with_default_bounds(self):
range_ = (timezone.now(), timezone.now() + datetime.timedelta(hours=1))
RangesModel.objects.create(timestamps_closed_bounds=range_, timestamps=range_)
loaded = RangesModel.objects.get()
self.assertEqual(
loaded.timestamps_closed_bounds,
DateTimeTZRange(range_[0], range_[1], "[]"),
)
self.assertEqual(
loaded.timestamps,
DateTimeTZRange(range_[0], range_[1], "[)"),
)
def test_range_object_boundaries(self):
r = NumericRange(0, 10, "[]")
instance = RangesModel(decimals=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.decimals)
self.assertIn(10, loaded.decimals)
def test_range_object_boundaries_range_with_default_bounds(self):
range_ = DateTimeTZRange(
timezone.now(),
timezone.now() + datetime.timedelta(hours=1),
bounds="()",
)
RangesModel.objects.create(timestamps_closed_bounds=range_)
loaded = RangesModel.objects.get()
self.assertEqual(loaded.timestamps_closed_bounds, range_)
def test_unbounded(self):
r = NumericRange(None, None, "()")
instance = RangesModel(decimals=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.decimals)
def test_empty(self):
r = NumericRange(empty=True)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_null(self):
instance = RangesModel(ints=None)
instance.save()
loaded = RangesModel.objects.get()
self.assertIsNone(loaded.ints)
def test_model_set_on_base_field(self):
instance = RangesModel()
field = instance._meta.get_field("ints")
self.assertEqual(field.model, RangesModel)
self.assertEqual(field.base_field.model, RangesModel)
class TestRangeContainsLookup(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.timestamps = [
datetime.datetime(year=2016, month=1, day=1),
datetime.datetime(year=2016, month=1, day=2, hour=1),
datetime.datetime(year=2016, month=1, day=2, hour=12),
datetime.datetime(year=2016, month=1, day=3),
datetime.datetime(year=2016, month=1, day=3, hour=1),
datetime.datetime(year=2016, month=2, day=2),
]
cls.aware_timestamps = [
timezone.make_aware(timestamp) for timestamp in cls.timestamps
]
cls.dates = [
datetime.date(year=2016, month=1, day=1),
datetime.date(year=2016, month=1, day=2),
datetime.date(year=2016, month=1, day=3),
datetime.date(year=2016, month=1, day=4),
datetime.date(year=2016, month=2, day=2),
datetime.date(year=2016, month=2, day=3),
]
cls.obj = RangesModel.objects.create(
dates=(cls.dates[0], cls.dates[3]),
dates_inner=(cls.dates[1], cls.dates[2]),
timestamps=(cls.timestamps[0], cls.timestamps[3]),
timestamps_inner=(cls.timestamps[1], cls.timestamps[2]),
)
cls.aware_obj = RangesModel.objects.create(
dates=(cls.dates[0], cls.dates[3]),
dates_inner=(cls.dates[1], cls.dates[2]),
timestamps=(cls.aware_timestamps[0], cls.aware_timestamps[3]),
timestamps_inner=(cls.timestamps[1], cls.timestamps[2]),
)
# Objects that don't match any queries.
for i in range(3, 4):
RangesModel.objects.create(
dates=(cls.dates[i], cls.dates[i + 1]),
timestamps=(cls.timestamps[i], cls.timestamps[i + 1]),
)
RangesModel.objects.create(
dates=(cls.dates[i], cls.dates[i + 1]),
timestamps=(cls.aware_timestamps[i], cls.aware_timestamps[i + 1]),
)
def test_datetime_range_contains(self):
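        # Each entry is a different right-hand side accepted by the __contains
        # lookup: plain values, two-tuples, and query expressions.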
filter_args = (
self.timestamps[1],
self.aware_timestamps[1],
(self.timestamps[1], self.timestamps[2]),
(self.aware_timestamps[1], self.aware_timestamps[2]),
Value(self.dates[0]),
Func(F("dates"), function="lower", output_field=DateTimeField()),
F("timestamps_inner"),
)
for filter_arg in filter_args:
with self.subTest(filter_arg=filter_arg):
self.assertCountEqual(
RangesModel.objects.filter(**{"timestamps__contains": filter_arg}),
[self.obj, self.aware_obj],
)
def test_date_range_contains(self):
filter_args = (
self.timestamps[1],
(self.dates[1], self.dates[2]),
Value(self.dates[0], output_field=DateField()),
Func(F("timestamps"), function="lower", output_field=DateField()),
F("dates_inner"),
)
for filter_arg in filter_args:
with self.subTest(filter_arg=filter_arg):
self.assertCountEqual(
RangesModel.objects.filter(**{"dates__contains": filter_arg}),
[self.obj, self.aware_obj],
)
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = RangesModel.objects.bulk_create(
[
RangesModel(ints=NumericRange(0, 10)),
RangesModel(ints=NumericRange(5, 15)),
RangesModel(ints=NumericRange(None, 0)),
RangesModel(ints=NumericRange(empty=True)),
RangesModel(ints=None),
]
)
def test_exact(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__exact=NumericRange(0, 10)),
[self.objs[0]],
)
def test_isnull(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isnull=True),
[self.objs[4]],
)
def test_isempty(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isempty=True),
[self.objs[3]],
)
def test_contains(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=8),
[self.objs[0], self.objs[1]],
)
def test_contains_range(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=NumericRange(3, 8)),
[self.objs[0]],
)
def test_contained_by(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contained_by=NumericRange(0, 20)),
[self.objs[0], self.objs[1], self.objs[3]],
)
def test_overlap(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__overlap=NumericRange(3, 8)),
[self.objs[0], self.objs[1]],
)
def test_fully_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_lt=NumericRange(5, 10)),
[self.objs[2]],
)
def test_fully_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_gt=NumericRange(5, 10)),
[],
)
def test_not_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_lt=NumericRange(5, 10)),
[self.objs[1]],
)
def test_not_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_gt=NumericRange(5, 10)),
[self.objs[0], self.objs[2]],
)
def test_adjacent_to(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__adjacent_to=NumericRange(0, 5)),
[self.objs[1], self.objs[2]],
)
def test_startswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith=0),
[self.objs[0]],
)
def test_endswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__endswith=0),
[self.objs[2]],
)
def test_startswith_chaining(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith__gte=0),
[self.objs[0], self.objs[1]],
)
def test_bound_type(self):
decimals = RangesModel.objects.bulk_create(
[
RangesModel(decimals=NumericRange(None, 10)),
RangesModel(decimals=NumericRange(10, None)),
RangesModel(decimals=NumericRange(5, 15)),
RangesModel(decimals=NumericRange(5, 15, "(]")),
]
)
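        # (lookup, filter value, expected rows): the *_inc lookups check bound
        # inclusivity and the *_inf lookups check for an infinite bound.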
tests = [
("lower_inc", True, [decimals[1], decimals[2]]),
("lower_inc", False, [decimals[0], decimals[3]]),
("lower_inf", True, [decimals[0]]),
("lower_inf", False, [decimals[1], decimals[2], decimals[3]]),
("upper_inc", True, [decimals[3]]),
("upper_inc", False, [decimals[0], decimals[1], decimals[2]]),
("upper_inf", True, [decimals[1]]),
("upper_inf", False, [decimals[0], decimals[2], decimals[3]]),
]
        for lookup, filter_arg, expected_result in tests:
            with self.subTest(lookup=lookup, filter_arg=filter_arg):
                self.assertSequenceEqual(
                    RangesModel.objects.filter(**{"decimals__%s" % lookup: filter_arg}),
                    expected_result,
)
class TestQueryingWithRanges(PostgreSQLTestCase):
def test_date_range(self):
objs = [
RangeLookupsModel.objects.create(date="2015-01-01"),
RangeLookupsModel.objects.create(date="2015-05-05"),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
date__contained_by=DateRange("2015-01-01", "2015-05-04")
),
[objs[0]],
)
def test_date_range_datetime_field(self):
objs = [
RangeLookupsModel.objects.create(timestamp="2015-01-01"),
RangeLookupsModel.objects.create(timestamp="2015-05-05"),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
timestamp__date__contained_by=DateRange("2015-01-01", "2015-05-04")
),
[objs[0]],
)
def test_datetime_range(self):
objs = [
RangeLookupsModel.objects.create(timestamp="2015-01-01T09:00:00"),
RangeLookupsModel.objects.create(timestamp="2015-05-05T17:00:00"),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
timestamp__contained_by=DateTimeTZRange(
"2015-01-01T09:00", "2015-05-04T23:55"
)
),
[objs[0]],
)
def test_small_integer_field_contained_by(self):
objs = [
RangeLookupsModel.objects.create(small_integer=8),
RangeLookupsModel.objects.create(small_integer=4),
RangeLookupsModel.objects.create(small_integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
small_integer__contained_by=NumericRange(4, 6)
),
[objs[1]],
)
def test_integer_range(self):
objs = [
RangeLookupsModel.objects.create(integer=5),
RangeLookupsModel.objects.create(integer=99),
RangeLookupsModel.objects.create(integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(integer__contained_by=NumericRange(1, 98)),
[objs[0]],
)
def test_biginteger_range(self):
objs = [
RangeLookupsModel.objects.create(big_integer=5),
RangeLookupsModel.objects.create(big_integer=99),
RangeLookupsModel.objects.create(big_integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
big_integer__contained_by=NumericRange(1, 98)
),
[objs[0]],
)
def test_decimal_field_contained_by(self):
objs = [
RangeLookupsModel.objects.create(decimal_field=Decimal("1.33")),
RangeLookupsModel.objects.create(decimal_field=Decimal("2.88")),
RangeLookupsModel.objects.create(decimal_field=Decimal("99.17")),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
decimal_field__contained_by=NumericRange(
Decimal("1.89"), Decimal("7.91")
),
),
[objs[1]],
)
def test_float_range(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=NumericRange(1, 98)),
[objs[0]],
)
def test_small_auto_field_contained_by(self):
objs = SmallAutoFieldModel.objects.bulk_create(
[SmallAutoFieldModel() for i in range(1, 5)]
)
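        # NumericRange defaults to '[)' bounds, so the row whose pk equals the
        # upper bound is excluded.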
self.assertSequenceEqual(
SmallAutoFieldModel.objects.filter(
id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
),
objs[1:3],
)
def test_auto_field_contained_by(self):
objs = RangeLookupsModel.objects.bulk_create(
[RangeLookupsModel() for i in range(1, 5)]
)
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
),
objs[1:3],
)
def test_big_auto_field_contained_by(self):
objs = BigAutoFieldModel.objects.bulk_create(
[BigAutoFieldModel() for i in range(1, 5)]
)
self.assertSequenceEqual(
BigAutoFieldModel.objects.filter(
id__contained_by=NumericRange(objs[1].pk, objs[3].pk),
),
objs[1:3],
)
def test_f_ranges(self):
parent = RangesModel.objects.create(decimals=NumericRange(0, 10))
objs = [
RangeLookupsModel.objects.create(float=5, parent=parent),
RangeLookupsModel.objects.create(float=99, parent=parent),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=F("parent__decimals")),
[objs[0]],
)
def test_exclude(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.exclude(float__contained_by=NumericRange(0, 100)),
[objs[2]],
)
class TestSerialization(PostgreSQLSimpleTestCase):
test_data = (
'[{"fields": {"ints": "{\\"upper\\": \\"10\\", \\"lower\\": \\"0\\", '
'\\"bounds\\": \\"[)\\"}", "decimals": "{\\"empty\\": true}", '
'"bigints": null, "timestamps": '
'"{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
'\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"[)\\"}", '
'"timestamps_inner": null, '
'"timestamps_closed_bounds": "{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
'\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"()\\"}", '
'"dates": "{\\"upper\\": \\"2014-02-02\\", \\"lower\\": \\"2014-01-01\\", '
'\\"bounds\\": \\"[)\\"}", "dates_inner": null }, '
'"model": "postgres_tests.rangesmodel", "pk": null}]'
)
lower_date = datetime.date(2014, 1, 1)
upper_date = datetime.date(2014, 2, 2)
lower_dt = datetime.datetime(2014, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
upper_dt = datetime.datetime(2014, 2, 2, 12, 12, 12, tzinfo=timezone.utc)
def test_dumping(self):
instance = RangesModel(
ints=NumericRange(0, 10),
decimals=NumericRange(empty=True),
timestamps=DateTimeTZRange(self.lower_dt, self.upper_dt),
timestamps_closed_bounds=DateTimeTZRange(
self.lower_dt,
self.upper_dt,
bounds="()",
),
dates=DateRange(self.lower_date, self.upper_date),
)
data = serializers.serialize("json", [instance])
dumped = json.loads(data)
for field in ("ints", "dates", "timestamps", "timestamps_closed_bounds"):
dumped[0]["fields"][field] = json.loads(dumped[0]["fields"][field])
check = json.loads(self.test_data)
for field in ("ints", "dates", "timestamps", "timestamps_closed_bounds"):
check[0]["fields"][field] = json.loads(check[0]["fields"][field])
self.assertEqual(dumped, check)
def test_loading(self):
instance = list(serializers.deserialize("json", self.test_data))[0].object
self.assertEqual(instance.ints, NumericRange(0, 10))
self.assertEqual(instance.decimals, NumericRange(empty=True))
self.assertIsNone(instance.bigints)
self.assertEqual(instance.dates, DateRange(self.lower_date, self.upper_date))
self.assertEqual(
instance.timestamps, DateTimeTZRange(self.lower_dt, self.upper_dt)
)
self.assertEqual(
instance.timestamps_closed_bounds,
DateTimeTZRange(self.lower_dt, self.upper_dt, bounds="()"),
)
def test_serialize_range_with_null(self):
instance = RangesModel(ints=NumericRange(None, 10))
data = serializers.serialize("json", [instance])
new_instance = list(serializers.deserialize("json", data))[0].object
self.assertEqual(new_instance.ints, NumericRange(None, 10))
instance = RangesModel(ints=NumericRange(10, None))
data = serializers.serialize("json", [instance])
new_instance = list(serializers.deserialize("json", data))[0].object
self.assertEqual(new_instance.ints, NumericRange(10, None))
class TestChecks(PostgreSQLSimpleTestCase):
def test_choices_tuple_list(self):
class Model(PostgreSQLModel):
field = pg_fields.IntegerRangeField(
choices=[
["1-50", [((1, 25), "1-25"), ([26, 50], "26-50")]],
((51, 100), "51-100"),
],
)
self.assertEqual(Model._meta.get_field("field").check(), [])
class TestValidators(PostgreSQLSimpleTestCase):
def test_max(self):
validator = RangeMaxValueValidator(5)
validator(NumericRange(0, 5))
msg = "Ensure that this range is completely less than or equal to 5."
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], msg)
self.assertEqual(cm.exception.code, "max_value")
with self.assertRaisesMessage(exceptions.ValidationError, msg):
validator(NumericRange(0, None)) # an unbound range
def test_min(self):
validator = RangeMinValueValidator(5)
validator(NumericRange(10, 15))
msg = "Ensure that this range is completely greater than or equal to 5."
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], msg)
self.assertEqual(cm.exception.code, "min_value")
with self.assertRaisesMessage(exceptions.ValidationError, msg):
validator(NumericRange(None, 10)) # an unbound range
class TestFormField(PostgreSQLSimpleTestCase):
def test_valid_integer(self):
field = pg_forms.IntegerRangeField()
value = field.clean(["1", "2"])
self.assertEqual(value, NumericRange(1, 2))
def test_valid_decimal(self):
field = pg_forms.DecimalRangeField()
value = field.clean(["1.12345", "2.001"])
self.assertEqual(value, NumericRange(Decimal("1.12345"), Decimal("2.001")))
def test_valid_timestamps(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(["01/01/2014 00:00:00", "02/02/2014 12:12:12"])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(value, DateTimeTZRange(lower, upper))
def test_valid_dates(self):
field = pg_forms.DateRangeField()
value = field.clean(["01/01/2014", "02/02/2014"])
lower = datetime.date(2014, 1, 1)
upper = datetime.date(2014, 2, 2)
self.assertEqual(value, DateRange(lower, upper))
def test_using_split_datetime_widget(self):
class SplitDateTimeRangeField(pg_forms.DateTimeRangeField):
base_field = forms.SplitDateTimeField
class SplitForm(forms.Form):
field = SplitDateTimeRangeField()
form = SplitForm()
self.assertHTMLEqual(
str(form),
"""
<tr>
<th>
<label>Field:</label>
</th>
<td>
<input id="id_field_0_0" name="field_0_0" type="text">
<input id="id_field_0_1" name="field_0_1" type="text">
<input id="id_field_1_0" name="field_1_0" type="text">
<input id="id_field_1_1" name="field_1_1" type="text">
</td>
</tr>
""",
)
form = SplitForm(
{
"field_0_0": "01/01/2014",
"field_0_1": "00:00:00",
"field_1_0": "02/02/2014",
"field_1_1": "12:12:12",
}
)
self.assertTrue(form.is_valid())
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(form.cleaned_data["field"], DateTimeTZRange(lower, upper))
def test_none(self):
field = pg_forms.IntegerRangeField(required=False)
value = field.clean(["", ""])
self.assertIsNone(value)
def test_datetime_form_as_table(self):
class DateTimeRangeForm(forms.Form):
datetime_field = pg_forms.DateTimeRangeField(show_hidden_initial=True)
form = DateTimeRangeForm()
self.assertHTMLEqual(
form.as_table(),
"""
<tr><th>
<label>Datetime field:</label>
</th><td>
<input type="text" name="datetime_field_0" id="id_datetime_field_0">
<input type="text" name="datetime_field_1" id="id_datetime_field_1">
<input type="hidden" name="initial-datetime_field_0"
id="initial-id_datetime_field_0">
<input type="hidden" name="initial-datetime_field_1"
id="initial-id_datetime_field_1">
</td></tr>
""",
)
form = DateTimeRangeForm(
{
"datetime_field_0": "2010-01-01 11:13:00",
"datetime_field_1": "2020-12-12 16:59:00",
}
)
self.assertHTMLEqual(
form.as_table(),
"""
<tr><th>
<label>Datetime field:</label>
</th><td>
<input type="text" name="datetime_field_0"
value="2010-01-01 11:13:00" id="id_datetime_field_0">
<input type="text" name="datetime_field_1"
value="2020-12-12 16:59:00" id="id_datetime_field_1">
<input type="hidden" name="initial-datetime_field_0"
value="2010-01-01 11:13:00" id="initial-id_datetime_field_0">
<input type="hidden" name="initial-datetime_field_1"
value="2020-12-12 16:59:00" id="initial-id_datetime_field_1"></td></tr>
""",
)
def test_datetime_form_initial_data(self):
class DateTimeRangeForm(forms.Form):
datetime_field = pg_forms.DateTimeRangeField(show_hidden_initial=True)
data = QueryDict(mutable=True)
data.update(
{
"datetime_field_0": "2010-01-01 11:13:00",
"datetime_field_1": "",
"initial-datetime_field_0": "2010-01-01 10:12:00",
"initial-datetime_field_1": "",
}
)
form = DateTimeRangeForm(data=data)
self.assertTrue(form.has_changed())
data["initial-datetime_field_0"] = "2010-01-01 11:13:00"
form = DateTimeRangeForm(data=data)
self.assertFalse(form.has_changed())
def test_rendering(self):
class RangeForm(forms.Form):
ints = pg_forms.IntegerRangeField()
self.assertHTMLEqual(
str(RangeForm()),
"""
<tr>
<th><label>Ints:</label></th>
<td>
<input id="id_ints_0" name="ints_0" type="number">
<input id="id_ints_1" name="ints_1" type="number">
</td>
</tr>
""",
)
def test_integer_lower_bound_higher(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["10", "2"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_integer_open(self):
field = pg_forms.IntegerRangeField()
value = field.clean(["", "0"])
self.assertEqual(value, NumericRange(None, 0))
def test_integer_incorrect_data_type(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1")
self.assertEqual(cm.exception.messages[0], "Enter two whole numbers.")
self.assertEqual(cm.exception.code, "invalid")
def test_integer_invalid_lower(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["a", "2"])
self.assertEqual(cm.exception.messages[0], "Enter a whole number.")
def test_integer_invalid_upper(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["1", "b"])
self.assertEqual(cm.exception.messages[0], "Enter a whole number.")
def test_integer_required(self):
field = pg_forms.IntegerRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean([1, ""])
self.assertEqual(value, NumericRange(1, None))
def test_decimal_lower_bound_higher(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["1.8", "1.6"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_decimal_open(self):
field = pg_forms.DecimalRangeField()
value = field.clean(["", "3.1415926"])
self.assertEqual(value, NumericRange(None, Decimal("3.1415926")))
def test_decimal_incorrect_data_type(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1.6")
self.assertEqual(cm.exception.messages[0], "Enter two numbers.")
self.assertEqual(cm.exception.code, "invalid")
def test_decimal_invalid_lower(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["a", "3.1415926"])
self.assertEqual(cm.exception.messages[0], "Enter a number.")
def test_decimal_invalid_upper(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["1.61803399", "b"])
self.assertEqual(cm.exception.messages[0], "Enter a number.")
def test_decimal_required(self):
field = pg_forms.DecimalRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean(["1.61803399", ""])
self.assertEqual(value, NumericRange(Decimal("1.61803399"), None))
def test_date_lower_bound_higher(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2013-04-09", "1976-04-16"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_date_open(self):
field = pg_forms.DateRangeField()
value = field.clean(["", "2013-04-09"])
self.assertEqual(value, DateRange(None, datetime.date(2013, 4, 9)))
def test_date_incorrect_data_type(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("1")
self.assertEqual(cm.exception.messages[0], "Enter two valid dates.")
self.assertEqual(cm.exception.code, "invalid")
def test_date_invalid_lower(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["a", "2013-04-09"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date.")
def test_date_invalid_upper(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2013-04-09", "b"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date.")
def test_date_required(self):
field = pg_forms.DateRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean(["1976-04-16", ""])
self.assertEqual(value, DateRange(datetime.date(1976, 4, 16), None))
def test_date_has_changed_first(self):
self.assertTrue(
pg_forms.DateRangeField().has_changed(
["2010-01-01", "2020-12-12"],
["2010-01-31", "2020-12-12"],
)
)
def test_date_has_changed_last(self):
self.assertTrue(
pg_forms.DateRangeField().has_changed(
["2010-01-01", "2020-12-12"],
["2010-01-01", "2020-12-31"],
)
)
def test_datetime_lower_bound_higher(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2006-10-25 14:59", "2006-10-25 14:58"])
self.assertEqual(
cm.exception.messages[0],
"The start of the range must not exceed the end of the range.",
)
self.assertEqual(cm.exception.code, "bound_ordering")
def test_datetime_open(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(["", "2013-04-09 11:45"])
self.assertEqual(
value, DateTimeTZRange(None, datetime.datetime(2013, 4, 9, 11, 45))
)
def test_datetime_incorrect_data_type(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean("2013-04-09 11:45")
self.assertEqual(cm.exception.messages[0], "Enter two valid date/times.")
self.assertEqual(cm.exception.code, "invalid")
def test_datetime_invalid_lower(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["45", "2013-04-09 11:45"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date/time.")
def test_datetime_invalid_upper(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["2013-04-09 11:45", "sweet pickles"])
self.assertEqual(cm.exception.messages[0], "Enter a valid date/time.")
def test_datetime_required(self):
field = pg_forms.DateTimeRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(["", ""])
self.assertEqual(cm.exception.messages[0], "This field is required.")
value = field.clean(["2013-04-09 11:45", ""])
self.assertEqual(
value, DateTimeTZRange(datetime.datetime(2013, 4, 9, 11, 45), None)
)
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Johannesburg")
def test_datetime_prepare_value(self):
field = pg_forms.DateTimeRangeField()
value = field.prepare_value(
DateTimeTZRange(
datetime.datetime(2015, 5, 22, 16, 6, 33, tzinfo=timezone.utc), None
)
)
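        # 16:06:33 UTC is presented as 18:06:33 in Africa/Johannesburg (UTC+2).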
self.assertEqual(value, [datetime.datetime(2015, 5, 22, 18, 6, 33), None])
def test_datetime_has_changed_first(self):
self.assertTrue(
pg_forms.DateTimeRangeField().has_changed(
["2010-01-01 00:00", "2020-12-12 00:00"],
["2010-01-31 23:00", "2020-12-12 00:00"],
)
)
def test_datetime_has_changed_last(self):
self.assertTrue(
pg_forms.DateTimeRangeField().has_changed(
["2010-01-01 00:00", "2020-12-12 00:00"],
["2010-01-01 00:00", "2020-12-31 23:00"],
)
)
def test_model_field_formfield_integer(self):
model_field = pg_fields.IntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
self.assertEqual(form_field.range_kwargs, {})
def test_model_field_formfield_biginteger(self):
model_field = pg_fields.BigIntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
self.assertEqual(form_field.range_kwargs, {})
def test_model_field_formfield_float(self):
model_field = pg_fields.DecimalRangeField(default_bounds="()")
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DecimalRangeField)
self.assertEqual(form_field.range_kwargs, {"bounds": "()"})
def test_model_field_formfield_date(self):
model_field = pg_fields.DateRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateRangeField)
self.assertEqual(form_field.range_kwargs, {})
def test_model_field_formfield_datetime(self):
model_field = pg_fields.DateTimeRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateTimeRangeField)
self.assertEqual(
form_field.range_kwargs,
{"bounds": pg_fields.ranges.CANONICAL_RANGE_BOUNDS},
)
def test_model_field_formfield_datetime_default_bounds(self):
model_field = pg_fields.DateTimeRangeField(default_bounds="[]")
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateTimeRangeField)
self.assertEqual(form_field.range_kwargs, {"bounds": "[]"})
def test_model_field_with_default_bounds(self):
field = pg_forms.DateTimeRangeField(default_bounds="[]")
value = field.clean(["2014-01-01 00:00:00", "2014-02-03 12:13:14"])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 3, 12, 13, 14)
self.assertEqual(value, DateTimeTZRange(lower, upper, "[]"))
def test_has_changed(self):
for field, value in (
(pg_forms.DateRangeField(), ["2010-01-01", "2020-12-12"]),
(pg_forms.DateTimeRangeField(), ["2010-01-01 11:13", "2020-12-12 14:52"]),
(pg_forms.IntegerRangeField(), [1, 2]),
(pg_forms.DecimalRangeField(), ["1.12345", "2.001"]),
):
with self.subTest(field=field.__class__.__name__):
self.assertTrue(field.has_changed(None, value))
self.assertTrue(field.has_changed([value[0], ""], value))
self.assertTrue(field.has_changed(["", value[1]], value))
self.assertFalse(field.has_changed(value, value))
class TestWidget(PostgreSQLSimpleTestCase):
def test_range_widget(self):
f = pg_forms.ranges.DateTimeRangeField()
self.assertHTMLEqual(
f.widget.render("datetimerange", ""),
'<input type="text" name="datetimerange_0">'
'<input type="text" name="datetimerange_1">',
)
self.assertHTMLEqual(
f.widget.render("datetimerange", None),
'<input type="text" name="datetimerange_0">'
'<input type="text" name="datetimerange_1">',
)
dt_range = DateTimeTZRange(
datetime.datetime(2006, 1, 10, 7, 30), datetime.datetime(2006, 2, 12, 9, 50)
)
self.assertHTMLEqual(
f.widget.render("datetimerange", dt_range),
'<input type="text" name="datetimerange_0" value="2006-01-10 07:30:00">'
'<input type="text" name="datetimerange_1" value="2006-02-12 09:50:00">',
)
|
ba41b5de91d573827b5b307ded68b2cdc4259acd7e1e81e63420eb9ee8c070c4 | import unittest
from forms_tests.widget_tests.base import WidgetTest
from django.db import connection
from django.test import SimpleTestCase, TestCase, modify_settings
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
class PostgreSQLSimpleTestCase(SimpleTestCase):
pass
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
class PostgreSQLTestCase(TestCase):
pass
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests")
# To locate the widget's template.
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class PostgreSQLWidgetTestCase(WidgetTest, PostgreSQLSimpleTestCase):
pass
|
ecd9aa7a60f3f8ae3192117658194cd53c91bd1d100d23afe103ae5006054557 | from django.db import models
from .fields import (
ArrayField,
BigIntegerRangeField,
CICharField,
CIEmailField,
CITextField,
DateRangeField,
DateTimeRangeField,
DecimalRangeField,
EnumField,
HStoreField,
IntegerRangeField,
SearchVectorField,
)
class Tag:
def __init__(self, tag_id):
self.tag_id = tag_id
def __eq__(self, other):
return isinstance(other, Tag) and self.tag_id == other.tag_id
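# Converts between database integers and Tag instances; used to exercise
# ArrayField with a custom base field (OtherTypesArrayModel.tags).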
class TagField(models.SmallIntegerField):
def from_db_value(self, value, expression, connection):
if value is None:
return value
return Tag(int(value))
def to_python(self, value):
if isinstance(value, Tag):
return value
if value is None:
return value
return Tag(int(value))
def get_prep_value(self, value):
return value.tag_id
class PostgreSQLModel(models.Model):
class Meta:
abstract = True
required_db_vendor = "postgresql"
class IntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=list, blank=True)
class NullableIntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), blank=True, null=True)
field_nested = ArrayField(ArrayField(models.IntegerField(null=True)), null=True)
order = models.IntegerField(null=True)
class CharArrayModel(PostgreSQLModel):
field = ArrayField(models.CharField(max_length=10))
class DateTimeArrayModel(PostgreSQLModel):
datetimes = ArrayField(models.DateTimeField())
dates = ArrayField(models.DateField())
times = ArrayField(models.TimeField())
class NestedIntegerArrayModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.IntegerField()))
class OtherTypesArrayModel(PostgreSQLModel):
ips = ArrayField(models.GenericIPAddressField(), default=list)
uuids = ArrayField(models.UUIDField(), default=list)
decimals = ArrayField(
models.DecimalField(max_digits=5, decimal_places=2), default=list
)
tags = ArrayField(TagField(), blank=True, null=True)
json = ArrayField(models.JSONField(default=dict), default=list)
int_ranges = ArrayField(IntegerRangeField(), blank=True, null=True)
bigint_ranges = ArrayField(BigIntegerRangeField(), blank=True, null=True)
class HStoreModel(PostgreSQLModel):
field = HStoreField(blank=True, null=True)
array_field = ArrayField(HStoreField(), null=True)
class ArrayEnumModel(PostgreSQLModel):
array_of_enums = ArrayField(EnumField(max_length=20))
class CharFieldModel(models.Model):
field = models.CharField(max_length=64)
class TextFieldModel(models.Model):
field = models.TextField()
class SmallAutoFieldModel(models.Model):
id = models.SmallAutoField(primary_key=True)
class BigAutoFieldModel(models.Model):
id = models.BigAutoField(primary_key=True)
# Scene/Character/Line models are used to test full text search. They're
# populated with content from Monty Python and the Holy Grail.
class Scene(models.Model):
scene = models.TextField()
setting = models.CharField(max_length=255)
class Character(models.Model):
name = models.CharField(max_length=255)
class CITestModel(PostgreSQLModel):
name = CICharField(primary_key=True, max_length=255)
email = CIEmailField()
description = CITextField()
array_field = ArrayField(CITextField(), null=True)
class Line(PostgreSQLModel):
scene = models.ForeignKey("Scene", models.CASCADE)
character = models.ForeignKey("Character", models.CASCADE)
dialogue = models.TextField(blank=True, null=True)
dialogue_search_vector = SearchVectorField(blank=True, null=True)
dialogue_config = models.CharField(max_length=100, blank=True, null=True)
class LineSavedSearch(PostgreSQLModel):
line = models.ForeignKey("Line", models.CASCADE)
query = models.CharField(max_length=100)
class RangesModel(PostgreSQLModel):
ints = IntegerRangeField(blank=True, null=True)
bigints = BigIntegerRangeField(blank=True, null=True)
decimals = DecimalRangeField(blank=True, null=True)
timestamps = DateTimeRangeField(blank=True, null=True)
timestamps_inner = DateTimeRangeField(blank=True, null=True)
timestamps_closed_bounds = DateTimeRangeField(
blank=True,
null=True,
default_bounds="[]",
)
dates = DateRangeField(blank=True, null=True)
dates_inner = DateRangeField(blank=True, null=True)
class RangeLookupsModel(PostgreSQLModel):
parent = models.ForeignKey(RangesModel, models.SET_NULL, blank=True, null=True)
integer = models.IntegerField(blank=True, null=True)
big_integer = models.BigIntegerField(blank=True, null=True)
float = models.FloatField(blank=True, null=True)
timestamp = models.DateTimeField(blank=True, null=True)
date = models.DateField(blank=True, null=True)
small_integer = models.SmallIntegerField(blank=True, null=True)
decimal_field = models.DecimalField(
max_digits=5, decimal_places=2, blank=True, null=True
)
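# An ArrayField subclass whose base field is always IntegerField, regardless
# of the arguments it receives.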
class ArrayFieldSubclass(ArrayField):
def __init__(self, *args, **kwargs):
super().__init__(models.IntegerField())
class AggregateTestModel(PostgreSQLModel):
"""
To test postgres-specific general aggregation functions
"""
char_field = models.CharField(max_length=30, blank=True)
text_field = models.TextField(blank=True)
integer_field = models.IntegerField(null=True)
boolean_field = models.BooleanField(null=True)
json_field = models.JSONField(null=True)
class StatTestModel(PostgreSQLModel):
"""
To test postgres-specific aggregation functions for statistics
"""
int1 = models.IntegerField()
int2 = models.IntegerField()
related_field = models.ForeignKey(AggregateTestModel, models.SET_NULL, null=True)
class NowTestModel(models.Model):
when = models.DateTimeField(null=True, default=None)
class UUIDTestModel(models.Model):
uuid = models.UUIDField(default=None, null=True)
class Room(models.Model):
number = models.IntegerField(unique=True)
class HotelReservation(PostgreSQLModel):
room = models.ForeignKey("Room", on_delete=models.CASCADE)
datespan = DateRangeField()
start = models.DateTimeField()
end = models.DateTimeField()
cancelled = models.BooleanField(default=False)
requirements = models.JSONField(blank=True, null=True)
|
eaf613ef5cb8e4ae66615fc377e075902155437620e1e6ddee5c48bff48346d0 | from django.db import connection
from django.test import modify_settings
from . import PostgreSQLTestCase
from .models import CharFieldModel, TextFieldModel
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class UnaccentTest(PostgreSQLTestCase):
Model = CharFieldModel
@classmethod
def setUpTestData(cls):
cls.Model.objects.bulk_create(
[
cls.Model(field="àéÖ"),
cls.Model(field="aeO"),
cls.Model(field="aeo"),
]
)
def test_unaccent(self):
self.assertQuerysetEqual(
self.Model.objects.filter(field__unaccent="aeO"),
["àéÖ", "aeO"],
transform=lambda instance: instance.field,
ordered=False,
)
def test_unaccent_chained(self):
"""
        Unaccent can be chained with another lookup (expected, since unaccent
        implements the Transform API).
"""
self.assertQuerysetEqual(
self.Model.objects.filter(field__unaccent__iexact="aeO"),
["àéÖ", "aeO", "aeo"],
transform=lambda instance: instance.field,
ordered=False,
)
self.assertQuerysetEqual(
self.Model.objects.filter(field__unaccent__endswith="éÖ"),
["àéÖ", "aeO"],
transform=lambda instance: instance.field,
ordered=False,
)
def test_unaccent_with_conforming_strings_off(self):
"""SQL is valid when standard_conforming_strings is off."""
with connection.cursor() as cursor:
cursor.execute("SHOW standard_conforming_strings")
disable_conforming_strings = cursor.fetchall()[0][0] == "on"
if disable_conforming_strings:
cursor.execute("SET standard_conforming_strings TO off")
try:
self.assertQuerysetEqual(
self.Model.objects.filter(field__unaccent__endswith="éÖ"),
["àéÖ", "aeO"],
transform=lambda instance: instance.field,
ordered=False,
)
finally:
if disable_conforming_strings:
cursor.execute("SET standard_conforming_strings TO on")
def test_unaccent_accentuated_needle(self):
self.assertQuerysetEqual(
self.Model.objects.filter(field__unaccent="aéÖ"),
["àéÖ", "aeO"],
transform=lambda instance: instance.field,
ordered=False,
)
class UnaccentTextFieldTest(UnaccentTest):
"""
TextField should have the exact same behavior as CharField
regarding unaccent lookups.
"""
Model = TextFieldModel
|
b6aed8f702939d9b49f689fe7e8170f0dea304a85cbb4ad9415f02c3545ca750 | import unittest
from unittest import mock
from migrations.test_base import OperationTestBase
from django.db import IntegrityError, NotSupportedError, connection, transaction
from django.db.migrations.state import ProjectState
from django.db.models import CheckConstraint, Index, Q, UniqueConstraint
from django.db.utils import ProgrammingError
from django.test import modify_settings, override_settings, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from . import PostgreSQLTestCase
try:
from django.contrib.postgres.indexes import BrinIndex, BTreeIndex
from django.contrib.postgres.operations import (
AddConstraintNotValid,
AddIndexConcurrently,
BloomExtension,
CreateCollation,
CreateExtension,
RemoveCollation,
RemoveIndexConcurrently,
ValidateConstraint,
)
except ImportError:
pass
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
@modify_settings(INSTALLED_APPS={"append": "migrations"})
class AddIndexConcurrentlyTests(OperationTestBase):
app_label = "test_add_concurrently"
def test_requires_atomic_false(self):
project_state = self.set_up_test_model(self.app_label)
new_state = project_state.clone()
operation = AddIndexConcurrently(
"Pony",
Index(fields=["pink"], name="pony_pink_idx"),
)
msg = (
"The AddIndexConcurrently operation cannot be executed inside "
"a transaction (set atomic = False on the migration)."
)
with self.assertRaisesMessage(NotSupportedError, msg):
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
def test_add(self):
project_state = self.set_up_test_model(self.app_label, index=False)
table_name = "%s_pony" % self.app_label
index = Index(fields=["pink"], name="pony_pink_idx")
new_state = project_state.clone()
operation = AddIndexConcurrently("Pony", index)
self.assertEqual(
operation.describe(),
"Concurrently create index pony_pink_idx on field(s) pink of model Pony",
)
operation.state_forwards(self.app_label, new_state)
self.assertEqual(
len(new_state.models[self.app_label, "pony"].options["indexes"]), 1
)
self.assertIndexNotExists(table_name, ["pink"])
# Add index.
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertIndexExists(table_name, ["pink"])
# Reversal.
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertIndexNotExists(table_name, ["pink"])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, "AddIndexConcurrently")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"model_name": "Pony", "index": index})
def test_add_other_index_type(self):
project_state = self.set_up_test_model(self.app_label, index=False)
table_name = "%s_pony" % self.app_label
new_state = project_state.clone()
operation = AddIndexConcurrently(
"Pony",
BrinIndex(fields=["pink"], name="pony_pink_brin_idx"),
)
self.assertIndexNotExists(table_name, ["pink"])
# Add index.
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertIndexExists(table_name, ["pink"], index_type="brin")
# Reversal.
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertIndexNotExists(table_name, ["pink"])
def test_add_with_options(self):
project_state = self.set_up_test_model(self.app_label, index=False)
table_name = "%s_pony" % self.app_label
new_state = project_state.clone()
index = BTreeIndex(fields=["pink"], name="pony_pink_btree_idx", fillfactor=70)
operation = AddIndexConcurrently("Pony", index)
self.assertIndexNotExists(table_name, ["pink"])
# Add index.
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertIndexExists(table_name, ["pink"], index_type="btree")
# Reversal.
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertIndexNotExists(table_name, ["pink"])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
@modify_settings(INSTALLED_APPS={"append": "migrations"})
class RemoveIndexConcurrentlyTests(OperationTestBase):
app_label = "test_rm_concurrently"
def test_requires_atomic_false(self):
project_state = self.set_up_test_model(self.app_label, index=True)
new_state = project_state.clone()
operation = RemoveIndexConcurrently("Pony", "pony_pink_idx")
msg = (
"The RemoveIndexConcurrently operation cannot be executed inside "
"a transaction (set atomic = False on the migration)."
)
with self.assertRaisesMessage(NotSupportedError, msg):
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
def test_remove(self):
project_state = self.set_up_test_model(self.app_label, index=True)
table_name = "%s_pony" % self.app_label
self.assertTableExists(table_name)
new_state = project_state.clone()
operation = RemoveIndexConcurrently("Pony", "pony_pink_idx")
self.assertEqual(
operation.describe(),
"Concurrently remove index pony_pink_idx from Pony",
)
operation.state_forwards(self.app_label, new_state)
self.assertEqual(
len(new_state.models[self.app_label, "pony"].options["indexes"]), 0
)
self.assertIndexExists(table_name, ["pink"])
# Remove index.
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertIndexNotExists(table_name, ["pink"])
# Reversal.
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertIndexExists(table_name, ["pink"])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, "RemoveIndexConcurrently")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"model_name": "Pony", "name": "pony_pink_idx"})
class NoMigrationRouter:
def allow_migrate(self, db, app_label, **hints):
return False
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
class CreateExtensionTests(PostgreSQLTestCase):
app_label = "test_allow_create_extention"
@override_settings(DATABASE_ROUTERS=[NoMigrationRouter()])
def test_no_allow_migrate(self):
operation = CreateExtension("tablefunc")
project_state = ProjectState()
new_state = project_state.clone()
# Don't create an extension.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 0)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertEqual(len(captured_queries), 0)
def test_allow_migrate(self):
operation = CreateExtension("tablefunc")
self.assertEqual(
operation.migration_name_fragment, "create_extension_tablefunc"
)
project_state = ProjectState()
new_state = project_state.clone()
# Create an extension.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 4)
self.assertIn("CREATE EXTENSION IF NOT EXISTS", captured_queries[1]["sql"])
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertEqual(len(captured_queries), 2)
self.assertIn("DROP EXTENSION IF EXISTS", captured_queries[1]["sql"])
def test_create_existing_extension(self):
operation = BloomExtension()
self.assertEqual(operation.migration_name_fragment, "create_extension_bloom")
project_state = ProjectState()
new_state = project_state.clone()
        # Don't re-create an extension that already exists.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 3)
self.assertIn("SELECT", captured_queries[0]["sql"])
def test_drop_nonexistent_extension(self):
operation = CreateExtension("tablefunc")
project_state = ProjectState()
new_state = project_state.clone()
# Don't drop a nonexistent extension.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 1)
self.assertIn("SELECT", captured_queries[0]["sql"])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
class CreateCollationTests(PostgreSQLTestCase):
app_label = "test_allow_create_collation"
@override_settings(DATABASE_ROUTERS=[NoMigrationRouter()])
def test_no_allow_migrate(self):
operation = CreateCollation("C_test", locale="C")
project_state = ProjectState()
new_state = project_state.clone()
# Don't create a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 0)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertEqual(len(captured_queries), 0)
def test_create(self):
operation = CreateCollation("C_test", locale="C")
self.assertEqual(operation.migration_name_fragment, "create_collation_c_test")
self.assertEqual(operation.describe(), "Create collation C_test")
project_state = ProjectState()
new_state = project_state.clone()
# Create a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 1)
self.assertIn("CREATE COLLATION", captured_queries[0]["sql"])
# Creating the same collation raises an exception.
with self.assertRaisesMessage(ProgrammingError, "already exists"):
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertEqual(len(captured_queries), 1)
self.assertIn("DROP COLLATION", captured_queries[0]["sql"])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, "CreateCollation")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"name": "C_test", "locale": "C"})
@skipUnlessDBFeature("supports_non_deterministic_collations")
def test_create_non_deterministic_collation(self):
operation = CreateCollation(
"case_insensitive_test",
"und-u-ks-level2",
provider="icu",
deterministic=False,
)
project_state = ProjectState()
new_state = project_state.clone()
# Create a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 1)
self.assertIn("CREATE COLLATION", captured_queries[0]["sql"])
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertEqual(len(captured_queries), 1)
self.assertIn("DROP COLLATION", captured_queries[0]["sql"])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, "CreateCollation")
self.assertEqual(args, [])
self.assertEqual(
kwargs,
{
"name": "case_insensitive_test",
"locale": "und-u-ks-level2",
"provider": "icu",
"deterministic": False,
},
)
def test_create_collation_alternate_provider(self):
operation = CreateCollation(
"german_phonebook_test",
provider="icu",
locale="de-u-co-phonebk",
)
project_state = ProjectState()
new_state = project_state.clone()
        # Create a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 1)
self.assertIn("CREATE COLLATION", captured_queries[0]["sql"])
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertEqual(len(captured_queries), 1)
self.assertIn("DROP COLLATION", captured_queries[0]["sql"])
def test_nondeterministic_collation_not_supported(self):
operation = CreateCollation(
"case_insensitive_test",
provider="icu",
locale="und-u-ks-level2",
deterministic=False,
)
project_state = ProjectState()
new_state = project_state.clone()
msg = "Non-deterministic collations require PostgreSQL 12+."
with connection.schema_editor(atomic=False) as editor:
with mock.patch(
"django.db.backends.postgresql.features.DatabaseFeatures."
"supports_non_deterministic_collations",
False,
):
with self.assertRaisesMessage(NotSupportedError, msg):
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
class RemoveCollationTests(PostgreSQLTestCase):
app_label = "test_allow_remove_collation"
@override_settings(DATABASE_ROUTERS=[NoMigrationRouter()])
def test_no_allow_migrate(self):
operation = RemoveCollation("C_test", locale="C")
project_state = ProjectState()
new_state = project_state.clone()
        # Don't remove the collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 0)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertEqual(len(captured_queries), 0)
def test_remove(self):
operation = CreateCollation("C_test", locale="C")
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
operation = RemoveCollation("C_test", locale="C")
self.assertEqual(operation.migration_name_fragment, "remove_collation_c_test")
self.assertEqual(operation.describe(), "Remove collation C_test")
project_state = ProjectState()
new_state = project_state.clone()
# Remove a collation.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
self.assertEqual(len(captured_queries), 1)
self.assertIn("DROP COLLATION", captured_queries[0]["sql"])
# Removing a nonexistent collation raises an exception.
with self.assertRaisesMessage(ProgrammingError, "does not exist"):
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
# Reversal.
with CaptureQueriesContext(connection) as captured_queries:
with connection.schema_editor(atomic=False) as editor:
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
self.assertEqual(len(captured_queries), 1)
self.assertIn("CREATE COLLATION", captured_queries[0]["sql"])
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, "RemoveCollation")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"name": "C_test", "locale": "C"})
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
@modify_settings(INSTALLED_APPS={"append": "migrations"})
class AddConstraintNotValidTests(OperationTestBase):
app_label = "test_add_constraint_not_valid"
def test_non_check_constraint_not_supported(self):
constraint = UniqueConstraint(fields=["pink"], name="pony_pink_uniq")
msg = "AddConstraintNotValid.constraint must be a check constraint."
with self.assertRaisesMessage(TypeError, msg):
AddConstraintNotValid(model_name="pony", constraint=constraint)
def test_add(self):
table_name = f"{self.app_label}_pony"
constraint_name = "pony_pink_gte_check"
constraint = CheckConstraint(check=Q(pink__gte=4), name=constraint_name)
operation = AddConstraintNotValid("Pony", constraint=constraint)
project_state, new_state = self.make_test_state(self.app_label, operation)
self.assertEqual(
operation.describe(),
f"Create not valid constraint {constraint_name} on model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
f"pony_{constraint_name}_not_valid",
)
self.assertEqual(
len(new_state.models[self.app_label, "pony"].options["constraints"]),
1,
)
self.assertConstraintNotExists(table_name, constraint_name)
Pony = new_state.apps.get_model(self.app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
Pony.objects.create(pink=2, weight=1.0)
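        # The pink=2 row violates the new constraint, but NOT VALID skips
        # checking rows that already exist.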
# Add constraint.
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
msg = f'check constraint "{constraint_name}"'
with self.assertRaisesMessage(IntegrityError, msg), transaction.atomic():
Pony.objects.create(pink=3, weight=1.0)
self.assertConstraintExists(table_name, constraint_name)
# Reversal.
with connection.schema_editor(atomic=True) as editor:
operation.database_backwards(
self.app_label, editor, project_state, new_state
)
self.assertConstraintNotExists(table_name, constraint_name)
Pony.objects.create(pink=3, weight=1.0)
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, "AddConstraintNotValid")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"model_name": "Pony", "constraint": constraint})
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific tests.")
@modify_settings(INSTALLED_APPS={"append": "migrations"})
class ValidateConstraintTests(OperationTestBase):
app_label = "test_validate_constraint"
def test_validate(self):
constraint_name = "pony_pink_gte_check"
constraint = CheckConstraint(check=Q(pink__gte=4), name=constraint_name)
operation = AddConstraintNotValid("Pony", constraint=constraint)
project_state, new_state = self.make_test_state(self.app_label, operation)
Pony = new_state.apps.get_model(self.app_label, "Pony")
obj = Pony.objects.create(pink=2, weight=1.0)
# Add constraint.
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
operation = ValidateConstraint("Pony", name=constraint_name)
operation.state_forwards(self.app_label, new_state)
self.assertEqual(
operation.describe(),
f"Validate constraint {constraint_name} on model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
f"pony_validate_{constraint_name}",
)
# Validate constraint.
with connection.schema_editor(atomic=True) as editor:
msg = f'check constraint "{constraint_name}"'
with self.assertRaisesMessage(IntegrityError, msg):
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
obj.pink = 5
obj.save()
with connection.schema_editor(atomic=True) as editor:
operation.database_forwards(
self.app_label, editor, project_state, new_state
)
# Reversal is a noop.
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_backwards(
self.app_label, editor, new_state, project_state
)
# Deconstruction.
name, args, kwargs = operation.deconstruct()
self.assertEqual(name, "ValidateConstraint")
self.assertEqual(args, [])
self.assertEqual(kwargs, {"model_name": "Pony", "name": constraint_name})
|
bfadf4d161d11ae08a15e281f4b89bf6d3cc501c215b951352a774a229ceca0e | """
Indirection layer for PostgreSQL-specific fields, so the tests don't fail when
run with a backend other than PostgreSQL.
"""
import enum
from django.db import models
try:
from django.contrib.postgres.fields import (
ArrayField,
BigIntegerRangeField,
CICharField,
CIEmailField,
CITextField,
DateRangeField,
DateTimeRangeField,
DecimalRangeField,
HStoreField,
IntegerRangeField,
)
from django.contrib.postgres.search import SearchVector, SearchVectorField
except ImportError:
class DummyArrayField(models.Field):
def __init__(self, base_field, size=None, **kwargs):
super().__init__(**kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
kwargs.update(
{
"base_field": "",
"size": 1,
}
)
return name, path, args, kwargs
class DummyContinuousRangeField(models.Field):
def __init__(self, *args, default_bounds="[)", **kwargs):
super().__init__(**kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
kwargs["default_bounds"] = "[)"
return name, path, args, kwargs
ArrayField = DummyArrayField
BigIntegerRangeField = models.Field
CICharField = models.Field
CIEmailField = models.Field
CITextField = models.Field
DateRangeField = models.Field
DateTimeRangeField = DummyContinuousRangeField
DecimalRangeField = DummyContinuousRangeField
HStoreField = models.Field
IntegerRangeField = models.Field
SearchVector = models.Expression
SearchVectorField = models.Field
class EnumField(models.CharField):
def get_prep_value(self, value):
return value.value if isinstance(value, enum.Enum) else value
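# A usage sketch (hypothetical model, not part of this module) showing why the
# dummy fallbacks above are sufficient: model definitions stay importable on
# non-PostgreSQL backends even though the fields are non-functional there.
#
#     from .fields import ArrayField
#
#     class ExampleArrayModel(models.Model):
#         field = ArrayField(models.IntegerField(), default=list, blank=True)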
|
b894c08cb7d2e052846059b4e8360db4e11490017738d3f6e53e38052ff10fcc | from unittest import mock
from django.contrib.postgres.indexes import (
BloomIndex,
BrinIndex,
BTreeIndex,
GinIndex,
GistIndex,
HashIndex,
OpClass,
SpGistIndex,
)
from django.db import NotSupportedError, connection
from django.db.models import CharField, F, Index, Q
from django.db.models.functions import Cast, Collate, Length, Lower
from django.test import skipUnlessDBFeature
from django.test.utils import modify_settings, register_lookup
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .fields import SearchVector, SearchVectorField
from .models import CharFieldModel, IntegerArrayModel, Scene, TextFieldModel
class IndexTestMixin:
def test_name_auto_generation(self):
index = self.index_class(fields=["field"])
index.set_name_with_model(CharFieldModel)
self.assertRegex(
index.name, r"postgres_te_field_[0-9a-f]{6}_%s" % self.index_class.suffix
)
def test_deconstruction_no_customization(self):
index = self.index_class(
fields=["title"], name="test_title_%s" % self.index_class.suffix
)
path, args, kwargs = index.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.indexes.%s" % self.index_class.__name__
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{"fields": ["title"], "name": "test_title_%s" % self.index_class.suffix},
)
def test_deconstruction_with_expressions_no_customization(self):
name = f"test_title_{self.index_class.suffix}"
index = self.index_class(Lower("title"), name=name)
path, args, kwargs = index.deconstruct()
self.assertEqual(
path,
f"django.contrib.postgres.indexes.{self.index_class.__name__}",
)
self.assertEqual(args, (Lower("title"),))
self.assertEqual(kwargs, {"name": name})
class BloomIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BloomIndex
def test_suffix(self):
self.assertEqual(BloomIndex.suffix, "bloom")
def test_deconstruction(self):
index = BloomIndex(fields=["title"], name="test_bloom", length=80, columns=[4])
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.BloomIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_bloom",
"length": 80,
"columns": [4],
},
)
def test_invalid_fields(self):
msg = "Bloom indexes support a maximum of 32 fields."
with self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"] * 33, name="test_bloom")
def test_invalid_columns(self):
msg = "BloomIndex.columns must be a list or tuple."
with self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", columns="x")
msg = "BloomIndex.columns cannot have more values than fields."
with self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", columns=[4, 3])
def test_invalid_columns_value(self):
msg = "BloomIndex.columns must contain integers from 1 to 4095."
for length in (0, 4096):
with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", columns=[length])
def test_invalid_length(self):
msg = "BloomIndex.length must be None or an integer from 1 to 4096."
for length in (0, 4097):
with self.subTest(length), self.assertRaisesMessage(ValueError, msg):
BloomIndex(fields=["title"], name="test_bloom", length=length)
class BrinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BrinIndex
def test_suffix(self):
self.assertEqual(BrinIndex.suffix, "brin")
def test_deconstruction(self):
index = BrinIndex(
fields=["title"],
name="test_title_brin",
autosummarize=True,
pages_per_range=16,
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.BrinIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_title_brin",
"autosummarize": True,
"pages_per_range": 16,
},
)
def test_invalid_pages_per_range(self):
with self.assertRaisesMessage(
ValueError, "pages_per_range must be None or a positive integer"
):
BrinIndex(fields=["title"], name="test_title_brin", pages_per_range=0)
class BTreeIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BTreeIndex
def test_suffix(self):
self.assertEqual(BTreeIndex.suffix, "btree")
def test_deconstruction(self):
index = BTreeIndex(fields=["title"], name="test_title_btree", fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.BTreeIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"fields": ["title"], "name": "test_title_btree", "fillfactor": 80}
)
class GinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = GinIndex
def test_suffix(self):
self.assertEqual(GinIndex.suffix, "gin")
def test_deconstruction(self):
index = GinIndex(
fields=["title"],
name="test_title_gin",
fastupdate=True,
gin_pending_list_limit=128,
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.GinIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_title_gin",
"fastupdate": True,
"gin_pending_list_limit": 128,
},
)
class GistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = GistIndex
def test_suffix(self):
self.assertEqual(GistIndex.suffix, "gist")
def test_deconstruction(self):
index = GistIndex(
fields=["title"], name="test_title_gist", buffering=False, fillfactor=80
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.GistIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"fields": ["title"],
"name": "test_title_gist",
"buffering": False,
"fillfactor": 80,
},
)
class HashIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = HashIndex
def test_suffix(self):
self.assertEqual(HashIndex.suffix, "hash")
def test_deconstruction(self):
index = HashIndex(fields=["title"], name="test_title_hash", fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.HashIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"fields": ["title"], "name": "test_title_hash", "fillfactor": 80}
)
class SpGistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = SpGistIndex
def test_suffix(self):
self.assertEqual(SpGistIndex.suffix, "spgist")
def test_deconstruction(self):
index = SpGistIndex(fields=["title"], name="test_title_spgist", fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, "django.contrib.postgres.indexes.SpGistIndex")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"fields": ["title"], "name": "test_title_spgist", "fillfactor": 80}
)
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class SchemaTests(PostgreSQLTestCase):
get_opclass_query = """
SELECT opcname, c.relname FROM pg_opclass AS oc
JOIN pg_index as i on oc.oid = ANY(i.indclass)
JOIN pg_class as c on c.oid = i.indexrelid
WHERE c.relname = %s
"""
def get_constraints(self, table):
"""
        Get the constraints (including indexes) on the table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_gin_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn(
"field", self.get_constraints(IntegerArrayModel._meta.db_table)
)
# Add the index
index_name = "integer_array_model_field_gin"
index = GinIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
# Check gin index was added
self.assertEqual(constraints[index_name]["type"], GinIndex.suffix)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(
index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
)
def test_gin_fastupdate(self):
index_name = "integer_array_gin_fastupdate"
index = GinIndex(fields=["field"], name=index_name, fastupdate=False)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
self.assertEqual(constraints[index_name]["options"], ["fastupdate=off"])
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(
index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
)
def test_partial_gin_index(self):
with register_lookup(CharField, Length):
index_name = "char_field_gin_partial_idx"
index = GinIndex(
fields=["field"], name=index_name, condition=Q(field__length=40)
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_partial_gin_index_with_tablespace(self):
with register_lookup(CharField, Length):
index_name = "char_field_gin_partial_idx"
index = GinIndex(
fields=["field"],
name=index_name,
condition=Q(field__length=40),
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
self.assertIn(
'TABLESPACE "pg_default" ',
str(index.create_sql(CharFieldModel, editor)),
)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gin_parameters(self):
index_name = "integer_array_gin_params"
index = GinIndex(
fields=["field"],
name=index_name,
fastupdate=True,
gin_pending_list_limit=64,
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
self.assertIn(
") WITH (gin_pending_list_limit = 64, fastupdate = on) TABLESPACE",
str(index.create_sql(IntegerArrayModel, editor)),
)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], "gin")
self.assertEqual(
constraints[index_name]["options"],
["gin_pending_list_limit=64", "fastupdate=on"],
)
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(
index_name, self.get_constraints(IntegerArrayModel._meta.db_table)
)
def test_trigram_op_class_gin_index(self):
index_name = "trigram_op_class_gin"
index = GinIndex(OpClass(F("scene"), name="gin_trgm_ops"), name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("gin_trgm_ops", index_name)])
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
self.assertIn(constraints[index_name]["type"], GinIndex.suffix)
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
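    # gin_trgm_ops is provided by the pg_trgm extension, which the test
    # database is assumed to have installed.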
def test_cast_search_vector_gin_index(self):
index_name = "cast_search_vector_gin"
index = GinIndex(Cast("field", SearchVectorField()), name=index_name)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
sql = index.create_sql(TextFieldModel, editor)
table = TextFieldModel._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
self.assertIn(constraints[index_name]["type"], GinIndex.suffix)
self.assertIs(sql.references_column(table, "field"), True)
self.assertIn("::tsvector", str(sql))
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(table))
def test_bloom_index(self):
index_name = "char_field_model_field_bloom"
index = BloomIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BloomIndex.suffix)
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_bloom_parameters(self):
index_name = "char_field_model_field_bloom_params"
index = BloomIndex(fields=["field"], name=index_name, length=512, columns=[3])
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BloomIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["length=512", "col1=3"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
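    # For bloom indexes, ``length`` is the signature length in bits and
    # ``col1`` is the number of bits generated for the first indexed column.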
def test_brin_index(self):
index_name = "char_field_model_field_brin"
index = BrinIndex(fields=["field"], name=index_name, pages_per_range=4)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BrinIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["pages_per_range=4"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_brin_parameters(self):
index_name = "char_field_brin_params"
index = BrinIndex(fields=["field"], name=index_name, autosummarize=True)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BrinIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["autosummarize=on"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_btree_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = "char_field_model_field_btree"
index = BTreeIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], BTreeIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_btree_parameters(self):
index_name = "integer_array_btree_fillfactor"
index = BTreeIndex(fields=["field"], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], BTreeIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gist_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = "char_field_model_field_gist"
index = GistIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_gist_parameters(self):
index_name = "integer_array_gist_buffering"
index = GistIndex(
fields=["field"], name=index_name, buffering=True, fillfactor=80
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
self.assertEqual(
constraints[index_name]["options"], ["buffering=on", "fillfactor=80"]
)
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
@skipUnlessDBFeature("supports_covering_gist_indexes")
def test_gist_include(self):
index_name = "scene_gist_include_setting"
index = GistIndex(name=index_name, fields=["scene"], include=["setting"])
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["type"], GistIndex.suffix)
self.assertEqual(constraints[index_name]["columns"], ["scene", "setting"])
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_gist_include_not_supported(self):
index_name = "gist_include_exception"
index = GistIndex(fields=["scene"], name=index_name, include=["setting"])
msg = "Covering GiST indexes require PostgreSQL 12+."
with self.assertRaisesMessage(NotSupportedError, msg):
with mock.patch(
"django.db.backends.postgresql.features.DatabaseFeatures."
"supports_covering_gist_indexes",
False,
):
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_tsvector_op_class_gist_index(self):
index_name = "tsvector_op_class_gist"
index = GistIndex(
OpClass(
SearchVector("scene", "setting", config="english"),
name="tsvector_ops",
),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
sql = index.create_sql(Scene, editor)
table = Scene._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
self.assertIn(constraints[index_name]["type"], GistIndex.suffix)
self.assertIs(sql.references_column(table, "scene"), True)
self.assertIs(sql.references_column(table, "setting"), True)
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(table))
def test_hash_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = "char_field_model_field_hash"
index = HashIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], HashIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_hash_parameters(self):
index_name = "integer_array_hash_fillfactor"
index = HashIndex(fields=["field"], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], HashIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(CharFieldModel._meta.db_table)
)
def test_spgist_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn("field", self.get_constraints(TextFieldModel._meta.db_table))
# Add the index.
index_name = "text_field_model_field_spgist"
index = SpGistIndex(fields=["field"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
constraints = self.get_constraints(TextFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(TextFieldModel._meta.db_table)
)
def test_spgist_parameters(self):
index_name = "text_field_model_spgist_fillfactor"
index = SpGistIndex(fields=["field"], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
self.assertEqual(constraints[index_name]["options"], ["fillfactor=80"])
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(
index_name, self.get_constraints(TextFieldModel._meta.db_table)
)
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_spgist_include(self):
index_name = "scene_spgist_include_setting"
index = SpGistIndex(name=index_name, fields=["scene"], include=["setting"])
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["type"], SpGistIndex.suffix)
self.assertEqual(constraints[index_name]["columns"], ["scene", "setting"])
with connection.schema_editor() as editor:
editor.remove_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_spgist_include_not_supported(self):
index_name = "spgist_include_exception"
index = SpGistIndex(fields=["scene"], name=index_name, include=["setting"])
msg = "Covering SP-GiST indexes require PostgreSQL 14+."
with self.assertRaisesMessage(NotSupportedError, msg):
with mock.patch(
"django.db.backends.postgresql.features.DatabaseFeatures."
"supports_covering_spgist_indexes",
False,
):
with connection.schema_editor() as editor:
editor.add_index(Scene, index)
self.assertNotIn(index_name, self.get_constraints(Scene._meta.db_table))
def test_op_class(self):
index_name = "test_op_class"
index = Index(
OpClass(Lower("field"), name="text_pattern_ops"),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
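    # Rough DDL equivalent of the index above (a sketch, assuming the default
    # table name for TextFieldModel):
    #   CREATE INDEX "test_op_class" ON "postgres_tests_textfieldmodel"
    #       ((LOWER("field")) text_pattern_ops);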
def test_op_class_descending_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
index_name = "test_op_class_descending_collation"
index = Index(
Collate(
OpClass(Lower("field"), name="text_pattern_ops").desc(nulls_last=True),
collation=collation,
),
name=index_name,
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
self.assertIn(
"COLLATE %s" % editor.quote_name(collation),
str(index.create_sql(TextFieldModel, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
table = TextFieldModel._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
with connection.schema_editor() as editor:
editor.remove_index(TextFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(table))
def test_op_class_descending_partial(self):
index_name = "test_op_class_descending_partial"
index = Index(
OpClass(Lower("field"), name="text_pattern_ops").desc(),
name=index_name,
condition=Q(field__contains="China"),
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
def test_op_class_descending_partial_tablespace(self):
index_name = "test_op_class_descending_partial_tablespace"
index = Index(
OpClass(Lower("field").desc(), name="text_pattern_ops"),
name=index_name,
condition=Q(field__contains="China"),
db_tablespace="pg_default",
)
with connection.schema_editor() as editor:
editor.add_index(TextFieldModel, index)
self.assertIn(
'TABLESPACE "pg_default" ',
str(index.create_sql(TextFieldModel, editor)),
)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [index_name])
self.assertCountEqual(cursor.fetchall(), [("text_pattern_ops", index_name)])
constraints = self.get_constraints(TextFieldModel._meta.db_table)
self.assertIn(index_name, constraints)
self.assertEqual(constraints[index_name]["orders"], ["DESC"])
|
d11b1ae050b6a98268ba736d77c9e2058cf95364b9b06f665eeda7fb11ea1901 | SECRET_KEY = "abcdefg"
INSTALLED_APPS = [
"django.contrib.postgres",
]
|
eff59028d9850df2025fe1b1fc2aa0d21257805a9c86e2a593b8dc1689525513 | from io import StringIO
from django.core.management import call_command
from django.test.utils import modify_settings
from . import PostgreSQLTestCase
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class InspectDBTests(PostgreSQLTestCase):
def assertFieldsInModel(self, model, field_outputs):
out = StringIO()
call_command(
"inspectdb",
table_name_filter=lambda tn: tn.startswith(model),
stdout=out,
)
output = out.getvalue()
for field_output in field_outputs:
self.assertIn(field_output, output)
def test_range_fields(self):
self.assertFieldsInModel(
"postgres_tests_rangesmodel",
[
"ints = django.contrib.postgres.fields.IntegerRangeField(blank=True, "
"null=True)",
"bigints = django.contrib.postgres.fields.BigIntegerRangeField("
"blank=True, null=True)",
"decimals = django.contrib.postgres.fields.DecimalRangeField("
"blank=True, null=True)",
"timestamps = django.contrib.postgres.fields.DateTimeRangeField("
"blank=True, null=True)",
"dates = django.contrib.postgres.fields.DateRangeField(blank=True, "
"null=True)",
],
)
|
8e5af5dddbfed0a4caf44729b0beea24966998bdfb95b303e709961e79c3905c | from decimal import Decimal
from django.db.backends.signals import connection_created
from django.db.migrations.writer import MigrationWriter
from django.test.utils import modify_settings
from . import PostgreSQLTestCase
try:
from psycopg2.extras import DateRange, DateTimeRange, DateTimeTZRange, NumericRange
from django.contrib.postgres.fields import (
DateRangeField,
DateTimeRangeField,
DecimalRangeField,
IntegerRangeField,
)
except ImportError:
pass
class PostgresConfigTests(PostgreSQLTestCase):
def test_register_type_handlers_connection(self):
from django.contrib.postgres.signals import register_type_handlers
self.assertNotIn(
register_type_handlers, connection_created._live_receivers(None)
)
with modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"}):
self.assertIn(
register_type_handlers, connection_created._live_receivers(None)
)
self.assertNotIn(
register_type_handlers, connection_created._live_receivers(None)
)
def test_register_serializer_for_migrations(self):
tests = (
(DateRange(empty=True), DateRangeField),
(DateTimeRange(empty=True), DateRangeField),
(DateTimeTZRange(None, None, "[]"), DateTimeRangeField),
(NumericRange(Decimal("1.0"), Decimal("5.0"), "()"), DecimalRangeField),
(NumericRange(1, 10), IntegerRangeField),
)
def assertNotSerializable():
for default, test_field in tests:
with self.subTest(default=default):
field = test_field(default=default)
with self.assertRaisesMessage(
ValueError, "Cannot serialize: %s" % default.__class__.__name__
):
MigrationWriter.serialize(field)
assertNotSerializable()
        with self.modify_settings(
            INSTALLED_APPS={"append": "django.contrib.postgres"}
        ):
for default, test_field in tests:
with self.subTest(default=default):
field = test_field(default=default)
serialized_field, imports = MigrationWriter.serialize(field)
self.assertEqual(
imports,
{
"import django.contrib.postgres.fields.ranges",
"import psycopg2.extras",
},
)
self.assertIn(
"%s.%s(default=psycopg2.extras.%r)"
% (
field.__module__,
field.__class__.__name__,
default,
),
serialized_field,
)
assertNotSerializable()
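    # Installing django.contrib.postgres registers a migration serializer for
    # psycopg2 range objects, which is why the defaults above serialize only
    # while the app is in INSTALLED_APPS.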
|
bf39059f66538e9254af8a1a78416b3dd0a6bdc52503ec3493242fb719e390b3 | """
The citext PostgreSQL extension provides case-insensitive text types with
index support, so lookups don't need modifiers such as iexact just to be
case-insensitive while still using an index.
"""
from django.db import IntegrityError
from django.test.utils import modify_settings
from . import PostgreSQLTestCase
from .models import CITestModel
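# CITestModel (imported above) is assumed to use the case-insensitive field
# variants, roughly along these lines (a sketch, not the actual definition):
#
#     class CITestModel(PostgreSQLModel):
#         name = CICharField(primary_key=True, max_length=255)
#         email = CIEmailField()
#         description = CITextField()
#         array_field = ArrayField(CITextField(), null=True)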
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class CITextTestCase(PostgreSQLTestCase):
case_sensitive_lookups = ("contains", "startswith", "endswith", "regex")
@classmethod
def setUpTestData(cls):
cls.john = CITestModel.objects.create(
name="JoHn",
email="[email protected]",
description="Average Joe named JoHn",
array_field=["JoE", "jOhn"],
)
def test_equal_lowercase(self):
"""
citext removes the need for iexact as the index is case-insensitive.
"""
self.assertEqual(
CITestModel.objects.filter(name=self.john.name.lower()).count(), 1
)
self.assertEqual(
CITestModel.objects.filter(email=self.john.email.lower()).count(), 1
)
self.assertEqual(
CITestModel.objects.filter(
description=self.john.description.lower()
).count(),
1,
)
def test_fail_citext_primary_key(self):
"""
Creating an entry for a citext field used as a primary key which
clashes with an existing value isn't allowed.
"""
with self.assertRaises(IntegrityError):
CITestModel.objects.create(name="John")
def test_array_field(self):
instance = CITestModel.objects.get()
self.assertEqual(instance.array_field, self.john.array_field)
self.assertTrue(
CITestModel.objects.filter(array_field__contains=["joe"]).exists()
)
def test_lookups_name_char(self):
for lookup in self.case_sensitive_lookups:
with self.subTest(lookup=lookup):
query = {"name__{}".format(lookup): "john"}
self.assertSequenceEqual(
CITestModel.objects.filter(**query), [self.john]
)
def test_lookups_description_text(self):
for lookup, string in zip(
self.case_sensitive_lookups, ("average", "average joe", "john", "Joe.named")
):
with self.subTest(lookup=lookup, string=string):
query = {"description__{}".format(lookup): string}
self.assertSequenceEqual(
CITestModel.objects.filter(**query), [self.john]
)
def test_lookups_email(self):
for lookup, string in zip(
self.case_sensitive_lookups, ("john", "john", "john.com", "john.com")
):
with self.subTest(lookup=lookup, string=string):
query = {"email__{}".format(lookup): string}
self.assertSequenceEqual(
CITestModel.objects.filter(**query), [self.john]
)
|
e811dc433231286bdf90fca9a46a4035d3ba0e439cad79ecb1808e1ee15661c1 | import os
import subprocess
import sys
from . import PostgreSQLSimpleTestCase
class PostgresIntegrationTests(PostgreSQLSimpleTestCase):
def test_check(self):
test_environ = os.environ.copy()
if "DJANGO_SETTINGS_MODULE" in test_environ:
del test_environ["DJANGO_SETTINGS_MODULE"]
test_environ["PYTHONPATH"] = os.path.join(os.path.dirname(__file__), "../../")
result = subprocess.run(
[
sys.executable,
"-m",
"django",
"check",
"--settings",
"integration_settings",
],
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE,
cwd=os.path.dirname(__file__),
env=test_environ,
encoding="utf-8",
)
self.assertEqual(result.returncode, 0, msg=result.stderr)
|
7a1f080fe07a40351b7df819f518252474c7bf5875b01875428e85bfc117eb69 | import json
from django.core import checks, exceptions, serializers
from django.db import connection
from django.db.models import F, OuterRef, Subquery
from django.db.models.expressions import RawSQL
from django.forms import Form
from django.test.utils import CaptureQueriesContext, isolate_apps
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import HStoreModel, PostgreSQLModel
try:
from django.contrib.postgres import forms
from django.contrib.postgres.fields import HStoreField
from django.contrib.postgres.fields.hstore import KeyTransform
from django.contrib.postgres.validators import KeysValidator
except ImportError:
pass
class SimpleTests(PostgreSQLTestCase):
def test_save_load_success(self):
value = {"a": "b"}
instance = HStoreModel(field=value)
instance.save()
reloaded = HStoreModel.objects.get()
self.assertEqual(reloaded.field, value)
def test_null(self):
instance = HStoreModel(field=None)
instance.save()
reloaded = HStoreModel.objects.get()
self.assertIsNone(reloaded.field)
def test_value_null(self):
value = {"a": None}
instance = HStoreModel(field=value)
instance.save()
reloaded = HStoreModel.objects.get()
self.assertEqual(reloaded.field, value)
def test_key_val_cast_to_string(self):
value = {"a": 1, "b": "B", 2: "c", "ï": "ê"}
expected_value = {"a": "1", "b": "B", "2": "c", "ï": "ê"}
instance = HStoreModel.objects.create(field=value)
instance = HStoreModel.objects.get()
self.assertEqual(instance.field, expected_value)
instance = HStoreModel.objects.get(field__a=1)
self.assertEqual(instance.field, expected_value)
instance = HStoreModel.objects.get(field__has_keys=[2, "a", "ï"])
self.assertEqual(instance.field, expected_value)
def test_array_field(self):
value = [
{"a": 1, "b": "B", 2: "c", "ï": "ê"},
{"a": 1, "b": "B", 2: "c", "ï": "ê"},
]
expected_value = [
{"a": "1", "b": "B", "2": "c", "ï": "ê"},
{"a": "1", "b": "B", "2": "c", "ï": "ê"},
]
instance = HStoreModel.objects.create(array_field=value)
instance.refresh_from_db()
self.assertEqual(instance.array_field, expected_value)
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = HStoreModel.objects.bulk_create(
[
HStoreModel(field={"a": "b"}),
HStoreModel(field={"a": "b", "c": "d"}),
HStoreModel(field={"c": "d"}),
HStoreModel(field={}),
HStoreModel(field=None),
HStoreModel(field={"cat": "TigrOu", "breed": "birman"}),
HStoreModel(field={"cat": "minou", "breed": "ragdoll"}),
HStoreModel(field={"cat": "kitty", "breed": "Persian"}),
HStoreModel(field={"cat": "Kit Kat", "breed": "persian"}),
]
)
def test_exact(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__exact={"a": "b"}), self.objs[:1]
)
def test_contained_by(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__contained_by={"a": "b", "c": "d"}),
self.objs[:4],
)
def test_contains(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__contains={"a": "b"}), self.objs[:2]
)
def test_in_generator(self):
def search():
yield {"a": "b"}
self.assertSequenceEqual(
HStoreModel.objects.filter(field__in=search()), self.objs[:1]
)
def test_has_key(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_key="c"), self.objs[1:3]
)
def test_has_keys(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_keys=["a", "c"]), self.objs[1:2]
)
def test_has_any_keys(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_any_keys=["a", "c"]), self.objs[:3]
)
def test_key_transform(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a="b"), self.objs[:2]
)
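    # The key transform above compiles to the hstore ``->`` operator, roughly:
    #   WHERE "field" -> 'a' = 'b'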
def test_key_transform_raw_expression(self):
expr = RawSQL("%s::hstore", ["x => b, y => c"])
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a=KeyTransform("x", expr)), self.objs[:2]
)
def test_key_transform_annotation(self):
qs = HStoreModel.objects.annotate(a=F("field__a"))
self.assertCountEqual(
qs.values_list("a", flat=True),
["b", "b", None, None, None, None, None, None, None],
)
def test_keys(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__keys=["a"]), self.objs[:1]
)
def test_values(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__values=["b"]), self.objs[:1]
)
def test_field_chaining_contains(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a__contains="b"), self.objs[:2]
)
def test_field_chaining_icontains(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__cat__icontains="INo"),
[self.objs[6]],
)
def test_field_chaining_startswith(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__cat__startswith="kit"),
[self.objs[7]],
)
def test_field_chaining_istartswith(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__cat__istartswith="kit"),
self.objs[7:],
)
def test_field_chaining_endswith(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__cat__endswith="ou"),
[self.objs[6]],
)
def test_field_chaining_iendswith(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__cat__iendswith="ou"),
self.objs[5:7],
)
def test_field_chaining_iexact(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__breed__iexact="persian"),
self.objs[7:],
)
def test_field_chaining_regex(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__cat__regex=r"ou$"),
[self.objs[6]],
)
def test_field_chaining_iregex(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__cat__iregex=r"oU$"),
self.objs[5:7],
)
def test_order_by_field(self):
more_objs = (
HStoreModel.objects.create(field={"g": "637"}),
HStoreModel.objects.create(field={"g": "002"}),
HStoreModel.objects.create(field={"g": "042"}),
HStoreModel.objects.create(field={"g": "981"}),
)
self.assertSequenceEqual(
HStoreModel.objects.filter(field__has_key="g").order_by("field__g"),
[more_objs[1], more_objs[2], more_objs[0], more_objs[3]],
)
def test_keys_contains(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__keys__contains=["a"]), self.objs[:2]
)
def test_values_overlap(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(field__values__overlap=["b", "d"]), self.objs[:3]
)
def test_key_isnull(self):
obj = HStoreModel.objects.create(field={"a": None})
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a__isnull=True),
self.objs[2:9] + [obj],
)
self.assertSequenceEqual(
HStoreModel.objects.filter(field__a__isnull=False), self.objs[:2]
)
def test_usage_in_subquery(self):
self.assertSequenceEqual(
HStoreModel.objects.filter(id__in=HStoreModel.objects.filter(field__a="b")),
self.objs[:2],
)
def test_key_sql_injection(self):
with CaptureQueriesContext(connection) as queries:
self.assertFalse(
HStoreModel.objects.filter(
**{
"field__test' = 'a') OR 1 = 1 OR ('d": "x",
}
).exists()
)
self.assertIn(
"""."field" -> 'test'' = ''a'') OR 1 = 1 OR (''d') = 'x' """,
queries[0]["sql"],
)
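    # The assertion above confirms the key reaches SQL as a quoted literal on
    # the left-hand side of ``->`` rather than being interpolated unescaped.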
def test_obj_subquery_lookup(self):
qs = HStoreModel.objects.annotate(
value=Subquery(
HStoreModel.objects.filter(pk=OuterRef("pk")).values("field")
),
).filter(value__a="b")
self.assertSequenceEqual(qs, self.objs[:2])
@isolate_apps("postgres_tests")
class TestChecks(PostgreSQLSimpleTestCase):
def test_invalid_default(self):
class MyModel(PostgreSQLModel):
field = HStoreField(default={})
model = MyModel()
self.assertEqual(
model.check(),
[
checks.Warning(
msg=(
"HStoreField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint="Use a callable instead, e.g., use `dict` instead of `{}`.",
obj=MyModel._meta.get_field("field"),
id="fields.E010",
)
],
)
def test_valid_default(self):
class MyModel(PostgreSQLModel):
field = HStoreField(default=dict)
self.assertEqual(MyModel().check(), [])
class TestSerialization(PostgreSQLSimpleTestCase):
test_data = json.dumps(
[
{
"model": "postgres_tests.hstoremodel",
"pk": None,
"fields": {
"field": json.dumps({"a": "b"}),
"array_field": json.dumps(
[
json.dumps({"a": "b"}),
json.dumps({"b": "a"}),
]
),
},
}
]
)
def test_dumping(self):
instance = HStoreModel(field={"a": "b"}, array_field=[{"a": "b"}, {"b": "a"}])
data = serializers.serialize("json", [instance])
self.assertEqual(json.loads(data), json.loads(self.test_data))
def test_loading(self):
instance = list(serializers.deserialize("json", self.test_data))[0].object
self.assertEqual(instance.field, {"a": "b"})
self.assertEqual(instance.array_field, [{"a": "b"}, {"b": "a"}])
def test_roundtrip_with_null(self):
instance = HStoreModel(field={"a": "b", "c": None})
data = serializers.serialize("json", [instance])
new_instance = list(serializers.deserialize("json", data))[0].object
self.assertEqual(instance.field, new_instance.field)
class TestValidation(PostgreSQLSimpleTestCase):
def test_not_a_string(self):
field = HStoreField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean({"a": 1}, None)
self.assertEqual(cm.exception.code, "not_a_string")
self.assertEqual(
cm.exception.message % cm.exception.params,
"The value of “a” is not a string or null.",
)
def test_none_allowed_as_value(self):
field = HStoreField()
self.assertEqual(field.clean({"a": None}, None), {"a": None})
class TestFormField(PostgreSQLSimpleTestCase):
def test_valid(self):
field = forms.HStoreField()
value = field.clean('{"a": "b"}')
self.assertEqual(value, {"a": "b"})
def test_invalid_json(self):
field = forms.HStoreField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('{"a": "b"')
self.assertEqual(cm.exception.messages[0], "Could not load JSON data.")
self.assertEqual(cm.exception.code, "invalid_json")
def test_non_dict_json(self):
field = forms.HStoreField()
msg = "Input must be a JSON dictionary."
with self.assertRaisesMessage(exceptions.ValidationError, msg) as cm:
field.clean('["a", "b", 1]')
self.assertEqual(cm.exception.code, "invalid_format")
def test_not_string_values(self):
field = forms.HStoreField()
value = field.clean('{"a": 1}')
self.assertEqual(value, {"a": "1"})
def test_none_value(self):
field = forms.HStoreField()
value = field.clean('{"a": null}')
self.assertEqual(value, {"a": None})
def test_empty(self):
field = forms.HStoreField(required=False)
value = field.clean("")
self.assertEqual(value, {})
def test_model_field_formfield(self):
model_field = HStoreField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, forms.HStoreField)
def test_field_has_changed(self):
class HStoreFormTest(Form):
f1 = forms.HStoreField()
form_w_hstore = HStoreFormTest()
self.assertFalse(form_w_hstore.has_changed())
form_w_hstore = HStoreFormTest({"f1": '{"a": 1}'})
self.assertTrue(form_w_hstore.has_changed())
form_w_hstore = HStoreFormTest({"f1": '{"a": 1}'}, initial={"f1": '{"a": 1}'})
self.assertFalse(form_w_hstore.has_changed())
form_w_hstore = HStoreFormTest({"f1": '{"a": 2}'}, initial={"f1": '{"a": 1}'})
self.assertTrue(form_w_hstore.has_changed())
form_w_hstore = HStoreFormTest({"f1": '{"a": 1}'}, initial={"f1": {"a": 1}})
self.assertFalse(form_w_hstore.has_changed())
form_w_hstore = HStoreFormTest({"f1": '{"a": 2}'}, initial={"f1": {"a": 1}})
self.assertTrue(form_w_hstore.has_changed())
class TestValidator(PostgreSQLSimpleTestCase):
def test_simple_valid(self):
validator = KeysValidator(keys=["a", "b"])
validator({"a": "foo", "b": "bar", "c": "baz"})
def test_missing_keys(self):
validator = KeysValidator(keys=["a", "b"])
with self.assertRaises(exceptions.ValidationError) as cm:
validator({"a": "foo", "c": "baz"})
self.assertEqual(cm.exception.messages[0], "Some keys were missing: b")
self.assertEqual(cm.exception.code, "missing_keys")
def test_strict_valid(self):
validator = KeysValidator(keys=["a", "b"], strict=True)
validator({"a": "foo", "b": "bar"})
def test_extra_keys(self):
validator = KeysValidator(keys=["a", "b"], strict=True)
with self.assertRaises(exceptions.ValidationError) as cm:
validator({"a": "foo", "b": "bar", "c": "baz"})
self.assertEqual(cm.exception.messages[0], "Some unknown keys were provided: c")
self.assertEqual(cm.exception.code, "extra_keys")
def test_custom_messages(self):
messages = {
"missing_keys": "Foobar",
}
validator = KeysValidator(keys=["a", "b"], strict=True, messages=messages)
with self.assertRaises(exceptions.ValidationError) as cm:
validator({"a": "foo", "c": "baz"})
self.assertEqual(cm.exception.messages[0], "Foobar")
self.assertEqual(cm.exception.code, "missing_keys")
with self.assertRaises(exceptions.ValidationError) as cm:
validator({"a": "foo", "b": "bar", "c": "baz"})
self.assertEqual(cm.exception.messages[0], "Some unknown keys were provided: c")
self.assertEqual(cm.exception.code, "extra_keys")
def test_deconstruct(self):
messages = {
"missing_keys": "Foobar",
}
validator = KeysValidator(keys=["a", "b"], strict=True, messages=messages)
path, args, kwargs = validator.deconstruct()
self.assertEqual(path, "django.contrib.postgres.validators.KeysValidator")
self.assertEqual(args, ())
self.assertEqual(
kwargs, {"keys": ["a", "b"], "strict": True, "messages": messages}
)
|
7789018a278c6abec769060e0ec7ae224920802a27f196368e390503c85dbf6c | from datetime import date
from . import PostgreSQLTestCase
from .models import (
HStoreModel,
IntegerArrayModel,
NestedIntegerArrayModel,
NullableIntegerArrayModel,
OtherTypesArrayModel,
RangesModel,
)
try:
from psycopg2.extras import DateRange, NumericRange
except ImportError:
pass # psycopg2 isn't installed.
class BulkSaveTests(PostgreSQLTestCase):
def test_bulk_update(self):
test_data = [
(IntegerArrayModel, "field", [], [1, 2, 3]),
(NullableIntegerArrayModel, "field", [1, 2, 3], None),
(NestedIntegerArrayModel, "field", [], [[1, 2, 3]]),
(HStoreModel, "field", {}, {1: 2}),
(RangesModel, "ints", None, NumericRange(lower=1, upper=10)),
(
RangesModel,
"dates",
None,
DateRange(lower=date.today(), upper=date.today()),
),
(OtherTypesArrayModel, "ips", [], ["1.2.3.4"]),
(OtherTypesArrayModel, "json", [], [{"a": "b"}]),
]
for Model, field, initial, new in test_data:
with self.subTest(model=Model, field=field):
instances = Model.objects.bulk_create(
Model(**{field: initial}) for _ in range(20)
)
for instance in instances:
setattr(instance, field, new)
Model.objects.bulk_update(instances, [field])
self.assertSequenceEqual(
Model.objects.filter(**{field: new}), instances
)
|
4e18cc678185d77610bbfbf3731303ae794bbda8efd994cec5d3f10c8afa1007 | import datetime
from unittest import mock
from django.contrib.postgres.indexes import OpClass
from django.db import IntegrityError, NotSupportedError, connection, transaction
from django.db.models import (
CheckConstraint,
Deferrable,
F,
Func,
IntegerField,
Q,
UniqueConstraint,
)
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Cast, Left, Lower
from django.test import ignore_warnings, modify_settings, skipUnlessDBFeature
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango50Warning
from . import PostgreSQLTestCase
from .models import HotelReservation, IntegerArrayModel, RangesModel, Room, Scene
try:
from psycopg2.extras import DateRange, NumericRange
from django.contrib.postgres.constraints import ExclusionConstraint
from django.contrib.postgres.fields import (
DateTimeRangeField,
RangeBoundary,
RangeOperators,
)
except ImportError:
pass
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class SchemaTests(PostgreSQLTestCase):
get_opclass_query = """
SELECT opcname, c.relname FROM pg_opclass AS oc
JOIN pg_index as i on oc.oid = ANY(i.indclass)
JOIN pg_class as c on c.oid = i.indexrelid
WHERE c.relname = %s
"""
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_check_constraint_range_value(self):
constraint_name = "ints_between"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = CheckConstraint(
check=Q(ints__contained_by=NumericRange(10, 30)),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(20, 50))
RangesModel.objects.create(ints=(10, 30))
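    # Rough SQL for the constraint above (a sketch; contained_by maps to the
    # range operator <@):
    #   ALTER TABLE "postgres_tests_rangesmodel" ADD CONSTRAINT "ints_between"
    #       CHECK ("ints" <@ '[10, 30)'::int4range);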
def test_check_constraint_daterange_contains(self):
constraint_name = "dates_contains"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = CheckConstraint(
check=Q(dates__contains=F("dates_inner")),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
date_1 = datetime.date(2016, 1, 1)
date_2 = datetime.date(2016, 1, 4)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2.replace(day=5)),
)
RangesModel.objects.create(
dates=(date_1, date_2),
dates_inner=(date_1, date_2),
)
def test_check_constraint_datetimerange_contains(self):
constraint_name = "timestamps_contains"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = CheckConstraint(
check=Q(timestamps__contains=F("timestamps_inner")),
name=constraint_name,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
datetime_1 = datetime.datetime(2016, 1, 1)
datetime_2 = datetime.datetime(2016, 1, 2, 12)
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2.replace(hour=13)),
)
RangesModel.objects.create(
timestamps=(datetime_1, datetime_2),
timestamps_inner=(datetime_1, datetime_2),
)
def test_opclass(self):
constraint = UniqueConstraint(
name="test_opclass",
fields=["scene"],
opclasses=["varchar_pattern_ops"],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
self.assertIn(constraint.name, self.get_constraints(Scene._meta.db_table))
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[("varchar_pattern_ops", constraint.name)],
)
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Scene, constraint)
self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))
def test_opclass_multiple_columns(self):
constraint = UniqueConstraint(
name="test_opclass_multiple",
fields=["scene", "setting"],
opclasses=["varchar_pattern_ops", "text_pattern_ops"],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
expected_opclasses = (
("varchar_pattern_ops", constraint.name),
("text_pattern_ops", constraint.name),
)
self.assertCountEqual(cursor.fetchall(), expected_opclasses)
def test_opclass_partial(self):
constraint = UniqueConstraint(
name="test_opclass_partial",
fields=["scene"],
opclasses=["varchar_pattern_ops"],
condition=Q(setting__contains="Sir Bedemir's Castle"),
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertCountEqual(
cursor.fetchall(),
[("varchar_pattern_ops", constraint.name)],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_opclass_include(self):
constraint = UniqueConstraint(
name="test_opclass_include",
fields=["scene"],
opclasses=["varchar_pattern_ops"],
include=["setting"],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertCountEqual(
cursor.fetchall(),
[("varchar_pattern_ops", constraint.name)],
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_opclass_func(self):
constraint = UniqueConstraint(
OpClass(Lower("scene"), name="text_pattern_ops"),
name="test_opclass_func",
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
constraints = self.get_constraints(Scene._meta.db_table)
self.assertIs(constraints[constraint.name]["unique"], True)
self.assertIn(constraint.name, constraints)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[("text_pattern_ops", constraint.name)],
)
Scene.objects.create(scene="Scene 10", setting="The dark forest of Ewing")
with self.assertRaises(IntegrityError), transaction.atomic():
Scene.objects.create(scene="ScEnE 10", setting="Sir Bedemir's Castle")
Scene.objects.create(scene="Scene 5", setting="Sir Bedemir's Castle")
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Scene, constraint)
self.assertNotIn(constraint.name, self.get_constraints(Scene._meta.db_table))
Scene.objects.create(scene="ScEnE 10", setting="Sir Bedemir's Castle")
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class ExclusionConstraintTests(PostgreSQLTestCase):
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_invalid_condition(self):
msg = "ExclusionConstraint.condition must be a Q instance."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type="GIST",
name="exclude_invalid_condition",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
condition=F("invalid"),
)
def test_invalid_index_type(self):
msg = "Exclusion constraints only support GiST or SP-GiST indexes."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type="gin",
name="exclude_invalid_index_type",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
)
def test_invalid_expressions(self):
msg = "The expressions must be a list of 2-tuples."
for expressions in (["foo"], [("foo")], [("foo_1", "foo_2", "foo_3")]):
with self.subTest(expressions), self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
index_type="GIST",
name="exclude_invalid_expressions",
expressions=expressions,
)
def test_empty_expressions(self):
msg = "At least one expression is required to define an exclusion constraint."
for empty_expressions in (None, []):
with self.subTest(empty_expressions), self.assertRaisesMessage(
ValueError, msg
):
ExclusionConstraint(
index_type="GIST",
name="exclude_empty_expressions",
expressions=empty_expressions,
)
def test_invalid_deferrable(self):
msg = "ExclusionConstraint.deferrable must be a Deferrable instance."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_deferrable",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
deferrable="invalid",
)
def test_deferrable_with_condition(self):
msg = "ExclusionConstraint with conditions cannot be deferred."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_condition",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
condition=Q(cancelled=False),
deferrable=Deferrable.DEFERRED,
)
def test_invalid_include_type(self):
msg = "ExclusionConstraint.include must be a list or tuple."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_include",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
include="invalid",
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_invalid_opclasses_type(self):
msg = "ExclusionConstraint.opclasses must be a list or tuple."
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_opclasses",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
opclasses="invalid",
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_opclasses_and_expressions_same_length(self):
msg = (
"ExclusionConstraint.expressions and "
"ExclusionConstraint.opclasses must have the same number of "
"elements."
)
with self.assertRaisesMessage(ValueError, msg):
ExclusionConstraint(
name="exclude_invalid_expressions_opclasses_length",
expressions=[(F("datespan"), RangeOperators.OVERLAPS)],
opclasses=["foo", "bar"],
)
def test_repr(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '&&'), (F(room), '=')] name='exclude_overlapping'>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
condition=Q(cancelled=False),
index_type="SPGiST",
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='SPGiST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"condition=(AND: ('cancelled', False))>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
deferrable=Deferrable.IMMEDIATE,
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"deferrable=Deferrable.IMMEDIATE>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
include=["cancelled", "room"],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"include=('cancelled', 'room')>",
)
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(OpClass("datespan", name="range_ops"), RangeOperators.ADJACENT_TO),
],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(OpClass(F(datespan), name=range_ops), '-|-')] "
"name='exclude_overlapping'>",
)
def test_eq(self):
constraint_1 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(F("room"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
constraint_2 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
)
constraint_3 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
condition=Q(cancelled=False),
)
constraint_4 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
deferrable=Deferrable.DEFERRED,
)
constraint_5 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
deferrable=Deferrable.IMMEDIATE,
)
constraint_6 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
deferrable=Deferrable.IMMEDIATE,
include=["cancelled"],
)
constraint_7 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
include=["cancelled"],
)
with ignore_warnings(category=RemovedInDjango50Warning):
constraint_8 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
include=["cancelled"],
opclasses=["range_ops", "range_ops"],
)
constraint_9 = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
opclasses=["range_ops", "range_ops"],
)
self.assertNotEqual(constraint_2, constraint_9)
self.assertNotEqual(constraint_7, constraint_8)
self.assertEqual(constraint_1, constraint_1)
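# mock.ANY compares equal to any object, so this equality check passes.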
self.assertEqual(constraint_1, mock.ANY)
self.assertNotEqual(constraint_1, constraint_2)
self.assertNotEqual(constraint_1, constraint_3)
self.assertNotEqual(constraint_1, constraint_4)
self.assertNotEqual(constraint_2, constraint_3)
self.assertNotEqual(constraint_2, constraint_4)
self.assertNotEqual(constraint_2, constraint_7)
self.assertNotEqual(constraint_4, constraint_5)
self.assertNotEqual(constraint_5, constraint_6)
self.assertNotEqual(constraint_1, object())
def test_deconstruct(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
},
)
def test_deconstruct_index_type(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
index_type="SPGIST",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"index_type": "SPGIST",
"expressions": [
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
},
)
def test_deconstruct_condition(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [
("datespan", RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
"condition": Q(cancelled=False),
},
)
def test_deconstruct_deferrable(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
deferrable=Deferrable.DEFERRED,
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [("datespan", RangeOperators.OVERLAPS)],
"deferrable": Deferrable.DEFERRED,
},
)
def test_deconstruct_include(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
include=["cancelled", "room"],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [("datespan", RangeOperators.OVERLAPS)],
"include": ("cancelled", "room"),
},
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_deconstruct_opclasses(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[("datespan", RangeOperators.OVERLAPS)],
opclasses=["range_ops"],
)
path, args, kwargs = constraint.deconstruct()
self.assertEqual(
path, "django.contrib.postgres.constraints.ExclusionConstraint"
)
self.assertEqual(args, ())
self.assertEqual(
kwargs,
{
"name": "exclude_overlapping",
"expressions": [("datespan", RangeOperators.OVERLAPS)],
"opclasses": ["range_ops"],
},
)
def _test_range_overlaps(self, constraint):
# Create exclusion constraint.
self.assertNotIn(
constraint.name, self.get_constraints(HotelReservation._meta.db_table)
)
with connection.schema_editor() as editor:
editor.add_constraint(HotelReservation, constraint)
self.assertIn(
constraint.name, self.get_constraints(HotelReservation._meta.db_table)
)
# Add initial reservations.
room101 = Room.objects.create(number=101)
room102 = Room.objects.create(number=102)
datetimes = [
timezone.datetime(2018, 6, 20),
timezone.datetime(2018, 6, 24),
timezone.datetime(2018, 6, 26),
timezone.datetime(2018, 6, 28),
timezone.datetime(2018, 6, 29),
]
HotelReservation.objects.create(
datespan=DateRange(datetimes[0].date(), datetimes[1].date()),
start=datetimes[0],
end=datetimes[1],
room=room102,
)
HotelReservation.objects.create(
datespan=DateRange(datetimes[1].date(), datetimes[3].date()),
start=datetimes[1],
end=datetimes[3],
room=room102,
)
# Overlap dates.
with self.assertRaises(IntegrityError), transaction.atomic():
reservation = HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
)
reservation.save()
# Valid range.
HotelReservation.objects.bulk_create(
[
# Other room.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[2].date()),
start=datetimes[1],
end=datetimes[2],
room=room101,
),
# Cancelled reservation.
HotelReservation(
datespan=(datetimes[1].date(), datetimes[1].date()),
start=datetimes[1],
end=datetimes[2],
room=room102,
cancelled=True,
),
# Other adjacent dates.
HotelReservation(
datespan=(datetimes[3].date(), datetimes[4].date()),
start=datetimes[3],
end=datetimes[4],
room=room102,
),
]
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_range_overlaps_custom_opclasses(self):
class TsTzRange(Func):
function = "TSTZRANGE"
output_field = DateTimeRangeField()
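# TSTZRANGE(start, end, bounds) builds a timestamptz range from the start/end
# columns (RangeBoundary() supplies the bounds), so reservations can be
# compared with the range overlap operator.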
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations_custom",
expressions=[
(TsTzRange("start", "end", RangeBoundary()), RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
opclasses=["range_ops", "gist_int4_ops"],
)
self._test_range_overlaps(constraint)
def test_range_overlaps_custom(self):
class TsTzRange(Func):
function = "TSTZRANGE"
output_field = DateTimeRangeField()
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations_custom_opclass",
expressions=[
(
OpClass(TsTzRange("start", "end", RangeBoundary()), "range_ops"),
RangeOperators.OVERLAPS,
),
(OpClass("room", "gist_int4_ops"), RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_overlaps(self):
constraint = ExclusionConstraint(
name="exclude_overlapping_reservations",
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
("room", RangeOperators.EQUAL),
],
condition=Q(cancelled=False),
)
self._test_range_overlaps(constraint)
def test_range_adjacent(self):
constraint_name = "ints_adjacent"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
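# (10, 20) is adjacent to (20, 50) and violates the constraint; (10, 19) and
# (51, 60) leave gaps and are allowed.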
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
def test_expressions_with_params(self):
constraint_name = "scene_left_equal"
self.assertNotIn(constraint_name, self.get_constraints(Scene._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[(Left("scene", 4), RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(Scene, constraint)
self.assertIn(constraint_name, self.get_constraints(Scene._meta.db_table))
def test_expressions_with_key_transform(self):
constraint_name = "exclude_overlapping_reservations_smoking"
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(F("datespan"), RangeOperators.OVERLAPS),
(KeyTextTransform("smoking", "requirements"), RangeOperators.EQUAL),
],
)
with connection.schema_editor() as editor:
editor.add_constraint(HotelReservation, constraint)
self.assertIn(
constraint_name,
self.get_constraints(HotelReservation._meta.db_table),
)
def test_index_transform(self):
constraint_name = "first_index_equal"
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("field__0", RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(IntegerArrayModel, constraint)
self.assertIn(
constraint_name,
self.get_constraints(IntegerArrayModel._meta.db_table),
)
def test_range_adjacent_initially_deferred(self):
constraint_name = "ints_adjacent_deferred"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
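# The deferred constraint allows the adjacent (violating) range to be inserted;
# the check is postponed until SET CONSTRAINTS ... IMMEDIATE or commit.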
adjacent_range = RangesModel.objects.create(ints=(10, 20))
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(constraint_name)
cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
# Remove adjacent range before the end of transaction.
adjacent_range.delete()
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
@skipUnlessDBFeature("supports_covering_gist_indexes")
def test_range_adjacent_gist_include(self):
constraint_name = "ints_adjacent_gist_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["decimals", "ints"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_include(self):
constraint_name = "ints_adjacent_spgist_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["decimals", "ints"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
@skipUnlessDBFeature("supports_covering_gist_indexes")
def test_range_adjacent_gist_include_condition(self):
constraint_name = "ints_adjacent_gist_include_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["decimals"],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_include_condition(self):
constraint_name = "ints_adjacent_spgist_include_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["decimals"],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature("supports_covering_gist_indexes")
def test_range_adjacent_gist_include_deferrable(self):
constraint_name = "ints_adjacent_gist_include_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["decimals"],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_include_deferrable(self):
constraint_name = "ints_adjacent_spgist_include_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["decimals"],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_gist_include_not_supported(self):
constraint_name = "ints_adjacent_gist_include_not_supported"
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
include=["id"],
)
msg = (
"Covering exclusion constraints using a GiST index require "
"PostgreSQL 12+."
)
with connection.schema_editor() as editor:
with mock.patch(
"django.db.backends.postgresql.features.DatabaseFeatures."
"supports_covering_gist_indexes",
False,
):
with self.assertRaisesMessage(NotSupportedError, msg):
editor.add_constraint(RangesModel, constraint)
def test_spgist_include_not_supported(self):
constraint_name = "ints_adjacent_spgist_include_not_supported"
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
include=["id"],
)
msg = (
"Covering exclusion constraints using an SP-GiST index require "
"PostgreSQL 14+."
)
with connection.schema_editor() as editor:
with mock.patch(
"django.db.backends.postgresql.features.DatabaseFeatures."
"supports_covering_spgist_indexes",
False,
):
with self.assertRaisesMessage(NotSupportedError, msg):
editor.add_constraint(RangesModel, constraint)
def test_range_adjacent_opclass(self):
constraint_name = "ints_adjacent_opclass"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
constraints = self.get_constraints(RangesModel._meta.db_table)
self.assertIn(constraint_name, constraints)
with editor.connection.cursor() as cursor:
cursor.execute(SchemaTests.get_opclass_query, [constraint_name])
self.assertEqual(
cursor.fetchall(),
[("range_ops", constraint_name)],
)
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
def test_range_adjacent_opclass_condition(self):
constraint_name = "ints_adjacent_opclass_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_adjacent_opclass_deferrable(self):
constraint_name = "ints_adjacent_opclass_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature("supports_covering_gist_indexes")
def test_range_adjacent_gist_opclass_include(self):
constraint_name = "ints_adjacent_gist_opclass_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
index_type="gist",
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_opclass_include(self):
constraint_name = "ints_adjacent_spgist_opclass_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[
(OpClass("ints", name="range_ops"), RangeOperators.ADJACENT_TO),
],
index_type="spgist",
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
def test_range_equal_cast(self):
constraint_name = "exclusion_equal_room_cast"
self.assertNotIn(constraint_name, self.get_constraints(Room._meta.db_table))
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[(Cast("number", IntegerField()), RangeOperators.EQUAL)],
)
with connection.schema_editor() as editor:
editor.add_constraint(Room, constraint)
self.assertIn(constraint_name, self.get_constraints(Room._meta.db_table))
@modify_settings(INSTALLED_APPS={"append": "django.contrib.postgres"})
class ExclusionConstraintOpclassesDeprecationTests(PostgreSQLTestCase):
def get_constraints(self, table):
"""Get the constraints on the table using a new cursor."""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_warning(self):
msg = (
"The opclasses argument is deprecated in favor of using "
"django.contrib.postgres.indexes.OpClass in "
"ExclusionConstraint.expressions."
)
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_repr(self):
constraint = ExclusionConstraint(
name="exclude_overlapping",
expressions=[(F("datespan"), RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
)
self.assertEqual(
repr(constraint),
"<ExclusionConstraint: index_type='GIST' expressions=["
"(F(datespan), '-|-')] name='exclude_overlapping' "
"opclasses=['range_ops']>",
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_range_adjacent_opclasses(self):
constraint_name = "ints_adjacent_opclasses"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
constraints = self.get_constraints(RangesModel._meta.db_table)
self.assertIn(constraint_name, constraints)
with editor.connection.cursor() as cursor:
cursor.execute(SchemaTests.get_opclass_query, [constraint.name])
self.assertEqual(
cursor.fetchall(),
[("range_ops", constraint.name)],
)
RangesModel.objects.create(ints=(20, 50))
with self.assertRaises(IntegrityError), transaction.atomic():
RangesModel.objects.create(ints=(10, 20))
RangesModel.objects.create(ints=(10, 19))
RangesModel.objects.create(ints=(51, 60))
# Drop the constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(RangesModel, constraint)
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_range_adjacent_opclasses_condition(self):
constraint_name = "ints_adjacent_opclasses_condition"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
condition=Q(id__gte=100),
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@ignore_warnings(category=RemovedInDjango50Warning)
def test_range_adjacent_opclasses_deferrable(self):
constraint_name = "ints_adjacent_opclasses_deferrable"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
opclasses=["range_ops"],
deferrable=Deferrable.DEFERRED,
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@ignore_warnings(category=RemovedInDjango50Warning)
@skipUnlessDBFeature("supports_covering_gist_indexes")
def test_range_adjacent_gist_opclasses_include(self):
constraint_name = "ints_adjacent_gist_opclasses_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="gist",
opclasses=["range_ops"],
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
@ignore_warnings(category=RemovedInDjango50Warning)
@skipUnlessDBFeature("supports_covering_spgist_indexes")
def test_range_adjacent_spgist_opclasses_include(self):
constraint_name = "ints_adjacent_spgist_opclasses_include"
self.assertNotIn(
constraint_name, self.get_constraints(RangesModel._meta.db_table)
)
constraint = ExclusionConstraint(
name=constraint_name,
expressions=[("ints", RangeOperators.ADJACENT_TO)],
index_type="spgist",
opclasses=["range_ops"],
include=["decimals"],
)
with connection.schema_editor() as editor:
editor.add_constraint(RangesModel, constraint)
self.assertIn(constraint_name, self.get_constraints(RangesModel._meta.db_table))
|
8b5a0450563d645ef1d45cfbda0f6e5bf37ee662e397c9069bed0102cfa0f398 | import datetime
from xml.dom import minidom
from django.contrib.sites.models import Site
from django.contrib.syndication import views
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings
from django.test.utils import requires_tz_support
from django.utils import timezone
from django.utils.feedgenerator import (
Atom1Feed,
Rss201rev2Feed,
rfc2822_date,
rfc3339_date,
)
from .models import Article, Entry
TZ = timezone.get_default_timezone()
class FeedTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.e1 = Entry.objects.create(
title="My first entry",
updated=datetime.datetime(1980, 1, 1, 12, 30),
published=datetime.datetime(1986, 9, 25, 20, 15, 00),
)
cls.e2 = Entry.objects.create(
title="My second entry",
updated=datetime.datetime(2008, 1, 2, 12, 30),
published=datetime.datetime(2006, 3, 17, 18, 0),
)
cls.e3 = Entry.objects.create(
title="My third entry",
updated=datetime.datetime(2008, 1, 2, 13, 30),
published=datetime.datetime(2005, 6, 14, 10, 45),
)
cls.e4 = Entry.objects.create(
title="A & B < C > D",
updated=datetime.datetime(2008, 1, 3, 13, 30),
published=datetime.datetime(2005, 11, 25, 12, 11, 23),
)
cls.e5 = Entry.objects.create(
title="My last entry",
updated=datetime.datetime(2013, 1, 20, 0, 0),
published=datetime.datetime(2013, 3, 25, 20, 0),
)
cls.a1 = Article.objects.create(
title="My first article",
entry=cls.e1,
updated=datetime.datetime(1986, 11, 21, 9, 12, 18),
published=datetime.datetime(1986, 10, 21, 9, 12, 18),
)
def assertChildNodes(self, elem, expected):
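# Assert that elem's child node names exactly match the expected set of tag names.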
actual = {n.nodeName for n in elem.childNodes}
expected = set(expected)
self.assertEqual(actual, expected)
def assertChildNodeContent(self, elem, expected):
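# Assert that, for each tag name in expected, the first matching child's text
# equals the given value.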
for k, v in expected.items():
self.assertEqual(elem.getElementsByTagName(k)[0].firstChild.wholeText, v)
def assertCategories(self, elem, expected):
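# Assert that the text of elem's <category> children matches the expected set.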
self.assertEqual(
{
i.firstChild.wholeText
for i in elem.childNodes
if i.nodeName == "category"
},
set(expected),
)
@override_settings(ROOT_URLCONF="syndication_tests.urls")
class SyndicationFeedTest(FeedTestCase):
"""
Tests for the high-level syndication feed framework.
"""
@classmethod
def setUpClass(cls):
super().setUpClass()
# This cleanup is necessary because the contrib.sites cache
# makes tests interfere with each other, see #11505.
Site.objects.clear_cache()
def test_rss2_feed(self):
"""
Test the structure and content of feeds generated by Rss201rev2Feed.
"""
response = self.client.get("/syndication/rss2/")
doc = minidom.parseString(response.content)
# Making sure there's only 1 `rss` element and that the correct
# RSS version was specified.
feed_elem = doc.getElementsByTagName("rss")
self.assertEqual(len(feed_elem), 1)
feed = feed_elem[0]
self.assertEqual(feed.getAttribute("version"), "2.0")
self.assertEqual(
feed.getElementsByTagName("language")[0].firstChild.nodeValue, "en"
)
# Making sure there's only one `channel` element w/in the
# `rss` element.
chan_elem = feed.getElementsByTagName("channel")
self.assertEqual(len(chan_elem), 1)
chan = chan_elem[0]
# Find the last build date
d = Entry.objects.latest("published").published
last_build_date = rfc2822_date(timezone.make_aware(d, TZ))
self.assertChildNodes(
chan,
[
"title",
"link",
"description",
"language",
"lastBuildDate",
"item",
"atom:link",
"ttl",
"copyright",
"category",
],
)
self.assertChildNodeContent(
chan,
{
"title": "My blog",
"description": "A more thorough description of my blog.",
"link": "http://example.com/blog/",
"language": "en",
"lastBuildDate": last_build_date,
"ttl": "600",
"copyright": "Copyright (c) 2007, Sally Smith",
},
)
self.assertCategories(chan, ["python", "django"])
# Ensure the content of the channel is correct
self.assertChildNodeContent(
chan,
{
"title": "My blog",
"link": "http://example.com/blog/",
},
)
# Check feed_url is passed
self.assertEqual(
chan.getElementsByTagName("atom:link")[0].getAttribute("href"),
"http://example.com/syndication/rss2/",
)
# Find the pubdate of the first feed item
d = Entry.objects.get(pk=self.e1.pk).published
pub_date = rfc2822_date(timezone.make_aware(d, TZ))
items = chan.getElementsByTagName("item")
self.assertEqual(len(items), Entry.objects.count())
self.assertChildNodeContent(
items[0],
{
"title": "My first entry",
"description": "Overridden description: My first entry",
"link": "http://example.com/blog/%s/" % self.e1.pk,
"guid": "http://example.com/blog/%s/" % self.e1.pk,
"pubDate": pub_date,
"author": "[email protected] (Sally Smith)",
"comments": "/blog/%s/comments" % self.e1.pk,
},
)
self.assertCategories(items[0], ["python", "testing"])
for item in items:
self.assertChildNodes(
item,
[
"title",
"link",
"description",
"guid",
"category",
"pubDate",
"author",
"comments",
],
)
# Assert that <guid> does not have any 'isPermaLink' attribute
self.assertIsNone(
item.getElementsByTagName("guid")[0].attributes.get("isPermaLink")
)
def test_rss2_feed_guid_permalink_false(self):
"""
Test that the 'isPermaLink' attribute of an item's <guid> element
in the RSS feed is 'false'.
"""
response = self.client.get("/syndication/rss2/guid_ispermalink_false/")
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName("rss")[0].getElementsByTagName("channel")[0]
items = chan.getElementsByTagName("item")
for item in items:
self.assertEqual(
item.getElementsByTagName("guid")[0]
.attributes.get("isPermaLink")
.value,
"false",
)
def test_rss2_feed_guid_permalink_true(self):
"""
Test that the 'isPermaLink' attribute of an item's <guid> element
in the RSS feed is 'true'.
"""
response = self.client.get("/syndication/rss2/guid_ispermalink_true/")
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName("rss")[0].getElementsByTagName("channel")[0]
items = chan.getElementsByTagName("item")
for item in items:
self.assertEqual(
item.getElementsByTagName("guid")[0]
.attributes.get("isPermaLink")
.value,
"true",
)
def test_rss2_single_enclosure(self):
response = self.client.get("/syndication/rss2/single-enclosure/")
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName("rss")[0].getElementsByTagName("channel")[0]
items = chan.getElementsByTagName("item")
for item in items:
enclosures = item.getElementsByTagName("enclosure")
self.assertEqual(len(enclosures), 1)
def test_rss2_multiple_enclosures(self):
with self.assertRaisesMessage(
ValueError,
"RSS feed items may only have one enclosure, see "
"http://www.rssboard.org/rss-profile#element-channel-item-enclosure",
):
self.client.get("/syndication/rss2/multiple-enclosure/")
def test_rss091_feed(self):
"""
Test the structure and content of feeds generated by RssUserland091Feed.
"""
response = self.client.get("/syndication/rss091/")
doc = minidom.parseString(response.content)
# Making sure there's only 1 `rss` element and that the correct
# RSS version was specified.
feed_elem = doc.getElementsByTagName("rss")
self.assertEqual(len(feed_elem), 1)
feed = feed_elem[0]
self.assertEqual(feed.getAttribute("version"), "0.91")
# Making sure there's only one `channel` element w/in the
# `rss` element.
chan_elem = feed.getElementsByTagName("channel")
self.assertEqual(len(chan_elem), 1)
chan = chan_elem[0]
self.assertChildNodes(
chan,
[
"title",
"link",
"description",
"language",
"lastBuildDate",
"item",
"atom:link",
"ttl",
"copyright",
"category",
],
)
# Ensure the content of the channel is correct
self.assertChildNodeContent(
chan,
{
"title": "My blog",
"link": "http://example.com/blog/",
},
)
self.assertCategories(chan, ["python", "django"])
# Check feed_url is passed
self.assertEqual(
chan.getElementsByTagName("atom:link")[0].getAttribute("href"),
"http://example.com/syndication/rss091/",
)
items = chan.getElementsByTagName("item")
self.assertEqual(len(items), Entry.objects.count())
self.assertChildNodeContent(
items[0],
{
"title": "My first entry",
"description": "Overridden description: My first entry",
"link": "http://example.com/blog/%s/" % self.e1.pk,
},
)
for item in items:
self.assertChildNodes(item, ["title", "link", "description"])
self.assertCategories(item, [])
def test_atom_feed(self):
"""
Test the structure and content of feeds generated by Atom1Feed.
"""
response = self.client.get("/syndication/atom/")
feed = minidom.parseString(response.content).firstChild
self.assertEqual(feed.nodeName, "feed")
self.assertEqual(feed.getAttribute("xmlns"), "http://www.w3.org/2005/Atom")
self.assertChildNodes(
feed,
[
"title",
"subtitle",
"link",
"id",
"updated",
"entry",
"rights",
"category",
"author",
],
)
for link in feed.getElementsByTagName("link"):
if link.getAttribute("rel") == "self":
self.assertEqual(
link.getAttribute("href"), "http://example.com/syndication/atom/"
)
entries = feed.getElementsByTagName("entry")
self.assertEqual(len(entries), Entry.objects.count())
for entry in entries:
self.assertChildNodes(
entry,
[
"title",
"link",
"id",
"summary",
"category",
"updated",
"published",
"rights",
"author",
],
)
summary = entry.getElementsByTagName("summary")[0]
self.assertEqual(summary.getAttribute("type"), "html")
def test_atom_feed_published_and_updated_elements(self):
"""
The published and updated elements are distinct and follow
RFC 4287 semantics.
"""
response = self.client.get("/syndication/atom/")
feed = minidom.parseString(response.content).firstChild
entries = feed.getElementsByTagName("entry")
published = entries[0].getElementsByTagName("published")[0].firstChild.wholeText
updated = entries[0].getElementsByTagName("updated")[0].firstChild.wholeText
self.assertNotEqual(published, updated)
def test_atom_single_enclosure(self):
response = self.client.get("/syndication/atom/single-enclosure/")
feed = minidom.parseString(response.content).firstChild
items = feed.getElementsByTagName("entry")
for item in items:
links = item.getElementsByTagName("link")
links = [link for link in links if link.getAttribute("rel") == "enclosure"]
self.assertEqual(len(links), 1)
def test_atom_multiple_enclosures(self):
response = self.client.get("/syndication/atom/multiple-enclosure/")
feed = minidom.parseString(response.content).firstChild
items = feed.getElementsByTagName("entry")
for item in items:
links = item.getElementsByTagName("link")
links = [link for link in links if link.getAttribute("rel") == "enclosure"]
self.assertEqual(len(links), 2)
def test_latest_post_date(self):
"""
Both the published and updated dates are
considered when determining the latest post date.
"""
# this feed has a `published` element with the latest date
response = self.client.get("/syndication/atom/")
feed = minidom.parseString(response.content).firstChild
updated = feed.getElementsByTagName("updated")[0].firstChild.wholeText
d = Entry.objects.latest("published").published
latest_published = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest_published)
# this feed has an `updated` element with the latest date
response = self.client.get("/syndication/latest/")
feed = minidom.parseString(response.content).firstChild
updated = feed.getElementsByTagName("updated")[0].firstChild.wholeText
d = Entry.objects.exclude(title="My last entry").latest("updated").updated
latest_updated = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest_updated)
def test_custom_feed_generator(self):
response = self.client.get("/syndication/custom/")
feed = minidom.parseString(response.content).firstChild
self.assertEqual(feed.nodeName, "feed")
self.assertEqual(feed.getAttribute("django"), "rocks")
self.assertChildNodes(
feed,
[
"title",
"subtitle",
"link",
"id",
"updated",
"entry",
"spam",
"rights",
"category",
"author",
],
)
entries = feed.getElementsByTagName("entry")
self.assertEqual(len(entries), Entry.objects.count())
for entry in entries:
self.assertEqual(entry.getAttribute("bacon"), "yum")
self.assertChildNodes(
entry,
[
"title",
"link",
"id",
"summary",
"ministry",
"rights",
"author",
"updated",
"published",
"category",
],
)
summary = entry.getElementsByTagName("summary")[0]
self.assertEqual(summary.getAttribute("type"), "html")
def test_feed_generator_language_attribute(self):
response = self.client.get("/syndication/language/")
feed = minidom.parseString(response.content).firstChild
self.assertEqual(
feed.firstChild.getElementsByTagName("language")[0].firstChild.nodeValue,
"de",
)
def test_title_escaping(self):
"""
Titles are escaped correctly in RSS feeds.
"""
response = self.client.get("/syndication/rss2/")
doc = minidom.parseString(response.content)
for item in doc.getElementsByTagName("item"):
link = item.getElementsByTagName("link")[0]
if link.firstChild.wholeText == "http://example.com/blog/4/":
title = item.getElementsByTagName("title")[0]
self.assertEqual(title.firstChild.wholeText, "A & B < C > D")
def test_naive_datetime_conversion(self):
"""
Datetimes are correctly converted to the local time zone.
"""
# Naive datetimes passed in are converted to the local time zone, so
# check the received zone offset against the local offset.
response = self.client.get("/syndication/naive-dates/")
doc = minidom.parseString(response.content)
updated = doc.getElementsByTagName("updated")[0].firstChild.wholeText
d = Entry.objects.latest("published").published
latest = rfc3339_date(timezone.make_aware(d, TZ))
self.assertEqual(updated, latest)
def test_aware_datetime_conversion(self):
"""
Timezone-aware datetimes are preserved unchanged.
"""
response = self.client.get("/syndication/aware-dates/")
doc = minidom.parseString(response.content)
published = doc.getElementsByTagName("published")[0].firstChild.wholeText
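# TZAwareDatesFeed uses get_fixed_timezone(42), so the published date keeps
# its +00:42 offset.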
self.assertEqual(published[-6:], "+00:42")
def test_feed_no_content_self_closing_tag(self):
tests = [
(Atom1Feed, "link"),
(Rss201rev2Feed, "atom:link"),
]
for feedgenerator, tag in tests:
with self.subTest(feedgenerator=feedgenerator.__name__):
feed = feedgenerator(
title="title",
link="https://example.com",
description="self closing tags test",
feed_url="https://feed.url.com",
)
doc = feed.writeString("utf-8")
self.assertIn(f'<{tag} href="https://feed.url.com" rel="self"/>', doc)
@requires_tz_support
def test_feed_last_modified_time_naive_date(self):
"""
Tests the Last-Modified header with naive publication dates.
"""
response = self.client.get("/syndication/naive-dates/")
self.assertEqual(
response.headers["Last-Modified"], "Tue, 26 Mar 2013 01:00:00 GMT"
)
def test_feed_last_modified_time(self):
"""
Tests the Last-Modified header with aware publication dates.
"""
response = self.client.get("/syndication/aware-dates/")
self.assertEqual(
response.headers["Last-Modified"], "Mon, 25 Mar 2013 19:18:00 GMT"
)
# No last-modified when feed has no item_pubdate
response = self.client.get("/syndication/no_pubdate/")
self.assertFalse(response.has_header("Last-Modified"))
def test_feed_url(self):
"""
The feed_url can be overridden.
"""
response = self.client.get("/syndication/feedurl/")
doc = minidom.parseString(response.content)
for link in doc.getElementsByTagName("link"):
if link.getAttribute("rel") == "self":
self.assertEqual(
link.getAttribute("href"), "http://example.com/customfeedurl/"
)
def test_secure_urls(self):
"""
Test URLs are prefixed with https:// when the feed is requested over HTTPS.
"""
response = self.client.get(
"/syndication/rss2/",
**{
"wsgi.url_scheme": "https",
},
)
doc = minidom.parseString(response.content)
chan = doc.getElementsByTagName("channel")[0]
self.assertEqual(
chan.getElementsByTagName("link")[0].firstChild.wholeText[0:5], "https"
)
atom_link = chan.getElementsByTagName("atom:link")[0]
self.assertEqual(atom_link.getAttribute("href")[0:5], "https")
for link in doc.getElementsByTagName("link"):
if link.getAttribute("rel") == "self":
self.assertEqual(link.getAttribute("href")[0:5], "https")
def test_item_link_error(self):
"""
An ImproperlyConfigured exception is raised if no link could be found for the
item(s).
"""
msg = (
"Give your Article class a get_absolute_url() method, or define "
"an item_link() method in your Feed class."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/syndication/articles/")
def test_template_feed(self):
"""
The item title and description can be overridden with templates.
"""
response = self.client.get("/syndication/template/")
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName("rss")[0]
chan = feed.getElementsByTagName("channel")[0]
items = chan.getElementsByTagName("item")
self.assertChildNodeContent(
items[0],
{
"title": "Title in your templates: My first entry\n",
"description": "Description in your templates: My first entry\n",
"link": "http://example.com/blog/%s/" % self.e1.pk,
},
)
def test_template_context_feed(self):
"""
Custom context data can be passed to templates for title
and description.
"""
response = self.client.get("/syndication/template_context/")
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName("rss")[0]
chan = feed.getElementsByTagName("channel")[0]
items = chan.getElementsByTagName("item")
self.assertChildNodeContent(
items[0],
{
"title": "My first entry (foo is bar)\n",
"description": "My first entry (foo is bar)\n",
},
)
def test_add_domain(self):
"""
add_domain() prefixes domains onto the correct URLs.
"""
prefix_domain_mapping = (
(("example.com", "/foo/?arg=value"), "http://example.com/foo/?arg=value"),
(
("example.com", "/foo/?arg=value", True),
"https://example.com/foo/?arg=value",
),
(
("example.com", "http://djangoproject.com/doc/"),
"http://djangoproject.com/doc/",
),
(
("example.com", "https://djangoproject.com/doc/"),
"https://djangoproject.com/doc/",
),
(
("example.com", "mailto:[email protected]"),
"mailto:[email protected]",
),
(
("example.com", "//example.com/foo/?arg=value"),
"http://example.com/foo/?arg=value",
),
)
for prefix in prefix_domain_mapping:
with self.subTest(prefix=prefix):
self.assertEqual(views.add_domain(*prefix[0]), prefix[1])
def test_get_object(self):
response = self.client.get("/syndication/rss2/articles/%s/" % self.e1.pk)
doc = minidom.parseString(response.content)
feed = doc.getElementsByTagName("rss")[0]
chan = feed.getElementsByTagName("channel")[0]
items = chan.getElementsByTagName("item")
self.assertChildNodeContent(
items[0],
{
"comments": "/blog/%s/article/%s/comments" % (self.e1.pk, self.a1.pk),
"description": "Article description: My first article",
"link": "http://example.com/blog/%s/article/%s/"
% (self.e1.pk, self.a1.pk),
"title": "Title: My first article",
"pubDate": rfc2822_date(timezone.make_aware(self.a1.published, TZ)),
},
)
def test_get_non_existent_object(self):
response = self.client.get("/syndication/rss2/articles/0/")
self.assertEqual(response.status_code, 404)
|
48d41fb82366cf3a3d85a06afe5eeb3b23e86ce531207c449828d07005ba81ef | from django.db import models
class Entry(models.Model):
title = models.CharField(max_length=200)
updated = models.DateTimeField()
published = models.DateTimeField()
class Meta:
ordering = ("updated",)
def __str__(self):
return self.title
def get_absolute_url(self):
return "/blog/%s/" % self.pk
class Article(models.Model):
title = models.CharField(max_length=200)
entry = models.ForeignKey(Entry, models.CASCADE)
updated = models.DateTimeField()
published = models.DateTimeField()
class Meta:
ordering = ["updated"]
|
ccc8d39f8f73c2741e293ceb4d969cd43aa5aeef79259416f555727a358c60b4 | from django.contrib.syndication import views
from django.utils import feedgenerator
from django.utils.timezone import get_fixed_timezone
from .models import Article, Entry
class TestRss2Feed(views.Feed):
title = "My blog"
description = "A more thorough description of my blog."
link = "/blog/"
feed_guid = "/foo/bar/1234"
author_name = "Sally Smith"
author_email = "[email protected]"
author_link = "http://www.example.com/"
categories = ("python", "django")
feed_copyright = "Copyright (c) 2007, Sally Smith"
ttl = 600
def items(self):
return Entry.objects.all()
def item_description(self, item):
return "Overridden description: %s" % item
def item_pubdate(self, item):
return item.published
def item_updateddate(self, item):
return item.updated
def item_comments(self, item):
return "%scomments" % item.get_absolute_url()
item_author_name = "Sally Smith"
item_author_email = "[email protected]"
item_author_link = "http://www.example.com/"
item_categories = ("python", "testing")
item_copyright = "Copyright (c) 2007, Sally Smith"
class TestRss2FeedWithGuidIsPermaLinkTrue(TestRss2Feed):
def item_guid_is_permalink(self, item):
return True
class TestRss2FeedWithGuidIsPermaLinkFalse(TestRss2Feed):
def item_guid(self, item):
return str(item.pk)
def item_guid_is_permalink(self, item):
return False
class TestRss091Feed(TestRss2Feed):
feed_type = feedgenerator.RssUserland091Feed
class TestNoPubdateFeed(views.Feed):
title = "Test feed"
link = "/feed/"
def items(self):
return Entry.objects.all()
class TestAtomFeed(TestRss2Feed):
feed_type = feedgenerator.Atom1Feed
subtitle = TestRss2Feed.description
class TestLatestFeed(TestRss2Feed):
"""
A feed where the latest entry date is an `updated` element.
"""
feed_type = feedgenerator.Atom1Feed
subtitle = TestRss2Feed.description
def items(self):
return Entry.objects.exclude(title="My last entry")
class ArticlesFeed(TestRss2Feed):
"""
A feed to test the error raised when no item link can be determined: Articles
have no get_absolute_url() method, and item_link() is not defined.
"""
def items(self):
return Article.objects.all()
class TestSingleEnclosureRSSFeed(TestRss2Feed):
"""
A feed to test that RSS feeds work with a single enclosure.
"""
def item_enclosure_url(self, item):
return "http://example.com"
def item_enclosure_size(self, item):
return 0
def item_mime_type(self, item):
return "image/png"
class TestMultipleEnclosureRSSFeed(TestRss2Feed):
"""
A feed to test that RSS feeds raise an exception with multiple enclosures.
"""
def item_enclosures(self, item):
return [
feedgenerator.Enclosure("http://example.com/hello.png", 0, "image/png"),
feedgenerator.Enclosure("http://example.com/goodbye.png", 0, "image/png"),
]
class TemplateFeed(TestRss2Feed):
"""
A feed to test defining item titles and descriptions with templates.
"""
title_template = "syndication/title.html"
description_template = "syndication/description.html"
# Defining a template overrides any item_title definition
def item_title(self):
return "Not in a template"
class TemplateContextFeed(TestRss2Feed):
"""
A feed to test custom context data in templates for title or description.
"""
title_template = "syndication/title_context.html"
description_template = "syndication/description_context.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["foo"] = "bar"
return context
class TestLanguageFeed(TestRss2Feed):
language = "de"
class TestGetObjectFeed(TestRss2Feed):
def get_object(self, request, entry_id):
return Entry.objects.get(pk=entry_id)
def items(self, obj):
return Article.objects.filter(entry=obj)
def item_link(self, item):
return "%sarticle/%s/" % (item.entry.get_absolute_url(), item.pk)
def item_comments(self, item):
return "%scomments" % self.item_link(item)
def item_description(self, item):
return "Article description: %s" % item.title
def item_title(self, item):
return "Title: %s" % item.title
class NaiveDatesFeed(TestAtomFeed):
"""
A feed with naive (non-timezone-aware) dates.
"""
def item_pubdate(self, item):
return item.published
class TZAwareDatesFeed(TestAtomFeed):
"""
A feed with timezone-aware dates.
"""
def item_pubdate(self, item):
# Provide a weird offset so that the test can know it's getting this
# specific offset and not accidentally getting one from
# settings.TIME_ZONE.
return item.published.replace(tzinfo=get_fixed_timezone(42))
class TestFeedUrlFeed(TestAtomFeed):
feed_url = "http://example.com/customfeedurl/"
class MyCustomAtom1Feed(feedgenerator.Atom1Feed):
"""
Test of a custom feed generator class.
"""
def root_attributes(self):
attrs = super().root_attributes()
attrs["django"] = "rocks"
return attrs
def add_root_elements(self, handler):
super().add_root_elements(handler)
handler.addQuickElement("spam", "eggs")
def item_attributes(self, item):
attrs = super().item_attributes(item)
attrs["bacon"] = "yum"
return attrs
def add_item_elements(self, handler, item):
super().add_item_elements(handler, item)
handler.addQuickElement("ministry", "silly walks")
class TestCustomFeed(TestAtomFeed):
feed_type = MyCustomAtom1Feed
class TestSingleEnclosureAtomFeed(TestAtomFeed):
"""
A feed to test that Atom feeds work with a single enclosure.
"""
def item_enclosure_url(self, item):
return "http://example.com"
def item_enclosure_size(self, item):
return 0
def item_mime_type(self, item):
return "image/png"
class TestMultipleEnclosureAtomFeed(TestAtomFeed):
"""
A feed to test that Atom feeds work with multiple enclosures.
"""
def item_enclosures(self, item):
return [
feedgenerator.Enclosure("http://example.com/hello.png", "0", "image/png"),
feedgenerator.Enclosure("http://example.com/goodbye.png", "0", "image/png"),
]
|
4cbffaf5f30a27d5dca6a26b2bd8fb8ce089c7ba1e01f02307362f15fbff0567 | from django.urls import path
from . import feeds
urlpatterns = [
path("syndication/rss2/", feeds.TestRss2Feed()),
path("syndication/rss2/articles/<int:entry_id>/", feeds.TestGetObjectFeed()),
path(
"syndication/rss2/guid_ispermalink_true/",
feeds.TestRss2FeedWithGuidIsPermaLinkTrue(),
),
path(
"syndication/rss2/guid_ispermalink_false/",
feeds.TestRss2FeedWithGuidIsPermaLinkFalse(),
),
path("syndication/rss091/", feeds.TestRss091Feed()),
path("syndication/no_pubdate/", feeds.TestNoPubdateFeed()),
path("syndication/atom/", feeds.TestAtomFeed()),
path("syndication/latest/", feeds.TestLatestFeed()),
path("syndication/custom/", feeds.TestCustomFeed()),
path("syndication/language/", feeds.TestLanguageFeed()),
path("syndication/naive-dates/", feeds.NaiveDatesFeed()),
path("syndication/aware-dates/", feeds.TZAwareDatesFeed()),
path("syndication/feedurl/", feeds.TestFeedUrlFeed()),
path("syndication/articles/", feeds.ArticlesFeed()),
path("syndication/template/", feeds.TemplateFeed()),
path("syndication/template_context/", feeds.TemplateContextFeed()),
path("syndication/rss2/single-enclosure/", feeds.TestSingleEnclosureRSSFeed()),
path("syndication/rss2/multiple-enclosure/", feeds.TestMultipleEnclosureRSSFeed()),
path("syndication/atom/single-enclosure/", feeds.TestSingleEnclosureAtomFeed()),
path("syndication/atom/multiple-enclosure/", feeds.TestMultipleEnclosureAtomFeed()),
]
|
0c532b59d2dbd501d98254a44bc2fa00d909ae4d951fb77daab6b554c977e1a1 | import datetime
from decimal import Decimal
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db.models import (
BooleanField,
Case,
CharField,
Count,
DateTimeField,
DecimalField,
Exists,
ExpressionWrapper,
F,
FloatField,
Func,
IntegerField,
Max,
OuterRef,
Q,
Subquery,
Sum,
Value,
When,
)
from django.db.models.expressions import RawSQL
from django.db.models.functions import Coalesce, ExtractYear, Floor, Length, Lower, Trim
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import register_lookup
from .models import (
Author,
Book,
Company,
DepartmentStore,
Employee,
Publisher,
Store,
Ticket,
)
class NonAggregateAnnotationTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="Brad Dayley", age=45)
cls.a4 = Author.objects.create(name="James Bennett", age=29)
cls.a5 = Author.objects.create(name="Jeffrey Forcier", age=37)
cls.a6 = Author.objects.create(name="Paul Bissex", age=29)
cls.a7 = Author.objects.create(name="Wesley J. Chun", age=25)
cls.a8 = Author.objects.create(name="Peter Norvig", age=57)
cls.a9 = Author.objects.create(name="Stuart Russell", age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name="Apress", num_awards=3)
cls.p2 = Publisher.objects.create(name="Sams", num_awards=1)
cls.p3 = Publisher.objects.create(name="Prentice Hall", num_awards=7)
cls.p4 = Publisher.objects.create(name="Morgan Kaufmann", num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn="159059725",
name="The Definitive Guide to Django: Web Development Done Right",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn="067232959",
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
rating=3.0,
price=Decimal("23.09"),
contact=cls.a3,
publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn="159059996",
name="Practical Django Projects",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a4,
publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.b4 = Book.objects.create(
isbn="013235613",
name="Python Web Development with Django",
pages=350,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a5,
publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3),
)
cls.b5 = Book.objects.create(
isbn="013790395",
name="Artificial Intelligence: A Modern Approach",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a8,
publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15),
)
cls.b6 = Book.objects.create(
isbn="155860191",
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a8,
publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
cls.s1 = Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
cls.s2 = Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
cls.s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30),
)
cls.s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
cls.s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
cls.s3.books.add(cls.b3, cls.b4, cls.b6)
def test_basic_annotation(self):
books = Book.objects.annotate(is_book=Value(1))
for book in books:
self.assertEqual(book.is_book, 1)
def test_basic_f_annotation(self):
books = Book.objects.annotate(another_rating=F("rating"))
for book in books:
self.assertEqual(book.another_rating, book.rating)
def test_joined_annotation(self):
books = Book.objects.select_related("publisher").annotate(
num_awards=F("publisher__num_awards")
)
for book in books:
self.assertEqual(book.num_awards, book.publisher.num_awards)
def test_joined_transformed_annotation(self):
Employee.objects.bulk_create(
[
Employee(
first_name="John",
last_name="Doe",
age=18,
store=self.s1,
salary=15000,
),
Employee(
first_name="Jane",
last_name="Jones",
age=30,
store=self.s2,
salary=30000,
),
Employee(
first_name="Jo",
last_name="Smith",
age=55,
store=self.s3,
salary=50000,
),
]
)
employees = Employee.objects.annotate(
store_opened_year=F("store__original_opening__year"),
)
for employee in employees:
self.assertEqual(
employee.store_opened_year,
employee.store.original_opening.year,
)
def test_custom_transform_annotation(self):
with register_lookup(DecimalField, Floor):
books = Book.objects.annotate(floor_price=F("price__floor"))
self.assertSequenceEqual(
books.values_list("pk", "floor_price"),
[
(self.b1.pk, 30),
(self.b2.pk, 23),
(self.b3.pk, 29),
(self.b4.pk, 29),
(self.b5.pk, 82),
(self.b6.pk, 75),
],
)
def test_chaining_transforms(self):
Company.objects.create(name=" Django Software Foundation ")
Company.objects.create(name="Yahoo")
with register_lookup(CharField, Trim), register_lookup(CharField, Length):
for expr in [Length("name__trim"), F("name__trim__length")]:
with self.subTest(expr=expr):
self.assertCountEqual(
Company.objects.annotate(length=expr).values("name", "length"),
[
{"name": " Django Software Foundation ", "length": 26},
{"name": "Yahoo", "length": 5},
],
)
def test_mixed_type_annotation_date_interval(self):
active = datetime.datetime(2015, 3, 20, 14, 0, 0)
duration = datetime.timedelta(hours=1)
expires = datetime.datetime(2015, 3, 20, 14, 0, 0) + duration
Ticket.objects.create(active_at=active, duration=duration)
t = Ticket.objects.annotate(
expires=ExpressionWrapper(
F("active_at") + F("duration"), output_field=DateTimeField()
)
).first()
self.assertEqual(t.expires, expires)
def test_mixed_type_annotation_numbers(self):
test = self.b1
b = Book.objects.annotate(
combined=ExpressionWrapper(
F("pages") + F("rating"), output_field=IntegerField()
)
).get(isbn=test.isbn)
combined = int(test.pages + test.rating)
self.assertEqual(b.combined, combined)
def test_empty_expression_annotation(self):
books = Book.objects.annotate(
selected=ExpressionWrapper(Q(pk__in=[]), output_field=BooleanField())
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
books = Book.objects.annotate(
selected=ExpressionWrapper(
Q(pk__in=Book.objects.none()), output_field=BooleanField()
)
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
def test_full_expression_annotation(self):
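        # ~Q(pk__in=[]) is always true, so the annotation selects every book.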
books = Book.objects.annotate(
selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()),
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(book.selected for book in books))
def test_full_expression_annotation_with_aggregation(self):
qs = Book.objects.filter(isbn="159059725").annotate(
selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()),
rating_count=Count("rating"),
)
self.assertEqual([book.rating_count for book in qs], [1])
def test_aggregate_over_full_expression_annotation(self):
qs = Book.objects.annotate(
selected=ExpressionWrapper(~Q(pk__in=[]), output_field=BooleanField()),
).aggregate(Sum("selected"))
self.assertEqual(qs["selected__sum"], Book.objects.count())
def test_empty_queryset_annotation(self):
qs = Author.objects.annotate(empty=Subquery(Author.objects.values("id").none()))
self.assertIsNone(qs.first().empty)
def test_annotate_with_aggregation(self):
books = Book.objects.annotate(is_book=Value(1), rating_count=Count("rating"))
for book in books:
self.assertEqual(book.is_book, 1)
self.assertEqual(book.rating_count, 1)
def test_combined_expression_annotation_with_aggregation(self):
book = Book.objects.annotate(
combined=ExpressionWrapper(
Value(3) * Value(4), output_field=IntegerField()
),
rating_count=Count("rating"),
).first()
self.assertEqual(book.combined, 12)
self.assertEqual(book.rating_count, 1)
def test_combined_f_expression_annotation_with_aggregation(self):
book = (
Book.objects.filter(isbn="159059725")
.annotate(
combined=ExpressionWrapper(
F("price") * F("pages"), output_field=FloatField()
),
rating_count=Count("rating"),
)
.first()
)
self.assertEqual(book.combined, 13410.0)
self.assertEqual(book.rating_count, 1)
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_q_expression_annotation_with_aggregation(self):
book = (
Book.objects.filter(isbn="159059725")
.annotate(
isnull_pubdate=ExpressionWrapper(
Q(pubdate__isnull=True),
output_field=BooleanField(),
),
rating_count=Count("rating"),
)
.first()
)
self.assertIs(book.isnull_pubdate, False)
self.assertEqual(book.rating_count, 1)
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_grouping_by_q_expression_annotation(self):
authors = (
Author.objects.annotate(
under_40=ExpressionWrapper(Q(age__lt=40), output_field=BooleanField()),
)
.values("under_40")
.annotate(
count_id=Count("id"),
)
.values("under_40", "count_id")
)
self.assertCountEqual(
authors,
[
{"under_40": False, "count_id": 3},
{"under_40": True, "count_id": 6},
],
)
def test_aggregate_over_annotation(self):
agg = Author.objects.annotate(other_age=F("age")).aggregate(
otherage_sum=Sum("other_age")
)
other_agg = Author.objects.aggregate(age_sum=Sum("age"))
self.assertEqual(agg["otherage_sum"], other_agg["age_sum"])
@skipUnlessDBFeature("can_distinct_on_fields")
def test_distinct_on_with_annotation(self):
store = Store.objects.create(
name="test store",
original_opening=datetime.datetime.now(),
friday_night_closing=datetime.time(21, 00, 00),
)
names = [
"Theodore Roosevelt",
"Eleanor Roosevelt",
"Franklin Roosevelt",
"Ned Stark",
"Catelyn Stark",
]
for name in names:
Employee.objects.create(
store=store,
first_name=name.split()[0],
last_name=name.split()[1],
age=30,
salary=2000,
)
people = Employee.objects.annotate(
name_lower=Lower("last_name"),
).distinct("name_lower")
self.assertEqual({p.last_name for p in people}, {"Stark", "Roosevelt"})
self.assertEqual(len(people), 2)
people2 = Employee.objects.annotate(
test_alias=F("store__name"),
).distinct("test_alias")
self.assertEqual(len(people2), 1)
lengths = (
Employee.objects.annotate(
name_len=Length("first_name"),
)
.distinct("name_len")
.values_list("name_len", flat=True)
)
self.assertCountEqual(lengths, [3, 7, 8])
def test_filter_annotation(self):
books = Book.objects.annotate(is_book=Value(1)).filter(is_book=1)
for book in books:
self.assertEqual(book.is_book, 1)
def test_filter_annotation_with_f(self):
books = Book.objects.annotate(other_rating=F("rating")).filter(other_rating=3.5)
for book in books:
self.assertEqual(book.other_rating, 3.5)
def test_filter_annotation_with_double_f(self):
books = Book.objects.annotate(other_rating=F("rating")).filter(
other_rating=F("rating")
)
for book in books:
self.assertEqual(book.other_rating, book.rating)
def test_filter_agg_with_double_f(self):
books = Book.objects.annotate(sum_rating=Sum("rating")).filter(
sum_rating=F("sum_rating")
)
for book in books:
self.assertEqual(book.sum_rating, book.rating)
def test_filter_wrong_annotation(self):
with self.assertRaisesMessage(
FieldError, "Cannot resolve keyword 'nope' into field."
):
list(
Book.objects.annotate(sum_rating=Sum("rating")).filter(
sum_rating=F("nope")
)
)
def test_decimal_annotation(self):
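        # Decimal(10) ** -decimal_places is the smallest positive value the
        # salary field can represent (Decimal("0.01") for two decimal places).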
salary = Decimal(10) ** -Employee._meta.get_field("salary").decimal_places
Employee.objects.create(
first_name="Max",
last_name="Paine",
store=Store.objects.first(),
age=23,
salary=salary,
)
self.assertEqual(
Employee.objects.annotate(new_salary=F("salary") / 10).get().new_salary,
salary / 10,
)
def test_filter_decimal_annotation(self):
qs = (
Book.objects.annotate(new_price=F("price") + 1)
.filter(new_price=Decimal(31))
.values_list("new_price")
)
self.assertEqual(qs.get(), (Decimal(31),))
def test_combined_annotation_commutative(self):
book1 = Book.objects.annotate(adjusted_rating=F("rating") + 2).get(
pk=self.b1.pk
)
book2 = Book.objects.annotate(adjusted_rating=2 + F("rating")).get(
pk=self.b1.pk
)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
book1 = Book.objects.annotate(adjusted_rating=F("rating") + None).get(
pk=self.b1.pk
)
book2 = Book.objects.annotate(adjusted_rating=None + F("rating")).get(
pk=self.b1.pk
)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
def test_update_with_annotation(self):
book_preupdate = Book.objects.get(pk=self.b2.pk)
Book.objects.annotate(other_rating=F("rating") - 1).update(
rating=F("other_rating")
)
book_postupdate = Book.objects.get(pk=self.b2.pk)
self.assertEqual(book_preupdate.rating - 1, book_postupdate.rating)
def test_annotation_with_m2m(self):
books = (
Book.objects.annotate(author_age=F("authors__age"))
.filter(pk=self.b1.pk)
.order_by("author_age")
)
self.assertEqual(books[0].author_age, 34)
self.assertEqual(books[1].author_age, 35)
def test_annotation_reverse_m2m(self):
books = (
Book.objects.annotate(
store_name=F("store__name"),
)
.filter(
name="Practical Django Projects",
)
.order_by("store_name")
)
self.assertQuerysetEqual(
books,
["Amazon.com", "Books.com", "Mamma and Pappa's Books"],
lambda b: b.store_name,
)
def test_values_annotation(self):
"""
Annotations can reference fields in a values clause,
and contribute to an existing values clause.
"""
# annotate references a field in values()
qs = Book.objects.values("rating").annotate(other_rating=F("rating") - 1)
book = qs.get(pk=self.b1.pk)
self.assertEqual(book["rating"] - 1, book["other_rating"])
# filter refs the annotated value
book = qs.get(other_rating=4)
self.assertEqual(book["other_rating"], 4)
        # can annotate an existing values() clause with a new field
book = qs.annotate(other_isbn=F("isbn")).get(other_rating=4)
self.assertEqual(book["other_rating"], 4)
self.assertEqual(book["other_isbn"], "155860191")
def test_values_with_pk_annotation(self):
# annotate references a field in values() with pk
publishers = Publisher.objects.values("id", "book__rating").annotate(
total=Sum("book__rating")
)
for publisher in publishers.filter(pk=self.p1.pk):
self.assertEqual(publisher["book__rating"], publisher["total"])
@skipUnlessDBFeature("allows_group_by_pk")
def test_rawsql_group_by_collapse(self):
raw = RawSQL("SELECT MIN(id) FROM annotations_book", [])
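        # On backends with allows_group_by_pk, the GROUP BY should collapse to
        # a single expression and the raw annotation must not appear in it.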
qs = (
Author.objects.values("id")
.annotate(
min_book_id=raw,
count_friends=Count("friends"),
)
.order_by()
)
_, _, group_by = qs.query.get_compiler(using="default").pre_sql_setup()
self.assertEqual(len(group_by), 1)
self.assertNotEqual(raw, group_by[0])
def test_defer_annotation(self):
"""
        Deferred attributes can be referenced by an annotation, but they are
        not themselves deferred, and an annotation cannot be deferred.
"""
qs = Book.objects.defer("rating").annotate(other_rating=F("rating") - 1)
with self.assertNumQueries(2):
book = qs.get(other_rating=4)
self.assertEqual(book.rating, 5)
self.assertEqual(book.other_rating, 4)
with self.assertRaisesMessage(
FieldDoesNotExist, "Book has no field named 'other_rating'"
):
book = qs.defer("other_rating").get(other_rating=4)
def test_mti_annotations(self):
"""
Fields on an inherited model can be referenced by an
annotated field.
"""
d = DepartmentStore.objects.create(
name="Angus & Robinson",
original_opening=datetime.date(2014, 3, 8),
friday_night_closing=datetime.time(21, 00, 00),
chain="Westfield",
)
books = Book.objects.filter(rating__gt=4)
for b in books:
d.books.add(b)
qs = (
DepartmentStore.objects.annotate(
other_name=F("name"),
other_chain=F("chain"),
is_open=Value(True, BooleanField()),
book_isbn=F("books__isbn"),
)
.order_by("book_isbn")
.filter(chain="Westfield")
)
self.assertQuerysetEqual(
qs,
[
("Angus & Robinson", "Westfield", True, "155860191"),
("Angus & Robinson", "Westfield", True, "159059725"),
],
lambda d: (d.other_name, d.other_chain, d.is_open, d.book_isbn),
)
def test_null_annotation(self):
"""
Annotating None onto a model round-trips
"""
book = Book.objects.annotate(
no_value=Value(None, output_field=IntegerField())
).first()
self.assertIsNone(book.no_value)
def test_order_by_annotation(self):
authors = Author.objects.annotate(other_age=F("age")).order_by("other_age")
self.assertQuerysetEqual(
authors,
[
25,
29,
29,
34,
35,
37,
45,
46,
57,
],
lambda a: a.other_age,
)
def test_order_by_aggregate(self):
authors = (
Author.objects.values("age")
.annotate(age_count=Count("age"))
.order_by("age_count", "age")
)
self.assertQuerysetEqual(
authors,
[
(25, 1),
(34, 1),
(35, 1),
(37, 1),
(45, 1),
(46, 1),
(57, 1),
(29, 2),
],
lambda a: (a["age"], a["age_count"]),
)
def test_raw_sql_with_inherited_field(self):
DepartmentStore.objects.create(
name="Angus & Robinson",
original_opening=datetime.date(2014, 3, 8),
friday_night_closing=datetime.time(21),
chain="Westfield",
area=123,
)
tests = (
("name", "Angus & Robinson"),
("surface", 123),
("case when name='Angus & Robinson' then chain else name end", "Westfield"),
)
for sql, expected_result in tests:
with self.subTest(sql=sql):
self.assertSequenceEqual(
DepartmentStore.objects.annotate(
annotation=RawSQL(sql, ()),
).values_list("annotation", flat=True),
[expected_result],
)
def test_annotate_exists(self):
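        # Count("id") is one per author (no joins multiply the rows), so no
        # author can satisfy c > 1.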
authors = Author.objects.annotate(c=Count("id")).filter(c__gt=1)
self.assertFalse(authors.exists())
def test_column_field_ordering(self):
"""
Columns are aligned in the correct order for resolve_columns. This test
        will fail on MySQL if the column ordering is wrong. Column fields should be
aligned as:
1. extra_select
2. model_fields
3. annotation_fields
4. model_related_fields
"""
store = Store.objects.first()
Employee.objects.create(
id=1,
first_name="Max",
manager=True,
last_name="Paine",
store=store,
age=23,
salary=Decimal(50000.00),
)
Employee.objects.create(
id=2,
first_name="Buffy",
manager=False,
last_name="Summers",
store=store,
age=18,
salary=Decimal(40000.00),
)
qs = (
Employee.objects.extra(select={"random_value": "42"})
.select_related("store")
.annotate(
annotated_value=Value(17),
)
)
rows = [
(1, "Max", True, 42, "Paine", 23, Decimal(50000.00), store.name, 17),
(2, "Buffy", False, 42, "Summers", 18, Decimal(40000.00), store.name, 17),
]
self.assertQuerysetEqual(
qs.order_by("id"),
rows,
lambda e: (
e.id,
e.first_name,
e.manager,
e.random_value,
e.last_name,
e.age,
e.salary,
e.store.name,
e.annotated_value,
),
)
def test_column_field_ordering_with_deferred(self):
store = Store.objects.first()
Employee.objects.create(
id=1,
first_name="Max",
manager=True,
last_name="Paine",
store=store,
age=23,
salary=Decimal(50000.00),
)
Employee.objects.create(
id=2,
first_name="Buffy",
manager=False,
last_name="Summers",
store=store,
age=18,
salary=Decimal(40000.00),
)
qs = (
Employee.objects.extra(select={"random_value": "42"})
.select_related("store")
.annotate(
annotated_value=Value(17),
)
)
rows = [
(1, "Max", True, 42, "Paine", 23, Decimal(50000.00), store.name, 17),
(2, "Buffy", False, 42, "Summers", 18, Decimal(40000.00), store.name, 17),
]
# and we respect deferred columns!
self.assertQuerysetEqual(
qs.defer("age").order_by("id"),
rows,
lambda e: (
e.id,
e.first_name,
e.manager,
e.random_value,
e.last_name,
e.age,
e.salary,
e.store.name,
e.annotated_value,
),
)
def test_custom_functions(self):
Company(
name="Apple",
motto=None,
ticker_name="APPL",
description="Beautiful Devices",
).save()
Company(
name="Django Software Foundation",
motto=None,
ticker_name=None,
description=None,
).save()
Company(
name="Google",
motto="Do No Evil",
ticker_name="GOOG",
description="Internet Company",
).save()
Company(
name="Yahoo", motto=None, ticker_name=None, description="Internet Company"
).save()
qs = Company.objects.annotate(
tagline=Func(
F("motto"),
F("ticker_name"),
F("description"),
Value("No Tag"),
function="COALESCE",
)
).order_by("name")
self.assertQuerysetEqual(
qs,
[
("Apple", "APPL"),
("Django Software Foundation", "No Tag"),
("Google", "Do No Evil"),
("Yahoo", "Internet Company"),
],
lambda c: (c.name, c.tagline),
)
def test_custom_functions_can_ref_other_functions(self):
Company(
name="Apple",
motto=None,
ticker_name="APPL",
description="Beautiful Devices",
).save()
Company(
name="Django Software Foundation",
motto=None,
ticker_name=None,
description=None,
).save()
Company(
name="Google",
motto="Do No Evil",
ticker_name="GOOG",
description="Internet Company",
).save()
Company(
name="Yahoo", motto=None, ticker_name=None, description="Internet Company"
).save()
class Lower(Func):
function = "LOWER"
qs = (
Company.objects.annotate(
tagline=Func(
F("motto"),
F("ticker_name"),
F("description"),
Value("No Tag"),
function="COALESCE",
)
)
.annotate(
tagline_lower=Lower(F("tagline")),
)
.order_by("name")
)
# LOWER function supported by:
# oracle, postgres, mysql, sqlite, sqlserver
self.assertQuerysetEqual(
qs,
[
("Apple", "APPL".lower()),
("Django Software Foundation", "No Tag".lower()),
("Google", "Do No Evil".lower()),
("Yahoo", "Internet Company".lower()),
],
lambda c: (c.name, c.tagline_lower),
)
def test_boolean_value_annotation(self):
books = Book.objects.annotate(
is_book=Value(True, output_field=BooleanField()),
is_pony=Value(False, output_field=BooleanField()),
is_none=Value(None, output_field=BooleanField(null=True)),
)
self.assertGreater(len(books), 0)
for book in books:
self.assertIs(book.is_book, True)
self.assertIs(book.is_pony, False)
self.assertIsNone(book.is_none)
def test_annotation_in_f_grouped_by_annotation(self):
qs = (
Publisher.objects.annotate(multiplier=Value(3))
            # group by name => sum of multiplier * num_awards per publisher
.values("name")
.annotate(multiplied_value_sum=Sum(F("multiplier") * F("num_awards")))
.order_by()
)
self.assertCountEqual(
qs,
[
{"multiplied_value_sum": 9, "name": "Apress"},
{"multiplied_value_sum": 0, "name": "Jonno's House of Books"},
{"multiplied_value_sum": 27, "name": "Morgan Kaufmann"},
{"multiplied_value_sum": 21, "name": "Prentice Hall"},
{"multiplied_value_sum": 3, "name": "Sams"},
],
)
def test_arguments_must_be_expressions(self):
msg = "QuerySet.annotate() received non-expression(s): %s."
with self.assertRaisesMessage(TypeError, msg % BooleanField()):
Book.objects.annotate(BooleanField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.annotate(is_book=True)
with self.assertRaisesMessage(
TypeError, msg % ", ".join([str(BooleanField()), "True"])
):
Book.objects.annotate(BooleanField(), Value(False), is_book=True)
def test_chaining_annotation_filter_with_m2m(self):
qs = (
Author.objects.filter(
name="Adrian Holovaty",
friends__age=35,
)
.annotate(
jacob_name=F("friends__name"),
)
.filter(
friends__age=29,
)
.annotate(
james_name=F("friends__name"),
)
.values("jacob_name", "james_name")
)
self.assertCountEqual(
qs,
[{"jacob_name": "Jacob Kaplan-Moss", "james_name": "James Bennett"}],
)
def test_annotation_filter_with_subquery(self):
long_books_qs = (
Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=400,
)
.values("publisher")
.annotate(count=Count("pk"))
.values("count")
)
publisher_books_qs = (
Publisher.objects.annotate(
total_books=Count("book"),
)
.filter(
total_books=Subquery(long_books_qs, output_field=IntegerField()),
)
.values("name")
)
self.assertCountEqual(
publisher_books_qs, [{"name": "Sams"}, {"name": "Morgan Kaufmann"}]
)
def test_annotation_exists_aggregate_values_chaining(self):
qs = (
Book.objects.values("publisher")
.annotate(
has_authors=Exists(
Book.authors.through.objects.filter(book=OuterRef("pk"))
),
max_pubdate=Max("pubdate"),
)
.values_list("max_pubdate", flat=True)
.order_by("max_pubdate")
)
self.assertCountEqual(
qs,
[
datetime.date(1991, 10, 15),
datetime.date(2008, 3, 3),
datetime.date(2008, 6, 23),
datetime.date(2008, 11, 3),
],
)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_annotation_subquery_and_aggregate_values_chaining(self):
qs = (
Book.objects.annotate(pub_year=ExtractYear("pubdate"))
.values("pub_year")
.annotate(
top_rating=Subquery(
Book.objects.filter(pubdate__year=OuterRef("pub_year"))
.order_by("-rating")
.values("rating")[:1]
),
total_pages=Sum("pages"),
)
.values("pub_year", "total_pages", "top_rating")
)
self.assertCountEqual(
qs,
[
{"pub_year": 1991, "top_rating": 5.0, "total_pages": 946},
{"pub_year": 1995, "top_rating": 4.0, "total_pages": 1132},
{"pub_year": 2007, "top_rating": 4.5, "total_pages": 447},
{"pub_year": 2008, "top_rating": 4.0, "total_pages": 1178},
],
)
def test_annotation_subquery_outerref_transform(self):
qs = Book.objects.annotate(
top_rating_year=Subquery(
Book.objects.filter(pubdate__year=OuterRef("pubdate__year"))
.order_by("-rating")
.values("rating")[:1]
),
).values("pubdate__year", "top_rating_year")
self.assertCountEqual(
qs,
[
{"pubdate__year": 1991, "top_rating_year": 5.0},
{"pubdate__year": 1995, "top_rating_year": 4.0},
{"pubdate__year": 2007, "top_rating_year": 4.5},
{"pubdate__year": 2008, "top_rating_year": 4.0},
{"pubdate__year": 2008, "top_rating_year": 4.0},
{"pubdate__year": 2008, "top_rating_year": 4.0},
],
)
def test_annotation_aggregate_with_m2o(self):
qs = (
Author.objects.filter(age__lt=30)
.annotate(
max_pages=Case(
When(book_contact_set__isnull=True, then=Value(0)),
default=Max(F("book__pages")),
),
)
.values("name", "max_pages")
)
self.assertCountEqual(
qs,
[
{"name": "James Bennett", "max_pages": 300},
{"name": "Paul Bissex", "max_pages": 0},
{"name": "Wesley J. Chun", "max_pages": 0},
],
)
class AliasTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="James Bennett", age=34)
cls.a4 = Author.objects.create(name="Peter Norvig", age=57)
cls.a5 = Author.objects.create(name="Stuart Russell", age=46)
p1 = Publisher.objects.create(name="Apress", num_awards=3)
cls.b1 = Book.objects.create(
isbn="159059725",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=p1,
pubdate=datetime.date(2007, 12, 6),
name="The Definitive Guide to Django: Web Development Done Right",
)
cls.b2 = Book.objects.create(
isbn="159059996",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a3,
publisher=p1,
pubdate=datetime.date(2008, 6, 23),
name="Practical Django Projects",
)
cls.b3 = Book.objects.create(
isbn="013790395",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a4,
publisher=p1,
pubdate=datetime.date(1995, 1, 15),
name="Artificial Intelligence: A Modern Approach",
)
cls.b4 = Book.objects.create(
isbn="155860191",
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a4,
publisher=p1,
pubdate=datetime.date(1991, 10, 15),
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4, cls.a5)
cls.b4.authors.add(cls.a4)
Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
def test_basic_alias(self):
qs = Book.objects.alias(is_book=Value(1))
self.assertIs(hasattr(qs.first(), "is_book"), False)
def test_basic_alias_annotation(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
).annotate(is_book=F("is_book_alias"))
self.assertIs(hasattr(qs.first(), "is_book_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_basic_alias_f_annotation(self):
qs = Book.objects.alias(another_rating_alias=F("rating")).annotate(
another_rating=F("another_rating_alias")
)
self.assertIs(hasattr(qs.first(), "another_rating_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.another_rating, book.rating)
def test_basic_alias_f_transform_annotation(self):
qs = Book.objects.alias(
pubdate_alias=F("pubdate"),
).annotate(pubdate_year=F("pubdate_alias__year"))
self.assertIs(hasattr(qs.first(), "pubdate_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.pubdate_year, book.pubdate.year)
def test_alias_after_annotation(self):
qs = Book.objects.annotate(
is_book=Value(1),
).alias(is_book_alias=F("is_book"))
book = qs.first()
self.assertIs(hasattr(book, "is_book"), True)
self.assertIs(hasattr(book, "is_book_alias"), False)
def test_overwrite_annotation_with_alias(self):
qs = Book.objects.annotate(is_book=Value(1)).alias(is_book=F("is_book"))
self.assertIs(hasattr(qs.first(), "is_book"), False)
def test_overwrite_alias_with_annotation(self):
qs = Book.objects.alias(is_book=Value(1)).annotate(is_book=F("is_book"))
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_alias_annotation_expression(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
).annotate(is_book=Coalesce("is_book_alias", 0))
self.assertIs(hasattr(qs.first(), "is_book_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
def test_alias_default_alias_expression(self):
qs = Author.objects.alias(
Sum("book__pages"),
).filter(book__pages__sum__gt=2000)
self.assertIs(hasattr(qs.first(), "book__pages__sum"), False)
self.assertSequenceEqual(qs, [self.a4])
def test_joined_alias_annotation(self):
qs = (
Book.objects.select_related("publisher")
.alias(
num_awards_alias=F("publisher__num_awards"),
)
.annotate(num_awards=F("num_awards_alias"))
)
self.assertIs(hasattr(qs.first(), "num_awards_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.num_awards, book.publisher.num_awards)
def test_alias_annotate_with_aggregation(self):
qs = Book.objects.alias(
is_book_alias=Value(1),
rating_count_alias=Count("rating"),
).annotate(
is_book=F("is_book_alias"),
rating_count=F("rating_count_alias"),
)
book = qs.first()
self.assertIs(hasattr(book, "is_book_alias"), False)
self.assertIs(hasattr(book, "rating_count_alias"), False)
for book in qs:
with self.subTest(book=book):
self.assertEqual(book.is_book, 1)
self.assertEqual(book.rating_count, 1)
def test_filter_alias_with_f(self):
qs = Book.objects.alias(
other_rating=F("rating"),
).filter(other_rating=4.5)
self.assertIs(hasattr(qs.first(), "other_rating"), False)
self.assertSequenceEqual(qs, [self.b1])
def test_filter_alias_with_double_f(self):
qs = Book.objects.alias(
other_rating=F("rating"),
).filter(other_rating=F("rating"))
self.assertIs(hasattr(qs.first(), "other_rating"), False)
self.assertEqual(qs.count(), Book.objects.count())
def test_filter_alias_agg_with_double_f(self):
qs = Book.objects.alias(
sum_rating=Sum("rating"),
).filter(sum_rating=F("sum_rating"))
self.assertIs(hasattr(qs.first(), "sum_rating"), False)
self.assertEqual(qs.count(), Book.objects.count())
def test_update_with_alias(self):
Book.objects.alias(
other_rating=F("rating") - 1,
).update(rating=F("other_rating"))
self.b1.refresh_from_db()
self.assertEqual(self.b1.rating, 3.5)
def test_order_by_alias(self):
qs = Author.objects.alias(other_age=F("age")).order_by("other_age")
self.assertIs(hasattr(qs.first(), "other_age"), False)
self.assertQuerysetEqual(qs, [34, 34, 35, 46, 57], lambda a: a.age)
def test_order_by_alias_aggregate(self):
qs = (
Author.objects.values("age")
.alias(age_count=Count("age"))
.order_by("age_count", "age")
)
self.assertIs(hasattr(qs.first(), "age_count"), False)
self.assertQuerysetEqual(qs, [35, 46, 57, 34], lambda a: a["age"])
def test_dates_alias(self):
qs = Book.objects.alias(
pubdate_alias=F("pubdate"),
).dates("pubdate_alias", "month")
self.assertCountEqual(
qs,
[
datetime.date(1991, 10, 1),
datetime.date(1995, 1, 1),
datetime.date(2007, 12, 1),
datetime.date(2008, 6, 1),
],
)
def test_datetimes_alias(self):
qs = Store.objects.alias(
original_opening_alias=F("original_opening"),
).datetimes("original_opening_alias", "year")
self.assertCountEqual(
qs,
[
datetime.datetime(1994, 1, 1),
datetime.datetime(2001, 1, 1),
],
)
def test_aggregate_alias(self):
msg = (
"Cannot aggregate over the 'other_age' alias. Use annotate() to promote it."
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.alias(
other_age=F("age"),
).aggregate(otherage_sum=Sum("other_age"))
def test_defer_only_alias(self):
qs = Book.objects.alias(rating_alias=F("rating") - 1)
msg = "Book has no field named 'rating_alias'"
for operation in ["defer", "only"]:
with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldDoesNotExist, msg):
getattr(qs, operation)("rating_alias").first()
@skipUnlessDBFeature("can_distinct_on_fields")
def test_distinct_on_alias(self):
qs = Book.objects.alias(rating_alias=F("rating") - 1)
msg = "Cannot resolve keyword 'rating_alias' into field."
with self.assertRaisesMessage(FieldError, msg):
qs.distinct("rating_alias").first()
def test_values_alias(self):
qs = Book.objects.alias(rating_alias=F("rating") - 1)
msg = "Cannot select the 'rating_alias' alias. Use annotate() to promote it."
for operation in ["values", "values_list"]:
with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldError, msg):
getattr(qs, operation)("rating_alias")
|
47212d8519edae8b8fc0dcd85943c7682f1e94d69e3a1db34484c499bbe4d3eb | from django.db import models
class Author(models.Model):
name = models.CharField(max_length=100)
age = models.IntegerField()
friends = models.ManyToManyField("self", blank=True)
class Publisher(models.Model):
name = models.CharField(max_length=255)
num_awards = models.IntegerField()
class Book(models.Model):
isbn = models.CharField(max_length=9)
name = models.CharField(max_length=255)
pages = models.IntegerField()
rating = models.FloatField()
price = models.DecimalField(decimal_places=2, max_digits=6)
authors = models.ManyToManyField(Author)
contact = models.ForeignKey(Author, models.CASCADE, related_name="book_contact_set")
publisher = models.ForeignKey(Publisher, models.CASCADE)
pubdate = models.DateField()
class Store(models.Model):
name = models.CharField(max_length=255)
books = models.ManyToManyField(Book)
original_opening = models.DateTimeField()
friday_night_closing = models.TimeField()
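    # Mapped to the "surface" column; the raw SQL tests reference it by that
    # name.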
area = models.IntegerField(null=True, db_column="surface")
class DepartmentStore(Store):
chain = models.CharField(max_length=255)
class Employee(models.Model):
    # The order of these fields matters, do not change. Certain backends
# rely on field ordering to perform database conversions, and this
# model helps to test that.
first_name = models.CharField(max_length=20)
manager = models.BooleanField(default=False)
last_name = models.CharField(max_length=20)
store = models.ForeignKey(Store, models.CASCADE)
age = models.IntegerField()
salary = models.DecimalField(max_digits=8, decimal_places=2)
class Company(models.Model):
name = models.CharField(max_length=200)
motto = models.CharField(max_length=200, null=True, blank=True)
ticker_name = models.CharField(max_length=10, null=True, blank=True)
description = models.CharField(max_length=200, null=True, blank=True)
class Ticket(models.Model):
active_at = models.DateTimeField()
duration = models.DurationField()
|
0d8e6cb14d429158ea259140ffb9dc7d6da40a89cbbd40a5eac72823e437ff93 | from django.db import IntegrityError, transaction
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from .fields import MyWrapper
from .models import Bar, Business, CustomAutoFieldModel, Employee, Foo
class BasicCustomPKTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.dan = Employee.objects.create(
employee_code=123,
first_name="Dan",
last_name="Jones",
)
cls.fran = Employee.objects.create(
employee_code=456,
first_name="Fran",
last_name="Bones",
)
cls.business = Business.objects.create(name="Sears")
cls.business.employees.add(cls.dan, cls.fran)
def test_querysets(self):
"""
        Both pk and the custom attribute name can be used in filter() and friends.
"""
self.assertQuerysetEqual(
Employee.objects.filter(pk=123),
[
"Dan Jones",
],
str,
)
self.assertQuerysetEqual(
Employee.objects.filter(employee_code=123),
[
"Dan Jones",
],
str,
)
self.assertQuerysetEqual(
Employee.objects.filter(pk__in=[123, 456]),
[
"Fran Bones",
"Dan Jones",
],
str,
)
self.assertQuerysetEqual(
Employee.objects.all(),
[
"Fran Bones",
"Dan Jones",
],
str,
)
self.assertQuerysetEqual(
Business.objects.filter(name="Sears"), ["Sears"], lambda b: b.name
)
self.assertQuerysetEqual(
Business.objects.filter(pk="Sears"),
[
"Sears",
],
lambda b: b.name,
)
def test_querysets_related_name(self):
"""
        A custom pk doesn't affect related_name-based lookups.
"""
self.assertQuerysetEqual(
self.business.employees.all(),
[
"Fran Bones",
"Dan Jones",
],
str,
)
self.assertQuerysetEqual(
self.fran.business_set.all(),
[
"Sears",
],
lambda b: b.name,
)
def test_querysets_relational(self):
"""
        Queries across tables that involve the primary key work as expected.
"""
self.assertQuerysetEqual(
Employee.objects.filter(business__name="Sears"),
[
"Fran Bones",
"Dan Jones",
],
str,
)
self.assertQuerysetEqual(
Employee.objects.filter(business__pk="Sears"),
[
"Fran Bones",
"Dan Jones",
],
str,
)
self.assertQuerysetEqual(
Business.objects.filter(employees__employee_code=123),
[
"Sears",
],
lambda b: b.name,
)
self.assertQuerysetEqual(
Business.objects.filter(employees__pk=123),
[
"Sears",
],
lambda b: b.name,
)
self.assertQuerysetEqual(
Business.objects.filter(employees__first_name__startswith="Fran"),
[
"Sears",
],
lambda b: b.name,
)
def test_get(self):
"""
        get() can accept either pk or the real attribute name.
"""
self.assertEqual(Employee.objects.get(pk=123), self.dan)
self.assertEqual(Employee.objects.get(pk=456), self.fran)
with self.assertRaises(Employee.DoesNotExist):
Employee.objects.get(pk=42)
# Use the name of the primary key, rather than pk.
self.assertEqual(Employee.objects.get(employee_code=123), self.dan)
def test_pk_attributes(self):
"""
pk and attribute name are available on the model
No default id attribute is added
"""
# pk can be used as a substitute for the primary key.
# The primary key can be accessed via the pk property on the model.
e = Employee.objects.get(pk=123)
self.assertEqual(e.pk, 123)
# Or we can use the real attribute name for the primary key:
self.assertEqual(e.employee_code, 123)
with self.assertRaisesMessage(
AttributeError, "'Employee' object has no attribute 'id'"
):
e.id
def test_in_bulk(self):
"""
Custom pks work with in_bulk, both for integer and non-integer types
"""
emps = Employee.objects.in_bulk([123, 456])
self.assertEqual(emps[123], self.dan)
self.assertEqual(
Business.objects.in_bulk(["Sears"]),
{
"Sears": self.business,
},
)
def test_save(self):
"""
        Custom pks do not affect save().
"""
fran = Employee.objects.get(pk=456)
fran.last_name = "Jones"
fran.save()
self.assertQuerysetEqual(
Employee.objects.filter(last_name="Jones"),
[
"Dan Jones",
"Fran Jones",
],
str,
)
class CustomPKTests(TestCase):
def test_custom_pk_create(self):
"""
New objects can be created both with pk and the custom name
"""
Employee.objects.create(employee_code=1234, first_name="Foo", last_name="Bar")
Employee.objects.create(pk=1235, first_name="Foo", last_name="Baz")
Business.objects.create(name="Bears")
Business.objects.create(pk="Tears")
def test_unicode_pk(self):
# Primary key may be Unicode string.
Business.objects.create(name="jaźń")
def test_unique_pk(self):
# The primary key must also be unique, so trying to create a new object
# with the same primary key will fail.
Employee.objects.create(
employee_code=123, first_name="Frank", last_name="Jones"
)
with self.assertRaises(IntegrityError):
with transaction.atomic():
Employee.objects.create(
employee_code=123, first_name="Fred", last_name="Jones"
)
def test_zero_non_autoincrement_pk(self):
Employee.objects.create(employee_code=0, first_name="Frank", last_name="Jones")
employee = Employee.objects.get(pk=0)
self.assertEqual(employee.employee_code, 0)
def test_custom_field_pk(self):
# Regression for #10785 -- Custom fields can be used for primary keys.
new_bar = Bar.objects.create()
new_foo = Foo.objects.create(bar=new_bar)
f = Foo.objects.get(bar=new_bar.pk)
self.assertEqual(f, new_foo)
self.assertEqual(f.bar, new_bar)
f = Foo.objects.get(bar=new_bar)
        self.assertEqual(f, new_foo)
self.assertEqual(f.bar, new_bar)
# SQLite lets objects be saved with an empty primary key, even though an
# integer is expected. So we can't check for an error being raised in that
# case for SQLite. Remove it from the suite for this next bit.
@skipIfDBFeature("supports_unspecified_pk")
def test_required_pk(self):
# The primary key must be specified, so an error is raised if you
# try to create an object without it.
with self.assertRaises(IntegrityError):
with transaction.atomic():
Employee.objects.create(first_name="Tom", last_name="Smith")
def test_auto_field_subclass_create(self):
obj = CustomAutoFieldModel.objects.create()
self.assertIsInstance(obj.id, MyWrapper)
@skipUnlessDBFeature("can_return_rows_from_bulk_insert")
def test_auto_field_subclass_bulk_create(self):
obj = CustomAutoFieldModel()
CustomAutoFieldModel.objects.bulk_create([obj])
self.assertIsInstance(obj.id, MyWrapper)
|
81097c3a6e44862816e482bd2919748a4a67d5f58e011cf6d90b8cb133e7ea32 | """
Using a custom primary key
By default, Django adds an ``"id"`` field to each model. But you can override
this behavior by explicitly adding ``primary_key=True`` to a field.
"""
from django.db import models
from .fields import MyAutoField, MyWrapperField
class Employee(models.Model):
employee_code = models.IntegerField(primary_key=True, db_column="code")
first_name = models.CharField(max_length=20)
last_name = models.CharField(max_length=20)
class Meta:
ordering = ("last_name", "first_name")
def __str__(self):
return "%s %s" % (self.first_name, self.last_name)
class Business(models.Model):
name = models.CharField(max_length=20, primary_key=True)
employees = models.ManyToManyField(Employee)
class Meta:
verbose_name_plural = "businesses"
class Bar(models.Model):
id = MyWrapperField(primary_key=True, db_index=True)
class Foo(models.Model):
bar = models.ForeignKey(Bar, models.CASCADE)
class CustomAutoFieldModel(models.Model):
id = MyAutoField(primary_key=True)
|
597d171969f3a18ee07541201393fccb2ec48e7ead2e5e8acbd5c0d54ed721ea | import random
import string
from django.db import models
class MyWrapper:
def __init__(self, value):
self.value = value
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.value)
def __str__(self):
return self.value
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.value == other.value
return self.value == other
class MyWrapperField(models.CharField):
def __init__(self, *args, **kwargs):
kwargs["max_length"] = 10
super().__init__(*args, **kwargs)
def pre_save(self, instance, add):
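        # Generate a random ten-letter wrapped value when none has been
        # assigned, so every saved instance gets a value.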
value = getattr(instance, self.attname, None)
if not value:
value = MyWrapper("".join(random.sample(string.ascii_lowercase, 10)))
setattr(instance, self.attname, value)
return value
def to_python(self, value):
if not value:
return
if not isinstance(value, MyWrapper):
value = MyWrapper(value)
return value
def from_db_value(self, value, expression, connection):
if not value:
return
return MyWrapper(value)
def get_db_prep_save(self, value, connection):
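        # Persist the wrapper as its underlying string value.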
if not value:
return
if isinstance(value, MyWrapper):
return str(value)
return value
def get_db_prep_value(self, value, connection, prepared=False):
if not value:
return
if isinstance(value, MyWrapper):
return str(value)
return value
class MyAutoField(models.BigAutoField):
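    # Values loaded from the database are wrapped in MyWrapper; values sent
    # to the database are converted back to plain integers.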
def from_db_value(self, value, expression, connection):
if value is None:
return None
return MyWrapper(value)
def get_prep_value(self, value):
if value is None:
return None
return int(value)
|
d2598395b32117c4dd0f48fb4e011c31dc3e58e2f61b77a123af458e3f5a5838 | import time
import traceback
from datetime import date, datetime, timedelta
from threading import Thread
from django.core.exceptions import FieldError
from django.db import DatabaseError, IntegrityError, connection
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from django.utils.functional import lazy
from .models import (
Author,
Book,
DefaultPerson,
ManualPrimaryKeyTest,
Person,
Profile,
Publisher,
Tag,
Thing,
)
class GetOrCreateTests(TestCase):
@classmethod
def setUpTestData(cls):
Person.objects.create(
first_name="John", last_name="Lennon", birthday=date(1940, 10, 9)
)
def test_get_or_create_method_with_get(self):
created = Person.objects.get_or_create(
first_name="John",
last_name="Lennon",
defaults={"birthday": date(1940, 10, 9)},
)[1]
self.assertFalse(created)
self.assertEqual(Person.objects.count(), 1)
def test_get_or_create_method_with_create(self):
created = Person.objects.get_or_create(
first_name="George",
last_name="Harrison",
defaults={"birthday": date(1943, 2, 25)},
)[1]
self.assertTrue(created)
self.assertEqual(Person.objects.count(), 2)
def test_get_or_create_redundant_instance(self):
"""
        If the exact same statement is executed twice, the second call won't
        create a Person.
"""
Person.objects.get_or_create(
first_name="George",
last_name="Harrison",
defaults={"birthday": date(1943, 2, 25)},
)
created = Person.objects.get_or_create(
first_name="George",
last_name="Harrison",
defaults={"birthday": date(1943, 2, 25)},
)[1]
self.assertFalse(created)
self.assertEqual(Person.objects.count(), 2)
def test_get_or_create_invalid_params(self):
"""
If you don't specify a value or default value for all required
fields, you will get an error.
"""
with self.assertRaises(IntegrityError):
Person.objects.get_or_create(first_name="Tom", last_name="Smith")
def test_get_or_create_with_pk_property(self):
"""
Using the pk property of a model is allowed.
"""
Thing.objects.get_or_create(pk=1)
def test_get_or_create_with_model_property_defaults(self):
"""Using a property with a setter implemented is allowed."""
t, _ = Thing.objects.get_or_create(
defaults={"capitalized_name_property": "annie"}, pk=1
)
self.assertEqual(t.name, "Annie")
def test_get_or_create_on_related_manager(self):
p = Publisher.objects.create(name="Acme Publishing")
# Create a book through the publisher.
book, created = p.books.get_or_create(name="The Book of Ed & Fred")
self.assertTrue(created)
# The publisher should have one book.
self.assertEqual(p.books.count(), 1)
# Try get_or_create again, this time nothing should be created.
book, created = p.books.get_or_create(name="The Book of Ed & Fred")
self.assertFalse(created)
# And the publisher should still have one book.
self.assertEqual(p.books.count(), 1)
# Add an author to the book.
ed, created = book.authors.get_or_create(name="Ed")
self.assertTrue(created)
# The book should have one author.
self.assertEqual(book.authors.count(), 1)
# Try get_or_create again, this time nothing should be created.
ed, created = book.authors.get_or_create(name="Ed")
self.assertFalse(created)
# And the book should still have one author.
self.assertEqual(book.authors.count(), 1)
# Add a second author to the book.
fred, created = book.authors.get_or_create(name="Fred")
self.assertTrue(created)
# The book should have two authors now.
self.assertEqual(book.authors.count(), 2)
# Create an Author not tied to any books.
Author.objects.create(name="Ted")
# There should be three Authors in total. The book object should have two.
self.assertEqual(Author.objects.count(), 3)
self.assertEqual(book.authors.count(), 2)
# Try creating a book through an author.
_, created = ed.books.get_or_create(name="Ed's Recipes", publisher=p)
self.assertTrue(created)
# Now Ed has two Books, Fred just one.
self.assertEqual(ed.books.count(), 2)
self.assertEqual(fred.books.count(), 1)
# Use the publisher's primary key value instead of a model instance.
_, created = ed.books.get_or_create(
name="The Great Book of Ed", publisher_id=p.id
)
self.assertTrue(created)
# Try get_or_create again, this time nothing should be created.
_, created = ed.books.get_or_create(
name="The Great Book of Ed", publisher_id=p.id
)
self.assertFalse(created)
# The publisher should have three books.
self.assertEqual(p.books.count(), 3)
def test_defaults_exact(self):
"""
If you have a field named defaults and want to use it as an exact
lookup, you need to use 'defaults__exact'.
"""
obj, created = Person.objects.get_or_create(
first_name="George",
last_name="Harrison",
defaults__exact="testing",
defaults={
"birthday": date(1943, 2, 25),
"defaults": "testing",
},
)
self.assertTrue(created)
self.assertEqual(obj.defaults, "testing")
obj2, created = Person.objects.get_or_create(
first_name="George",
last_name="Harrison",
defaults__exact="testing",
defaults={
"birthday": date(1943, 2, 25),
"defaults": "testing",
},
)
self.assertFalse(created)
self.assertEqual(obj, obj2)
def test_callable_defaults(self):
"""
Callables in `defaults` are evaluated if the instance is created.
"""
obj, created = Person.objects.get_or_create(
first_name="George",
defaults={"last_name": "Harrison", "birthday": lambda: date(1943, 2, 25)},
)
self.assertTrue(created)
self.assertEqual(date(1943, 2, 25), obj.birthday)
def test_callable_defaults_not_called(self):
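        # "John Lennon" already exists (from setUpTestData), so the callable
        # default must never be evaluated.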
def raise_exception():
raise AssertionError
obj, created = Person.objects.get_or_create(
first_name="John",
last_name="Lennon",
defaults={"birthday": lambda: raise_exception()},
)
def test_defaults_not_evaluated_unless_needed(self):
"""`defaults` aren't evaluated if the instance isn't created."""
def raise_exception():
raise AssertionError
obj, created = Person.objects.get_or_create(
first_name="John",
defaults=lazy(raise_exception, object)(),
)
self.assertFalse(created)
class GetOrCreateTestsWithManualPKs(TestCase):
@classmethod
def setUpTestData(cls):
ManualPrimaryKeyTest.objects.create(id=1, data="Original")
def test_create_with_duplicate_primary_key(self):
"""
        If you specify an existing primary key but different values for other
        fields, you will get an error and the data will not be updated.
"""
with self.assertRaises(IntegrityError):
ManualPrimaryKeyTest.objects.get_or_create(id=1, data="Different")
self.assertEqual(ManualPrimaryKeyTest.objects.get(id=1).data, "Original")
def test_get_or_create_raises_IntegrityError_plus_traceback(self):
"""
get_or_create should raise IntegrityErrors with the full traceback.
This is tested by checking that a known method call is in the traceback.
We cannot use assertRaises here because we need to inspect
the actual traceback. Refs #16340.
"""
try:
ManualPrimaryKeyTest.objects.get_or_create(id=1, data="Different")
except IntegrityError:
formatted_traceback = traceback.format_exc()
self.assertIn("obj.save", formatted_traceback)
def test_savepoint_rollback(self):
"""
The database connection is still usable after a DatabaseError in
get_or_create() (#20463).
"""
Tag.objects.create(text="foo")
with self.assertRaises(DatabaseError):
# pk 123456789 doesn't exist, so the tag object will be created.
# Saving triggers a unique constraint violation on 'text'.
Tag.objects.get_or_create(pk=123456789, defaults={"text": "foo"})
# Tag objects can be created after the error.
Tag.objects.create(text="bar")
def test_get_or_create_empty(self):
"""
If all the attributes on a model have defaults, get_or_create() doesn't
require any arguments.
"""
DefaultPerson.objects.get_or_create()
class GetOrCreateTransactionTests(TransactionTestCase):
available_apps = ["get_or_create"]
def test_get_or_create_integrityerror(self):
"""
Regression test for #15117. Requires a TransactionTestCase on
databases that delay integrity checks until the end of transactions,
otherwise the exception is never raised.
"""
try:
Profile.objects.get_or_create(person=Person(id=1))
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
class GetOrCreateThroughManyToMany(TestCase):
def test_get_get_or_create(self):
tag = Tag.objects.create(text="foo")
a_thing = Thing.objects.create(name="a")
a_thing.tags.add(tag)
obj, created = a_thing.tags.get_or_create(text="foo")
self.assertFalse(created)
self.assertEqual(obj.pk, tag.pk)
def test_create_get_or_create(self):
a_thing = Thing.objects.create(name="a")
obj, created = a_thing.tags.get_or_create(text="foo")
self.assertTrue(created)
self.assertEqual(obj.text, "foo")
self.assertIn(obj, a_thing.tags.all())
def test_something(self):
Tag.objects.create(text="foo")
a_thing = Thing.objects.create(name="a")
with self.assertRaises(IntegrityError):
a_thing.tags.get_or_create(text="foo")
class UpdateOrCreateTests(TestCase):
def test_update(self):
Person.objects.create(
first_name="John", last_name="Lennon", birthday=date(1940, 10, 9)
)
p, created = Person.objects.update_or_create(
first_name="John",
last_name="Lennon",
defaults={"birthday": date(1940, 10, 10)},
)
self.assertFalse(created)
self.assertEqual(p.first_name, "John")
self.assertEqual(p.last_name, "Lennon")
self.assertEqual(p.birthday, date(1940, 10, 10))
def test_create(self):
p, created = Person.objects.update_or_create(
first_name="John",
last_name="Lennon",
defaults={"birthday": date(1940, 10, 10)},
)
self.assertTrue(created)
self.assertEqual(p.first_name, "John")
self.assertEqual(p.last_name, "Lennon")
self.assertEqual(p.birthday, date(1940, 10, 10))
def test_create_twice(self):
params = {
"first_name": "John",
"last_name": "Lennon",
"birthday": date(1940, 10, 10),
}
Person.objects.update_or_create(**params)
# If we execute the exact same statement, it won't create a Person.
p, created = Person.objects.update_or_create(**params)
self.assertFalse(created)
def test_integrity(self):
"""
If you don't specify a value or default value for all required
fields, you will get an error.
"""
with self.assertRaises(IntegrityError):
Person.objects.update_or_create(first_name="Tom", last_name="Smith")
def test_manual_primary_key_test(self):
"""
        If you specify an existing primary key but different values for other
        fields, you will get an error and the data will not be updated.
"""
ManualPrimaryKeyTest.objects.create(id=1, data="Original")
with self.assertRaises(IntegrityError):
ManualPrimaryKeyTest.objects.update_or_create(id=1, data="Different")
self.assertEqual(ManualPrimaryKeyTest.objects.get(id=1).data, "Original")
def test_with_pk_property(self):
"""
Using the pk property of a model is allowed.
"""
Thing.objects.update_or_create(pk=1)
def test_update_or_create_with_model_property_defaults(self):
"""Using a property with a setter implemented is allowed."""
t, _ = Thing.objects.get_or_create(
defaults={"capitalized_name_property": "annie"}, pk=1
)
self.assertEqual(t.name, "Annie")
def test_error_contains_full_traceback(self):
"""
update_or_create should raise IntegrityErrors with the full traceback.
This is tested by checking that a known method call is in the traceback.
        We cannot use assertRaises here because we need to inspect
the actual traceback. Refs #16340.
"""
try:
ManualPrimaryKeyTest.objects.update_or_create(id=1, data="Different")
except IntegrityError:
formatted_traceback = traceback.format_exc()
self.assertIn("obj.save", formatted_traceback)
def test_create_with_related_manager(self):
"""
Should be able to use update_or_create from the related manager to
create a book. Refs #23611.
"""
p = Publisher.objects.create(name="Acme Publishing")
book, created = p.books.update_or_create(name="The Book of Ed & Fred")
self.assertTrue(created)
self.assertEqual(p.books.count(), 1)
def test_update_with_related_manager(self):
"""
Should be able to use update_or_create from the related manager to
update a book. Refs #23611.
"""
p = Publisher.objects.create(name="Acme Publishing")
book = Book.objects.create(name="The Book of Ed & Fred", publisher=p)
self.assertEqual(p.books.count(), 1)
name = "The Book of Django"
book, created = p.books.update_or_create(defaults={"name": name}, id=book.id)
self.assertFalse(created)
self.assertEqual(book.name, name)
self.assertEqual(p.books.count(), 1)
def test_create_with_many(self):
"""
Should be able to use update_or_create from the m2m related manager to
create a book. Refs #23611.
"""
p = Publisher.objects.create(name="Acme Publishing")
author = Author.objects.create(name="Ted")
book, created = author.books.update_or_create(
name="The Book of Ed & Fred", publisher=p
)
self.assertTrue(created)
self.assertEqual(author.books.count(), 1)
def test_update_with_many(self):
"""
Should be able to use update_or_create from the m2m related manager to
update a book. Refs #23611.
"""
p = Publisher.objects.create(name="Acme Publishing")
author = Author.objects.create(name="Ted")
book = Book.objects.create(name="The Book of Ed & Fred", publisher=p)
book.authors.add(author)
self.assertEqual(author.books.count(), 1)
name = "The Book of Django"
book, created = author.books.update_or_create(
defaults={"name": name}, id=book.id
)
self.assertFalse(created)
self.assertEqual(book.name, name)
self.assertEqual(author.books.count(), 1)
def test_defaults_exact(self):
"""
If you have a field named defaults and want to use it as an exact
lookup, you need to use 'defaults__exact'.
"""
obj, created = Person.objects.update_or_create(
first_name="George",
last_name="Harrison",
defaults__exact="testing",
defaults={
"birthday": date(1943, 2, 25),
"defaults": "testing",
},
)
self.assertTrue(created)
self.assertEqual(obj.defaults, "testing")
obj, created = Person.objects.update_or_create(
first_name="George",
last_name="Harrison",
defaults__exact="testing",
defaults={
"birthday": date(1943, 2, 25),
"defaults": "another testing",
},
)
self.assertFalse(created)
self.assertEqual(obj.defaults, "another testing")
def test_create_callable_default(self):
obj, created = Person.objects.update_or_create(
first_name="George",
last_name="Harrison",
defaults={"birthday": lambda: date(1943, 2, 25)},
)
self.assertIs(created, True)
self.assertEqual(obj.birthday, date(1943, 2, 25))
def test_update_callable_default(self):
Person.objects.update_or_create(
first_name="George",
last_name="Harrison",
birthday=date(1942, 2, 25),
)
obj, created = Person.objects.update_or_create(
first_name="George",
defaults={"last_name": lambda: "NotHarrison"},
)
self.assertIs(created, False)
self.assertEqual(obj.last_name, "NotHarrison")
def test_defaults_not_evaluated_unless_needed(self):
"""`defaults` aren't evaluated if the instance isn't created."""
Person.objects.create(
first_name="John", last_name="Lennon", birthday=date(1940, 10, 9)
)
def raise_exception():
raise AssertionError
obj, created = Person.objects.get_or_create(
first_name="John",
defaults=lazy(raise_exception, object)(),
)
self.assertFalse(created)
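# Illustrative sketch (editorial addition, not part of the upstream test
# suite): the update_or_create() contract exercised above, using the Person
# model imported by this module. Lookup kwargs select the row, the reserved
# ``defaults`` dict supplies the values to set, and callables in ``defaults``
# are only evaluated when they are actually needed.
def _example_update_or_create_contract():
    obj, created = Person.objects.update_or_create(
        first_name="George",  # lookup against the unique field
        defaults={
            "last_name": "Harrison",
            "birthday": lambda: date(1943, 2, 25),  # evaluated lazily
        },
    )
    return obj, created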
class UpdateOrCreateTestsWithManualPKs(TestCase):
def test_create_with_duplicate_primary_key(self):
"""
If an existing primary key is specified with different values for other
fields, then IntegrityError is raised and data isn't updated.
"""
ManualPrimaryKeyTest.objects.create(id=1, data="Original")
with self.assertRaises(IntegrityError):
ManualPrimaryKeyTest.objects.update_or_create(id=1, data="Different")
self.assertEqual(ManualPrimaryKeyTest.objects.get(id=1).data, "Original")
class UpdateOrCreateTransactionTests(TransactionTestCase):
available_apps = ["get_or_create"]
@skipUnlessDBFeature("has_select_for_update")
@skipUnlessDBFeature("supports_transactions")
def test_updates_in_transaction(self):
"""
Objects are selected and updated in a transaction to avoid race
conditions. This test forces update_or_create() to hold the lock
in another thread for a relatively long time so that it can update
while it holds the lock. The updated field isn't a field in 'defaults',
so update_or_create() shouldn't have an effect on it.
"""
lock_status = {"has_grabbed_lock": False}
def birthday_sleep():
lock_status["has_grabbed_lock"] = True
time.sleep(0.5)
return date(1940, 10, 10)
def update_birthday_slowly():
Person.objects.update_or_create(
first_name="John", defaults={"birthday": birthday_sleep}
)
# Avoid leaking connection for Oracle
connection.close()
def lock_wait():
# timeout after ~0.5 seconds
for i in range(20):
time.sleep(0.025)
if lock_status["has_grabbed_lock"]:
return True
return False
Person.objects.create(
first_name="John", last_name="Lennon", birthday=date(1940, 10, 9)
)
# update_or_create in a separate thread
t = Thread(target=update_birthday_slowly)
before_start = datetime.now()
t.start()
if not lock_wait():
self.skipTest("Database took too long to lock the row")
# Update during lock
Person.objects.filter(first_name="John").update(last_name="NotLennon")
after_update = datetime.now()
# Wait for thread to finish
t.join()
        # The update was applied, and it blocked until the lock was released.
updated_person = Person.objects.get(first_name="John")
self.assertGreater(after_update - before_start, timedelta(seconds=0.5))
self.assertEqual(updated_person.last_name, "NotLennon")
@skipUnlessDBFeature("has_select_for_update")
@skipUnlessDBFeature("supports_transactions")
def test_creation_in_transaction(self):
"""
Objects are selected and updated in a transaction to avoid race
conditions. This test checks the behavior of update_or_create() when
the object doesn't already exist, but another thread creates the
object before update_or_create() does and then attempts to update the
object, also before update_or_create(). It forces update_or_create() to
hold the lock in another thread for a relatively long time so that it
can update while it holds the lock. The updated field isn't a field in
'defaults', so update_or_create() shouldn't have an effect on it.
"""
lock_status = {"lock_count": 0}
def birthday_sleep():
lock_status["lock_count"] += 1
time.sleep(0.5)
return date(1940, 10, 10)
def update_birthday_slowly():
try:
Person.objects.update_or_create(
first_name="John", defaults={"birthday": birthday_sleep}
)
finally:
# Avoid leaking connection for Oracle
connection.close()
def lock_wait(expected_lock_count):
# timeout after ~0.5 seconds
for i in range(20):
time.sleep(0.025)
if lock_status["lock_count"] == expected_lock_count:
return True
self.skipTest("Database took too long to lock the row")
# update_or_create in a separate thread.
t = Thread(target=update_birthday_slowly)
before_start = datetime.now()
t.start()
lock_wait(1)
# Create object *after* initial attempt by update_or_create to get obj
# but before creation attempt.
Person.objects.create(
first_name="John", last_name="Lennon", birthday=date(1940, 10, 9)
)
lock_wait(2)
# At this point, the thread is pausing for 0.5 seconds, so now attempt
# to modify object before update_or_create() calls save(). This should
# be blocked until after the save().
Person.objects.filter(first_name="John").update(last_name="NotLennon")
after_update = datetime.now()
# Wait for thread to finish
t.join()
# Check call to update_or_create() succeeded and the subsequent
# (blocked) call to update().
updated_person = Person.objects.get(first_name="John")
self.assertEqual(
updated_person.birthday, date(1940, 10, 10)
) # set by update_or_create()
self.assertEqual(updated_person.last_name, "NotLennon") # set by update()
self.assertGreater(after_update - before_start, timedelta(seconds=1))
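# Illustrative sketch (editorial addition; an approximation of the behavior
# under test, not the actual implementation): update_or_create() selects the
# row with select_for_update() inside a transaction, which is why the
# concurrent update() calls in the tests above block until the lock is freed.
def _example_locked_update(first_name, **defaults):
    from django.db import transaction
    with transaction.atomic():
        # Lock the matching row for the rest of the transaction.
        person = Person.objects.select_for_update().get(first_name=first_name)
        for field, value in defaults.items():
            setattr(person, field, value() if callable(value) else value)
        person.save()
        return person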
class InvalidCreateArgumentsTests(TransactionTestCase):
available_apps = ["get_or_create"]
msg = "Invalid field name(s) for model Thing: 'nonexistent'."
bad_field_msg = (
"Cannot resolve keyword 'nonexistent' into field. Choices are: id, name, tags"
)
def test_get_or_create_with_invalid_defaults(self):
with self.assertRaisesMessage(FieldError, self.msg):
Thing.objects.get_or_create(name="a", defaults={"nonexistent": "b"})
def test_get_or_create_with_invalid_kwargs(self):
with self.assertRaisesMessage(FieldError, self.bad_field_msg):
Thing.objects.get_or_create(name="a", nonexistent="b")
def test_update_or_create_with_invalid_defaults(self):
with self.assertRaisesMessage(FieldError, self.msg):
Thing.objects.update_or_create(name="a", defaults={"nonexistent": "b"})
def test_update_or_create_with_invalid_kwargs(self):
with self.assertRaisesMessage(FieldError, self.bad_field_msg):
Thing.objects.update_or_create(name="a", nonexistent="b")
def test_multiple_invalid_fields(self):
with self.assertRaisesMessage(FieldError, self.bad_field_msg):
Thing.objects.update_or_create(
name="a", nonexistent="b", defaults={"invalid": "c"}
)
def test_property_attribute_without_setter_defaults(self):
with self.assertRaisesMessage(
FieldError, "Invalid field name(s) for model Thing: 'name_in_all_caps'"
):
Thing.objects.update_or_create(
name="a", defaults={"name_in_all_caps": "FRANK"}
)
def test_property_attribute_without_setter_kwargs(self):
msg = (
"Cannot resolve keyword 'name_in_all_caps' into field. Choices are: id, "
"name, tags"
)
with self.assertRaisesMessage(FieldError, msg):
Thing.objects.update_or_create(
name_in_all_caps="FRANK", defaults={"name": "Frank"}
)
|
c25b5abdabed91c7c6c40a1c210dc9e78c430c16ce36bb7d68e478e54dc92801 | from django.db import models
class Person(models.Model):
first_name = models.CharField(max_length=100, unique=True)
last_name = models.CharField(max_length=100)
birthday = models.DateField()
defaults = models.TextField()
class DefaultPerson(models.Model):
first_name = models.CharField(max_length=100, default="Anonymous")
class ManualPrimaryKeyTest(models.Model):
id = models.IntegerField(primary_key=True)
data = models.CharField(max_length=100)
class Profile(models.Model):
person = models.ForeignKey(Person, models.CASCADE, primary_key=True)
class Tag(models.Model):
text = models.CharField(max_length=255, unique=True)
class Thing(models.Model):
name = models.CharField(max_length=255)
tags = models.ManyToManyField(Tag)
@property
def capitalized_name_property(self):
return self.name
@capitalized_name_property.setter
def capitalized_name_property(self, val):
self.name = val.capitalize()
@property
def name_in_all_caps(self):
return self.name.upper()
class Publisher(models.Model):
name = models.CharField(max_length=100)
class Author(models.Model):
name = models.CharField(max_length=100)
class Book(models.Model):
name = models.CharField(max_length=100)
authors = models.ManyToManyField(Author, related_name="books")
publisher = models.ForeignKey(
Publisher,
models.CASCADE,
related_name="books",
db_column="publisher_id_column",
)
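# Editorial note: the ``defaults`` field on Person exists so the tests can
# combine a ``defaults__exact`` lookup with the reserved ``defaults`` kwarg,
# and the property pair on Thing lets get_or_create()/update_or_create()
# assign values through a property setter.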
|
b55ba694fb177052843a27a74ef47cd8121a4db52c3da4b4bde64d7fdde7172c | import re
import types
from datetime import datetime, timedelta
from decimal import Decimal
from unittest import TestCase, mock
from django.core.exceptions import ValidationError
from django.core.files.base import ContentFile
from django.core.validators import (
BaseValidator,
DecimalValidator,
EmailValidator,
FileExtensionValidator,
MaxLengthValidator,
MaxValueValidator,
MinLengthValidator,
MinValueValidator,
ProhibitNullCharactersValidator,
RegexValidator,
URLValidator,
int_list_validator,
validate_comma_separated_integer_list,
validate_email,
validate_image_file_extension,
validate_integer,
validate_ipv4_address,
validate_ipv6_address,
validate_ipv46_address,
validate_slug,
validate_unicode_slug,
)
from django.test import SimpleTestCase
try:
from PIL import Image # noqa
except ImportError:
PILLOW_IS_INSTALLED = False
else:
PILLOW_IS_INSTALLED = True
NOW = datetime.now()
EXTENDED_SCHEMES = ["http", "https", "ftp", "ftps", "git", "file", "git+ssh"]
VALID_URLS = [
"http://www.djangoproject.com/",
"HTTP://WWW.DJANGOPROJECT.COM/",
"http://localhost/",
"http://example.com/",
"http://example.com:0",
"http://example.com:0/",
"http://example.com:65535",
"http://example.com:65535/",
"http://example.com./",
"http://www.example.com/",
"http://www.example.com:8000/test",
"http://valid-with-hyphens.com/",
"http://subdomain.example.com/",
"http://a.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"http://200.8.9.10/",
"http://200.8.9.10:8000/test",
"http://su--b.valid-----hyphens.com/",
"http://example.com?something=value",
"http://example.com/index.php?something=value&another=value2",
"https://example.com/",
"ftp://example.com/",
"ftps://example.com/",
"http://foo.com/blah_blah",
"http://foo.com/blah_blah/",
"http://foo.com/blah_blah_(wikipedia)",
"http://foo.com/blah_blah_(wikipedia)_(again)",
"http://www.example.com/wpstyle/?p=364",
"https://www.example.com/foo/?bar=baz&inga=42&quux",
"http://✪df.ws/123",
"http://[email protected]",
"http://[email protected]/",
"http://[email protected]:8080",
"http://[email protected]:8080/",
"http://[email protected]:65535",
"http://[email protected]:65535/",
"http://userid:@example.com",
"http://userid:@example.com/",
"http://userid:@example.com:8080",
"http://userid:@example.com:8080/",
"http://userid:[email protected]",
"http://userid:[email protected]/",
"http://userid:[email protected]:8",
"http://userid:[email protected]:8/",
"http://userid:[email protected]:8080",
"http://userid:[email protected]:8080/",
"http://userid:[email protected]:65535",
"http://userid:[email protected]:65535/",
"https://userid:paaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"[email protected]",
"https://userid:paaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"[email protected]:8080",
"https://useridddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"
"ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"
"ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"
"dddddddddddddddddddddd:[email protected]",
"https://useridddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"
"ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"
"ddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd"
"ddddddddddddddddd:[email protected]:8080",
"http://142.42.1.1/",
"http://142.42.1.1:8080/",
"http://➡.ws/䨹",
"http://⌘.ws",
"http://⌘.ws/",
"http://foo.com/blah_(wikipedia)#cite-1",
"http://foo.com/blah_(wikipedia)_blah#cite-1",
"http://foo.com/unicode_(✪)_in_parens",
"http://foo.com/(something)?after=parens",
"http://☺.damowmow.com/",
"http://djangoproject.com/events/#&product=browser",
"http://j.mp",
"ftp://foo.bar/baz",
"http://foo.bar/?q=Test%20URL-encoded%20stuff",
"http://مثال.إختبار",
"http://例子.测试",
"http://उदाहरण.परीक्षा",
"http://-.~_!$&'()*+,;=%40:80%[email protected]",
"http://xn--7sbb4ac0ad0be6cf.xn--p1ai",
"http://1337.net",
"http://a.b-c.de",
"http://223.255.255.254",
"ftps://foo.bar/",
"http://10.1.1.254",
"http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:80/index.html",
"http://[::192.9.5.5]/ipng",
"http://[::ffff:192.9.5.5]/ipng",
"http://[::1]:8080/",
"http://0.0.0.0/",
"http://255.255.255.255",
"http://224.0.0.0",
"http://224.1.1.1",
"http://111.112.113.114/",
"http://88.88.88.88/",
"http://11.12.13.14/",
"http://10.20.30.40/",
"http://1.2.3.4/",
"http://127.0.01.09.home.lan",
"http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.ex"
"ample.com",
"http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaa.com",
"http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"http://dashintld.c-m",
"http://multipledashintld.a-b-c",
"http://evenmoredashintld.a---c",
"http://dashinpunytld.xn---c",
]
INVALID_URLS = [
None,
56,
"no_scheme",
"foo",
"http://",
"http://example",
"http://example.",
"http://example.com:-1",
"http://example.com:-1/",
"http://example.com:000000080",
"http://example.com:000000080/",
"http://.com",
"http://invalid-.com",
"http://-invalid.com",
"http://invalid.com-",
"http://invalid.-com",
"http://inv-.alid-.com",
"http://inv-.-alid.com",
"file://localhost/path",
"git://example.com/",
"http://.",
"http://..",
"http://../",
"http://?",
"http://??",
"http://??/",
"http://#",
"http://##",
"http://##/",
"http://foo.bar?q=Spaces should be encoded",
"//",
"//a",
"///a",
"///",
"http:///a",
"foo.com",
"rdar://1234",
"h://test",
"http:// shouldfail.com",
":// should fail",
"http://foo.bar/foo(bar)baz quux",
"http://-error-.invalid/",
"http://dashinpunytld.trailingdot.xn--.",
"http://dashinpunytld.xn---",
"http://-a.b.co",
"http://a.b-.co",
"http://a.-b.co",
"http://a.b-.c.co",
"http:/",
"http://",
"http://",
"http://1.1.1.1.1",
"http://123.123.123",
"http://3628126748",
"http://123",
"http://000.000.000.000",
"http://016.016.016.016",
"http://192.168.000.001",
"http://01.2.3.4",
"http://01.2.3.4",
"http://1.02.3.4",
"http://1.2.03.4",
"http://1.2.3.04",
"http://.www.foo.bar/",
"http://.www.foo.bar./",
"http://[::1:2::3]:8/",
"http://[::1:2::3]:8080/",
"http://[]",
"http://[]:8080",
"http://example..com/",
"http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.e"
"xample.com",
"http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaa.com",
"http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaa",
"http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaa."
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaa"
"aaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaa"
"aaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaa",
"https://test.[com",
"http://@example.com",
"http://:@example.com",
"http://:[email protected]",
"http://foo@[email protected]",
"http://foo/[email protected]",
"http://foo:bar:[email protected]",
"http://foo:bar@[email protected]",
"http://foo:bar/[email protected]",
"http://invalid-.com/[email protected]",
# Newlines and tabs are not accepted.
"http://www.djangoproject.com/\n",
"http://[::ffff:192.9.5.5]\n",
"http://www.djangoproject.com/\r",
"http://[::ffff:192.9.5.5]\r",
"http://www.django\rproject.com/",
"http://[::\rffff:192.9.5.5]",
"http://\twww.djangoproject.com/",
"http://\t[::ffff:192.9.5.5]",
# Trailing junk does not take forever to reject.
"http://www.asdasdasdasdsadfm.com.br ",
"http://www.asdasdasdasdsadfm.com.br z",
]
TEST_DATA = [
# (validator, value, expected),
(validate_integer, "42", None),
(validate_integer, "-42", None),
(validate_integer, -42, None),
(validate_integer, -42.5, ValidationError),
(validate_integer, None, ValidationError),
(validate_integer, "a", ValidationError),
(validate_integer, "\n42", ValidationError),
(validate_integer, "42\n", ValidationError),
    (validate_email, "email@here.com", None),
    (validate_email, "weirder-email@here.and.there.com", None),
(validate_email, "email@[127.0.0.1]", None),
(validate_email, "email@[2001:dB8::1]", None),
(validate_email, "email@[2001:dB8:0:0:0:0:0:1]", None),
(validate_email, "email@[::fffF:127.0.0.1]", None),
    (validate_email, "example@valid-----hyphens.com", None),
    (validate_email, "example@valid-with-hyphens.com", None),
    (validate_email, "test@domain.with.idn.tld.उदाहरण.परीक्षा", None),
(validate_email, "email@localhost", None),
(EmailValidator(allowlist=["localdomain"]), "email@localdomain", None),
(validate_email, '"test@test"@example.com', None),
(validate_email, "example@atm.%s" % ("a" * 63), None),
(validate_email, "example@%s.atm" % ("a" * 63), None),
(validate_email, "example@%s.%s.atm" % ("a" * 63, "b" * 10), None),
(validate_email, "example@atm.%s" % ("a" * 64), ValidationError),
(validate_email, "example@%s.atm.%s" % ("b" * 64, "a" * 63), ValidationError),
(validate_email, None, ValidationError),
(validate_email, "", ValidationError),
(validate_email, "abc", ValidationError),
(validate_email, "abc@", ValidationError),
(validate_email, "abc@bar", ValidationError),
(validate_email, "a @x.cz", ValidationError),
    (validate_email, "abc@.com", ValidationError),
(validate_email, "something@@somewhere.com", ValidationError),
    (validate_email, "email@127.0.0.1", ValidationError),
(validate_email, "email@[127.0.0.256]", ValidationError),
(validate_email, "email@[2001:db8::12345]", ValidationError),
(validate_email, "email@[2001:db8:0:0:0:0:1]", ValidationError),
(validate_email, "email@[::ffff:127.0.0.256]", ValidationError),
(validate_email, "email@[2001:dg8::1]", ValidationError),
(validate_email, "email@[2001:dG8:0:0:0:0:0:1]", ValidationError),
(validate_email, "email@[::fTzF:127.0.0.1]", ValidationError),
    (validate_email, "example@invalid-.com", ValidationError),
    (validate_email, "example@-invalid.com", ValidationError),
    (validate_email, "example@invalid.com-", ValidationError),
    (validate_email, "example@inv-.alid-.com", ValidationError),
    (validate_email, "example@inv-.-alid.com", ValidationError),
    (validate_email, 'test@example.com\n\n<script src="x.js">', ValidationError),
# Quoted-string format (CR not allowed)
(validate_email, '"\\\011"@here.com', None),
(validate_email, '"\\\012"@here.com', ValidationError),
    (validate_email, "trailingdot@shouldfail.com.", ValidationError),
# Max length of domain name labels is 63 characters per RFC 1034.
(validate_email, "a@%s.us" % ("a" * 63), None),
(validate_email, "a@%s.us" % ("a" * 64), ValidationError),
# Trailing newlines in username or domain not allowed
    (validate_email, "a@b.com\n", ValidationError),
    (validate_email, "a\n@b.com", ValidationError),
    (validate_email, '"test@test"\n@example.com', ValidationError),
(validate_email, "a@[127.0.0.1]\n", ValidationError),
(validate_slug, "slug-ok", None),
(validate_slug, "longer-slug-still-ok", None),
(validate_slug, "--------", None),
(validate_slug, "nohyphensoranything", None),
(validate_slug, "a", None),
(validate_slug, "1", None),
(validate_slug, "a1", None),
(validate_slug, "", ValidationError),
(validate_slug, " text ", ValidationError),
(validate_slug, " ", ValidationError),
    (validate_slug, "some@mail.com", ValidationError),
(validate_slug, "你好", ValidationError),
(validate_slug, "你 好", ValidationError),
(validate_slug, "\n", ValidationError),
(validate_slug, "trailing-newline\n", ValidationError),
(validate_unicode_slug, "slug-ok", None),
(validate_unicode_slug, "longer-slug-still-ok", None),
(validate_unicode_slug, "--------", None),
(validate_unicode_slug, "nohyphensoranything", None),
(validate_unicode_slug, "a", None),
(validate_unicode_slug, "1", None),
(validate_unicode_slug, "a1", None),
(validate_unicode_slug, "你好", None),
(validate_unicode_slug, "", ValidationError),
(validate_unicode_slug, " text ", ValidationError),
(validate_unicode_slug, " ", ValidationError),
    (validate_unicode_slug, "some@mail.com", ValidationError),
(validate_unicode_slug, "\n", ValidationError),
(validate_unicode_slug, "你 好", ValidationError),
(validate_unicode_slug, "trailing-newline\n", ValidationError),
(validate_ipv4_address, "1.1.1.1", None),
(validate_ipv4_address, "255.0.0.0", None),
(validate_ipv4_address, "0.0.0.0", None),
(validate_ipv4_address, "256.1.1.1", ValidationError),
(validate_ipv4_address, "25.1.1.", ValidationError),
(validate_ipv4_address, "25,1,1,1", ValidationError),
(validate_ipv4_address, "25.1 .1.1", ValidationError),
(validate_ipv4_address, "1.1.1.1\n", ValidationError),
(validate_ipv4_address, "٧.2٥.3٣.243", ValidationError),
# Leading zeros are forbidden to avoid ambiguity with the octal notation.
(validate_ipv4_address, "000.000.000.000", ValidationError),
(validate_ipv4_address, "016.016.016.016", ValidationError),
(validate_ipv4_address, "192.168.000.001", ValidationError),
(validate_ipv4_address, "01.2.3.4", ValidationError),
(validate_ipv4_address, "01.2.3.4", ValidationError),
(validate_ipv4_address, "1.02.3.4", ValidationError),
(validate_ipv4_address, "1.2.03.4", ValidationError),
(validate_ipv4_address, "1.2.3.04", ValidationError),
# validate_ipv6_address uses django.utils.ipv6, which
# is tested in much greater detail in its own testcase
(validate_ipv6_address, "fe80::1", None),
(validate_ipv6_address, "::1", None),
(validate_ipv6_address, "1:2:3:4:5:6:7:8", None),
(validate_ipv6_address, "1:2", ValidationError),
(validate_ipv6_address, "::zzz", ValidationError),
(validate_ipv6_address, "12345::", ValidationError),
(validate_ipv46_address, "1.1.1.1", None),
(validate_ipv46_address, "255.0.0.0", None),
(validate_ipv46_address, "0.0.0.0", None),
(validate_ipv46_address, "fe80::1", None),
(validate_ipv46_address, "::1", None),
(validate_ipv46_address, "1:2:3:4:5:6:7:8", None),
(validate_ipv46_address, "256.1.1.1", ValidationError),
(validate_ipv46_address, "25.1.1.", ValidationError),
(validate_ipv46_address, "25,1,1,1", ValidationError),
(validate_ipv46_address, "25.1 .1.1", ValidationError),
(validate_ipv46_address, "1:2", ValidationError),
(validate_ipv46_address, "::zzz", ValidationError),
(validate_ipv46_address, "12345::", ValidationError),
# Leading zeros are forbidden to avoid ambiguity with the octal notation.
(validate_ipv46_address, "000.000.000.000", ValidationError),
(validate_ipv46_address, "016.016.016.016", ValidationError),
(validate_ipv46_address, "192.168.000.001", ValidationError),
(validate_ipv46_address, "01.2.3.4", ValidationError),
(validate_ipv46_address, "01.2.3.4", ValidationError),
(validate_ipv46_address, "1.02.3.4", ValidationError),
(validate_ipv46_address, "1.2.03.4", ValidationError),
(validate_ipv46_address, "1.2.3.04", ValidationError),
(validate_comma_separated_integer_list, "1", None),
(validate_comma_separated_integer_list, "12", None),
(validate_comma_separated_integer_list, "1,2", None),
(validate_comma_separated_integer_list, "1,2,3", None),
(validate_comma_separated_integer_list, "10,32", None),
(validate_comma_separated_integer_list, "", ValidationError),
(validate_comma_separated_integer_list, "a", ValidationError),
(validate_comma_separated_integer_list, "a,b,c", ValidationError),
(validate_comma_separated_integer_list, "1, 2, 3", ValidationError),
(validate_comma_separated_integer_list, ",", ValidationError),
(validate_comma_separated_integer_list, "1,2,3,", ValidationError),
(validate_comma_separated_integer_list, "1,2,", ValidationError),
(validate_comma_separated_integer_list, ",1", ValidationError),
(validate_comma_separated_integer_list, "1,,2", ValidationError),
(int_list_validator(sep="."), "1.2.3", None),
(int_list_validator(sep=".", allow_negative=True), "1.2.3", None),
(int_list_validator(allow_negative=True), "-1,-2,3", None),
(int_list_validator(allow_negative=True), "1,-2,-12", None),
(int_list_validator(), "-1,2,3", ValidationError),
(int_list_validator(sep="."), "1,2,3", ValidationError),
(int_list_validator(sep="."), "1.2.3\n", ValidationError),
(MaxValueValidator(10), 10, None),
(MaxValueValidator(10), -10, None),
(MaxValueValidator(10), 0, None),
(MaxValueValidator(NOW), NOW, None),
(MaxValueValidator(NOW), NOW - timedelta(days=1), None),
(MaxValueValidator(0), 1, ValidationError),
(MaxValueValidator(NOW), NOW + timedelta(days=1), ValidationError),
(MinValueValidator(-10), -10, None),
(MinValueValidator(-10), 10, None),
(MinValueValidator(-10), 0, None),
(MinValueValidator(NOW), NOW, None),
(MinValueValidator(NOW), NOW + timedelta(days=1), None),
(MinValueValidator(0), -1, ValidationError),
(MinValueValidator(NOW), NOW - timedelta(days=1), ValidationError),
# limit_value may be a callable.
(MinValueValidator(lambda: 1), 0, ValidationError),
(MinValueValidator(lambda: 1), 1, None),
(MaxLengthValidator(10), "", None),
(MaxLengthValidator(10), 10 * "x", None),
(MaxLengthValidator(10), 15 * "x", ValidationError),
(MinLengthValidator(10), 15 * "x", None),
(MinLengthValidator(10), 10 * "x", None),
(MinLengthValidator(10), "", ValidationError),
(URLValidator(EXTENDED_SCHEMES), "file://localhost/path", None),
(URLValidator(EXTENDED_SCHEMES), "git://example.com/", None),
(
URLValidator(EXTENDED_SCHEMES),
"git+ssh://[email protected]/example/hg-git.git",
None,
),
(URLValidator(EXTENDED_SCHEMES), "git://-invalid.com", ValidationError),
(BaseValidator(True), True, None),
(BaseValidator(True), False, ValidationError),
(RegexValidator(), "", None),
(RegexValidator(), "x1x2", None),
(RegexValidator("[0-9]+"), "xxxxxx", ValidationError),
(RegexValidator("[0-9]+"), "1234", None),
(RegexValidator(re.compile("[0-9]+")), "1234", None),
(RegexValidator(".*"), "", None),
(RegexValidator(re.compile(".*")), "", None),
(RegexValidator(".*"), "xxxxx", None),
(RegexValidator("x"), "y", ValidationError),
(RegexValidator(re.compile("x")), "y", ValidationError),
(RegexValidator("x", inverse_match=True), "y", None),
(RegexValidator(re.compile("x"), inverse_match=True), "y", None),
(RegexValidator("x", inverse_match=True), "x", ValidationError),
(RegexValidator(re.compile("x"), inverse_match=True), "x", ValidationError),
(RegexValidator("x", flags=re.IGNORECASE), "y", ValidationError),
(RegexValidator("a"), "A", ValidationError),
(RegexValidator("a", flags=re.IGNORECASE), "A", None),
(
FileExtensionValidator(["txt"]),
ContentFile("contents", name="fileWithUnsupportedExt.jpg"),
ValidationError,
),
(
FileExtensionValidator(["txt"]),
ContentFile("contents", name="fileWithUnsupportedExt.JPG"),
ValidationError,
),
(
FileExtensionValidator(["txt"]),
ContentFile("contents", name="fileWithNoExtension"),
ValidationError,
),
(
FileExtensionValidator([""]),
ContentFile("contents", name="fileWithAnExtension.txt"),
ValidationError,
),
(
FileExtensionValidator([]),
ContentFile("contents", name="file.txt"),
ValidationError,
),
(
FileExtensionValidator([""]),
ContentFile("contents", name="fileWithNoExtension"),
None,
),
(FileExtensionValidator(["txt"]), ContentFile("contents", name="file.txt"), None),
(FileExtensionValidator(["txt"]), ContentFile("contents", name="file.TXT"), None),
(FileExtensionValidator(["TXT"]), ContentFile("contents", name="file.txt"), None),
(FileExtensionValidator(), ContentFile("contents", name="file.jpg"), None),
(DecimalValidator(max_digits=2, decimal_places=2), Decimal("0.99"), None),
(
DecimalValidator(max_digits=2, decimal_places=1),
Decimal("0.99"),
ValidationError,
),
(DecimalValidator(max_digits=3, decimal_places=1), Decimal("999"), ValidationError),
(DecimalValidator(max_digits=4, decimal_places=1), Decimal("999"), None),
(
DecimalValidator(max_digits=20, decimal_places=2),
Decimal("742403889818000000"),
None,
),
(DecimalValidator(20, 2), Decimal("7.42403889818E+17"), None),
(
DecimalValidator(max_digits=20, decimal_places=2),
Decimal("7424742403889818000000"),
ValidationError,
),
(DecimalValidator(max_digits=5, decimal_places=2), Decimal("7304E-1"), None),
(
DecimalValidator(max_digits=5, decimal_places=2),
Decimal("7304E-3"),
ValidationError,
),
(DecimalValidator(max_digits=5, decimal_places=5), Decimal("70E-5"), None),
(
DecimalValidator(max_digits=5, decimal_places=5),
Decimal("70E-6"),
ValidationError,
),
# 'Enter a number.' errors
*[
(
DecimalValidator(decimal_places=2, max_digits=10),
Decimal(value),
ValidationError,
)
for value in (
"NaN",
"-NaN",
"+NaN",
"sNaN",
"-sNaN",
"+sNaN",
"Inf",
"-Inf",
"+Inf",
"Infinity",
"-Infinity",
"+Infinity",
)
],
(validate_image_file_extension, ContentFile("contents", name="file.jpg"), None),
(validate_image_file_extension, ContentFile("contents", name="file.png"), None),
(validate_image_file_extension, ContentFile("contents", name="file.PNG"), None),
(
validate_image_file_extension,
ContentFile("contents", name="file.txt"),
ValidationError,
),
(
validate_image_file_extension,
ContentFile("contents", name="file"),
ValidationError,
),
(ProhibitNullCharactersValidator(), "\x00something", ValidationError),
(ProhibitNullCharactersValidator(), "something", None),
(ProhibitNullCharactersValidator(), None, None),
]
# Add valid and invalid URL tests.
# This only tests the validator without extended schemes.
TEST_DATA.extend((URLValidator(), url, None) for url in VALID_URLS)
TEST_DATA.extend((URLValidator(), url, ValidationError) for url in INVALID_URLS)
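# Illustrative sketch (editorial addition, not part of the upstream suite):
# how a TEST_DATA triple is meant to be read. A validator is a callable that
# returns None for valid input and raises ValidationError otherwise.
def _example_validator_usage():
    try:
        validate_email("email@here.com")  # valid input: returns None
        validate_email("abc")  # invalid input: raises ValidationError
    except ValidationError as exc:
        return exc.messages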
class TestValidators(SimpleTestCase):
def test_validators(self):
for validator, value, expected in TEST_DATA:
name = (
validator.__name__
if isinstance(validator, types.FunctionType)
else validator.__class__.__name__
)
exception_expected = expected is not None and issubclass(
expected, Exception
)
with self.subTest(name, value=value):
if (
validator is validate_image_file_extension
and not PILLOW_IS_INSTALLED
):
self.skipTest(
"Pillow is required to test validate_image_file_extension."
)
if exception_expected:
with self.assertRaises(expected):
validator(value)
else:
self.assertEqual(expected, validator(value))
def test_single_message(self):
v = ValidationError("Not Valid")
self.assertEqual(str(v), "['Not Valid']")
self.assertEqual(repr(v), "ValidationError(['Not Valid'])")
def test_message_list(self):
v = ValidationError(["First Problem", "Second Problem"])
self.assertEqual(str(v), "['First Problem', 'Second Problem']")
self.assertEqual(
repr(v), "ValidationError(['First Problem', 'Second Problem'])"
)
def test_message_dict(self):
v = ValidationError({"first": ["First Problem"]})
self.assertEqual(str(v), "{'first': ['First Problem']}")
self.assertEqual(repr(v), "ValidationError({'first': ['First Problem']})")
def test_regex_validator_flags(self):
msg = "If the flags are set, regex must be a regular expression string."
with self.assertRaisesMessage(TypeError, msg):
RegexValidator(re.compile("a"), flags=re.IGNORECASE)
def test_max_length_validator_message(self):
v = MaxLengthValidator(
16, message='"%(value)s" has more than %(limit_value)d characters.'
)
with self.assertRaisesMessage(
ValidationError, '"djangoproject.com" has more than 16 characters.'
):
v("djangoproject.com")
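# Illustrative sketch (editorial addition, not part of the upstream suite):
# validators are usually instantiated once with a pattern, message, and code
# and then reused, which is why the equality tests below compare instances
# attribute by attribute. The message and code here are arbitrary examples.
_example_scheme_validator = RegexValidator(
    r"^(?:[a-z0-9\.\-]*)://",
    message="Enter a value with a valid scheme.",
    code="invalid_scheme",
)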
class TestValidatorEquality(TestCase):
"""
Validators have valid equality operators (#21638)
"""
def test_regex_equality(self):
self.assertEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://"),
RegexValidator(r"^(?:[a-z0-9\.\-]*)://"),
)
self.assertNotEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://"),
RegexValidator(r"^(?:[0-9\.\-]*)://"),
)
self.assertEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh noes", "invalid"),
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh", "invalid"),
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh noes", "invalid"),
)
self.assertNotEqual(
RegexValidator(r"^(?:[a-z0-9\.\-]*)://", "oh noes", "invalid"),
RegexValidator(r"^(?:[a-z0-9\.\-]*)://"),
)
self.assertNotEqual(
RegexValidator("", flags=re.IGNORECASE),
RegexValidator(""),
)
self.assertNotEqual(
RegexValidator(""),
RegexValidator("", inverse_match=True),
)
def test_regex_equality_nocache(self):
pattern = r"^(?:[a-z0-9\.\-]*)://"
left = RegexValidator(pattern)
re.purge()
right = RegexValidator(pattern)
self.assertEqual(
left,
right,
)
def test_regex_equality_blank(self):
self.assertEqual(
RegexValidator(),
RegexValidator(),
)
def test_email_equality(self):
self.assertEqual(
EmailValidator(),
EmailValidator(),
)
self.assertNotEqual(
EmailValidator(message="BAD EMAIL"),
EmailValidator(),
)
self.assertEqual(
EmailValidator(message="BAD EMAIL", code="bad"),
EmailValidator(message="BAD EMAIL", code="bad"),
)
def test_basic_equality(self):
self.assertEqual(
MaxValueValidator(44),
MaxValueValidator(44),
)
self.assertEqual(MaxValueValidator(44), mock.ANY)
self.assertNotEqual(
MaxValueValidator(44),
MinValueValidator(44),
)
self.assertNotEqual(
MinValueValidator(45),
MinValueValidator(11),
)
def test_decimal_equality(self):
self.assertEqual(
DecimalValidator(1, 2),
DecimalValidator(1, 2),
)
self.assertNotEqual(
DecimalValidator(1, 2),
DecimalValidator(1, 1),
)
self.assertNotEqual(
DecimalValidator(1, 2),
DecimalValidator(2, 2),
)
self.assertNotEqual(
DecimalValidator(1, 2),
MinValueValidator(11),
)
def test_file_extension_equality(self):
self.assertEqual(FileExtensionValidator(), FileExtensionValidator())
self.assertEqual(
FileExtensionValidator(["txt"]), FileExtensionValidator(["txt"])
)
self.assertEqual(
FileExtensionValidator(["TXT"]), FileExtensionValidator(["txt"])
)
self.assertEqual(
FileExtensionValidator(["TXT", "png"]),
FileExtensionValidator(["txt", "png"]),
)
self.assertEqual(
FileExtensionValidator(["txt"]),
FileExtensionValidator(["txt"], code="invalid_extension"),
)
self.assertNotEqual(
FileExtensionValidator(["txt"]), FileExtensionValidator(["png"])
)
self.assertNotEqual(
FileExtensionValidator(["txt"]), FileExtensionValidator(["png", "jpg"])
)
self.assertNotEqual(
FileExtensionValidator(["txt"]),
FileExtensionValidator(["txt"], code="custom_code"),
)
self.assertNotEqual(
FileExtensionValidator(["txt"]),
FileExtensionValidator(["txt"], message="custom error message"),
)
def test_prohibit_null_characters_validator_equality(self):
self.assertEqual(
ProhibitNullCharactersValidator(message="message", code="code"),
ProhibitNullCharactersValidator(message="message", code="code"),
)
self.assertEqual(
ProhibitNullCharactersValidator(), ProhibitNullCharactersValidator()
)
self.assertNotEqual(
ProhibitNullCharactersValidator(message="message1", code="code"),
ProhibitNullCharactersValidator(message="message2", code="code"),
)
self.assertNotEqual(
ProhibitNullCharactersValidator(message="message", code="code1"),
ProhibitNullCharactersValidator(message="message", code="code2"),
)
|
60194cc29048502023ae82706b600f6b1c5a23c3726b170b8d52507ae3bb8f76 | from django.core import signing
from django.http import HttpRequest, HttpResponse
from django.test import SimpleTestCase, override_settings
from django.test.utils import freeze_time
class SignedCookieTest(SimpleTestCase):
def test_can_set_and_read_signed_cookies(self):
response = HttpResponse()
response.set_signed_cookie("c", "hello")
self.assertIn("c", response.cookies)
self.assertTrue(response.cookies["c"].value.startswith("hello:"))
request = HttpRequest()
request.COOKIES["c"] = response.cookies["c"].value
value = request.get_signed_cookie("c")
self.assertEqual(value, "hello")
def test_can_use_salt(self):
response = HttpResponse()
response.set_signed_cookie("a", "hello", salt="one")
request = HttpRequest()
request.COOKIES["a"] = response.cookies["a"].value
value = request.get_signed_cookie("a", salt="one")
self.assertEqual(value, "hello")
with self.assertRaises(signing.BadSignature):
request.get_signed_cookie("a", salt="two")
def test_detects_tampering(self):
response = HttpResponse()
response.set_signed_cookie("c", "hello")
request = HttpRequest()
request.COOKIES["c"] = response.cookies["c"].value[:-2] + "$$"
with self.assertRaises(signing.BadSignature):
request.get_signed_cookie("c")
def test_default_argument_suppresses_exceptions(self):
response = HttpResponse()
response.set_signed_cookie("c", "hello")
request = HttpRequest()
request.COOKIES["c"] = response.cookies["c"].value[:-2] + "$$"
self.assertIsNone(request.get_signed_cookie("c", default=None))
def test_max_age_argument(self):
value = "hello"
with freeze_time(123456789):
response = HttpResponse()
response.set_signed_cookie("c", value)
request = HttpRequest()
request.COOKIES["c"] = response.cookies["c"].value
self.assertEqual(request.get_signed_cookie("c"), value)
with freeze_time(123456800):
self.assertEqual(request.get_signed_cookie("c", max_age=12), value)
self.assertEqual(request.get_signed_cookie("c", max_age=11), value)
with self.assertRaises(signing.SignatureExpired):
request.get_signed_cookie("c", max_age=10)
@override_settings(SECRET_KEY=b"\xe7")
def test_signed_cookies_with_binary_key(self):
response = HttpResponse()
response.set_signed_cookie("c", "hello")
request = HttpRequest()
request.COOKIES["c"] = response.cookies["c"].value
self.assertEqual(request.get_signed_cookie("c"), "hello")
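# Illustrative sketch (editorial addition, not part of the upstream suite):
# the signed-cookie round trip exercised above, as it might appear in a pair
# of views. The cookie name, salt, and max_age are arbitrary examples.
def _example_set_cookie_view(request):
    response = HttpResponse("ok")
    response.set_signed_cookie("hint", "hello", salt="example", max_age=3600)
    return response
def _example_read_cookie_view(request):
    # Returns None instead of raising BadSignature/SignatureExpired because a
    # default is supplied.
    value = request.get_signed_cookie("hint", salt="example", default=None)
    return HttpResponse(value or "missing")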
|
0d60be4997547197beedabae01f3ac563b3b366a46c5387392ca3cfb8cec6880 | import os
import re
from io import StringIO
from unittest import mock, skipUnless
from django.core.management import call_command
from django.db import connection
from django.db.backends.base.introspection import TableInfo
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import PeopleMoreData, test_collation
def inspectdb_tables_only(table_name):
"""
Limit introspection to tables created for models of this app.
Some databases such as Oracle are extremely slow at introspection.
"""
return table_name.startswith("inspectdb_")
def inspectdb_views_only(table_name):
return table_name.startswith("inspectdb_") and table_name.endswith(
("_materialized", "_view")
)
def special_table_only(table_name):
return table_name.startswith("inspectdb_special")
class InspectDBTestCase(TestCase):
unique_re = re.compile(r".*unique_together = \((.+),\).*")
def test_stealth_table_name_filter_option(self):
out = StringIO()
call_command("inspectdb", table_name_filter=inspectdb_tables_only, stdout=out)
error_message = (
"inspectdb has examined a table that should have been filtered out."
)
        # contrib.contenttypes is one of the apps always installed when running
        # the Django test suite; check that one of its tables hasn't been
        # inspected.
self.assertNotIn(
"class DjangoContentType(models.Model):", out.getvalue(), msg=error_message
)
def test_table_option(self):
"""
inspectdb can inspect a subset of tables by passing the table names as
arguments.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_people", stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbPeople(models.Model):", output)
self.assertNotIn("InspectdbPeopledata", output)
def make_field_type_asserter(self):
"""
Call inspectdb and return a function to validate a field type in its
output.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
def assertFieldType(name, definition):
out_def = re.search(r"^\s*%s = (models.*)$" % name, output, re.MULTILINE)[1]
self.assertEqual(definition, out_def)
return assertFieldType
def test_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
char_field_type = introspected_field_types["CharField"]
# Inspecting Oracle DB doesn't produce correct results (#19884):
# - it reports fields as blank=True when they aren't.
if (
not connection.features.interprets_empty_strings_as_nulls
and char_field_type == "CharField"
):
assertFieldType("char_field", "models.CharField(max_length=10)")
assertFieldType(
"null_char_field",
"models.CharField(max_length=10, blank=True, null=True)",
)
assertFieldType("email_field", "models.CharField(max_length=254)")
assertFieldType("file_field", "models.CharField(max_length=100)")
assertFieldType("file_path_field", "models.CharField(max_length=100)")
assertFieldType("slug_field", "models.CharField(max_length=50)")
assertFieldType("text_field", "models.TextField()")
assertFieldType("url_field", "models.CharField(max_length=200)")
if char_field_type == "TextField":
assertFieldType("char_field", "models.TextField()")
assertFieldType(
"null_char_field", "models.TextField(blank=True, null=True)"
)
assertFieldType("email_field", "models.TextField()")
assertFieldType("file_field", "models.TextField()")
assertFieldType("file_path_field", "models.TextField()")
assertFieldType("slug_field", "models.TextField()")
assertFieldType("text_field", "models.TextField()")
assertFieldType("url_field", "models.TextField()")
assertFieldType("date_field", "models.DateField()")
assertFieldType("date_time_field", "models.DateTimeField()")
if introspected_field_types["GenericIPAddressField"] == "GenericIPAddressField":
assertFieldType("gen_ip_address_field", "models.GenericIPAddressField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType("gen_ip_address_field", "models.CharField(max_length=39)")
assertFieldType(
"time_field", "models.%s()" % introspected_field_types["TimeField"]
)
if connection.features.has_native_uuid_field:
assertFieldType("uuid_field", "models.UUIDField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType("uuid_field", "models.CharField(max_length=32)")
@skipUnlessDBFeature("can_introspect_json_field", "supports_json_field")
def test_json_field(self):
out = StringIO()
call_command("inspectdb", "inspectdb_jsonfieldcolumntype", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn("json_field = models.JSONField()", output)
self.assertIn(
"null_json_field = models.JSONField(blank=True, null=True)", output
)
@skipUnlessDBFeature("supports_collation_on_charfield")
@skipUnless(test_collation, "Language collations are not supported.")
def test_char_field_db_collation(self):
out = StringIO()
call_command("inspectdb", "inspectdb_charfielddbcollation", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s')" % test_collation,
output,
)
else:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s', blank=True, null=True)" % test_collation,
output,
)
@skipUnlessDBFeature("supports_collation_on_textfield")
@skipUnless(test_collation, "Language collations are not supported.")
def test_text_field_db_collation(self):
out = StringIO()
call_command("inspectdb", "inspectdb_textfielddbcollation", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"text_field = models.TextField(db_collation='%s')" % test_collation,
output,
)
else:
            self.assertIn(
                "text_field = models.TextField(db_collation='%s', blank=True, "
                "null=True)" % test_collation,
output,
)
def test_number_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
auto_field_type = connection.features.introspected_field_types["AutoField"]
if auto_field_type != "AutoField":
assertFieldType(
"id", "models.%s(primary_key=True) # AutoField?" % auto_field_type
)
assertFieldType(
"big_int_field", "models.%s()" % introspected_field_types["BigIntegerField"]
)
bool_field_type = introspected_field_types["BooleanField"]
assertFieldType("bool_field", "models.{}()".format(bool_field_type))
assertFieldType(
"null_bool_field",
"models.{}(blank=True, null=True)".format(bool_field_type),
)
if connection.vendor != "sqlite":
assertFieldType(
"decimal_field", "models.DecimalField(max_digits=6, decimal_places=1)"
)
else: # Guessed arguments on SQLite, see #5014
assertFieldType(
"decimal_field",
"models.DecimalField(max_digits=10, decimal_places=5) "
"# max_digits and decimal_places have been guessed, "
"as this database handles decimal fields as float",
)
assertFieldType("float_field", "models.FloatField()")
assertFieldType(
"int_field", "models.%s()" % introspected_field_types["IntegerField"]
)
assertFieldType(
"pos_int_field",
"models.%s()" % introspected_field_types["PositiveIntegerField"],
)
assertFieldType(
"pos_big_int_field",
"models.%s()" % introspected_field_types["PositiveBigIntegerField"],
)
assertFieldType(
"pos_small_int_field",
"models.%s()" % introspected_field_types["PositiveSmallIntegerField"],
)
assertFieldType(
"small_int_field",
"models.%s()" % introspected_field_types["SmallIntegerField"],
)
@skipUnlessDBFeature("can_introspect_foreign_keys")
def test_attribute_name_not_python_keyword(self):
out = StringIO()
call_command("inspectdb", table_name_filter=inspectdb_tables_only, stdout=out)
output = out.getvalue()
error_message = (
"inspectdb generated an attribute name which is a Python keyword"
)
# Recursive foreign keys should be set to 'self'
self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output)
self.assertNotIn(
"from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)",
output,
msg=error_message,
)
# As InspectdbPeople model is defined after InspectdbMessage, it should
# be quoted.
self.assertIn(
"from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, "
"db_column='from_id')",
output,
)
self.assertIn(
"people_pk = models.OneToOneField(InspectdbPeople, models.DO_NOTHING, "
"primary_key=True)",
output,
)
self.assertIn(
"people_unique = models.OneToOneField(InspectdbPeople, models.DO_NOTHING)",
output,
)
@skipUnlessDBFeature("can_introspect_foreign_keys")
def test_foreign_key_to_field(self):
out = StringIO()
call_command("inspectdb", "inspectdb_foreignkeytofield", stdout=out)
self.assertIn(
"to_field_fk = models.ForeignKey('InspectdbPeoplemoredata', "
"models.DO_NOTHING, to_field='people_unique_id')",
out.getvalue(),
)
    def test_digits_column_name_introspection(self):
        """
        Introspection of column names that consist of or start with digits
        (#16536/#17676).
        """
char_field_type = connection.features.introspected_field_types["CharField"]
out = StringIO()
call_command("inspectdb", "inspectdb_digitsincolumnname", stdout=out)
output = out.getvalue()
error_message = "inspectdb generated a model field name which is a number"
self.assertNotIn(
" 123 = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_123 = models.%s" % char_field_type, output)
error_message = (
"inspectdb generated a model field name which starts with a digit"
)
self.assertNotIn(
" 4extra = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_4extra = models.%s" % char_field_type, output)
self.assertNotIn(
" 45extra = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_45extra = models.%s" % char_field_type, output)
def test_special_column_name_introspection(self):
"""
Introspection of column names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command("inspectdb", table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
base_name = connection.introspection.identifier_converter("Field")
integer_field_type = connection.features.introspected_field_types[
"IntegerField"
]
self.assertIn("field = models.%s()" % integer_field_type, output)
self.assertIn(
"field_field = models.%s(db_column='%s_')"
% (integer_field_type, base_name),
output,
)
self.assertIn(
"field_field_0 = models.%s(db_column='%s__')"
% (integer_field_type, base_name),
output,
)
self.assertIn(
"field_field_1 = models.%s(db_column='__field')" % integer_field_type,
output,
)
self.assertIn(
"prc_x = models.{}(db_column='prc(%) x')".format(integer_field_type), output
)
self.assertIn("tamaño = models.%s()" % integer_field_type, output)
def test_table_name_introspection(self):
"""
Introspection of table names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command("inspectdb", table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbSpecialTableName(models.Model):", output)
def test_managed_models(self):
"""
By default the command generates models with `Meta.managed = False`.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
self.longMessage = False
self.assertIn(
" managed = False",
output,
msg="inspectdb should generate unmanaged models.",
)
def test_unique_together_meta(self):
out = StringIO()
call_command("inspectdb", "inspectdb_uniquetogether", stdout=out)
output = out.getvalue()
self.assertIn(" unique_together = (('", output)
unique_together_match = self.unique_re.findall(output)
# There should be one unique_together tuple.
self.assertEqual(len(unique_together_match), 1)
fields = unique_together_match[0]
# Fields with db_column = field name.
self.assertIn("('field1', 'field2')", fields)
# Fields from columns whose names are Python keywords.
        self.assertIn("('from_field', 'field1')", fields)
# Fields whose names normalize to the same Python field name and hence
# are given an integer suffix.
self.assertIn("('non_unique_column', 'non_unique_column_0')", fields)
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_unsupported_unique_together(self):
"""Unsupported index types (COALESCE here) are skipped."""
with connection.cursor() as c:
c.execute(
"CREATE UNIQUE INDEX Findex ON %s "
"(id, people_unique_id, COALESCE(message_id, -1))"
% PeopleMoreData._meta.db_table
)
try:
out = StringIO()
call_command(
"inspectdb",
table_name_filter=lambda tn: tn.startswith(
PeopleMoreData._meta.db_table
),
stdout=out,
)
output = out.getvalue()
self.assertIn("# A unique constraint could not be introspected.", output)
self.assertEqual(
self.unique_re.findall(output), ["('id', 'people_unique')"]
)
finally:
with connection.cursor() as c:
c.execute("DROP INDEX Findex")
@skipUnless(
connection.vendor == "sqlite",
"Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test",
)
def test_custom_fields(self):
"""
Introspection of columns with a custom field (#21090)
"""
out = StringIO()
with mock.patch(
"django.db.connection.introspection.data_types_reverse."
"base_data_types_reverse",
{
"text": "myfields.TextField",
"bigint": "BigIntegerField",
},
):
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
self.assertIn("text_field = myfields.TextField()", output)
self.assertIn("big_int_field = models.BigIntegerField()", output)
def test_introspection_errors(self):
"""
Introspection errors should not crash the command, and the error should
be visible in the output.
"""
out = StringIO()
with mock.patch(
"django.db.connection.introspection.get_table_list",
return_value=[TableInfo(name="nonexistent", type="t")],
):
call_command("inspectdb", stdout=out)
output = out.getvalue()
self.assertIn("# Unable to inspect table 'nonexistent'", output)
# The error message depends on the backend
self.assertIn("# The error was:", output)
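# Illustrative sketch (editorial addition, not part of the upstream suite):
# the two ways the tests above drive inspectdb. ``table_name_filter`` is an
# internal option passed through call_command(); on the command line one
# passes table names instead, e.g. ``python manage.py inspectdb inspectdb_people``.
def _example_run_inspectdb():
    out = StringIO()
    call_command("inspectdb", table_name_filter=inspectdb_tables_only, stdout=out)
    return out.getvalue()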
class InspectDBTransactionalTests(TransactionTestCase):
available_apps = ["inspectdb"]
def test_include_views(self):
"""inspectdb --include-views creates models for database views."""
with connection.cursor() as cursor:
cursor.execute(
"CREATE VIEW inspectdb_people_view AS "
"SELECT id, name FROM inspectdb_people"
)
out = StringIO()
view_model = "class InspectdbPeopleView(models.Model):"
view_managed = "managed = False # Created from a view."
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP VIEW inspectdb_people_view")
@skipUnlessDBFeature("can_introspect_materialized_views")
def test_include_materialized_views(self):
"""inspectdb --include-views creates models for materialized views."""
with connection.cursor() as cursor:
cursor.execute(
"CREATE MATERIALIZED VIEW inspectdb_people_materialized AS "
"SELECT id, name FROM inspectdb_people"
)
out = StringIO()
view_model = "class InspectdbPeopleMaterialized(models.Model):"
view_managed = "managed = False # Created from a view."
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP MATERIALIZED VIEW inspectdb_people_materialized")
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_include_partitions(self):
"""inspectdb --include-partitions creates models for partitions."""
with connection.cursor() as cursor:
cursor.execute(
"""\
CREATE TABLE inspectdb_partition_parent (name text not null)
PARTITION BY LIST (left(upper(name), 1))
"""
)
cursor.execute(
"""\
CREATE TABLE inspectdb_partition_child
PARTITION OF inspectdb_partition_parent
FOR VALUES IN ('A', 'B', 'C')
"""
)
out = StringIO()
partition_model_parent = "class InspectdbPartitionParent(models.Model):"
partition_model_child = "class InspectdbPartitionChild(models.Model):"
partition_managed = "managed = False # Created from a partition."
try:
call_command(
"inspectdb", table_name_filter=inspectdb_tables_only, stdout=out
)
no_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, no_partitions_output)
self.assertNotIn(partition_model_child, no_partitions_output)
self.assertNotIn(partition_managed, no_partitions_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_tables_only,
include_partitions=True,
stdout=out,
)
with_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, with_partitions_output)
self.assertIn(partition_model_child, with_partitions_output)
self.assertIn(partition_managed, with_partitions_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP TABLE IF EXISTS inspectdb_partition_child")
cursor.execute("DROP TABLE IF EXISTS inspectdb_partition_parent")
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_foreign_data_wrapper(self):
with connection.cursor() as cursor:
cursor.execute("CREATE EXTENSION IF NOT EXISTS file_fdw")
cursor.execute(
"CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw"
)
cursor.execute(
"""\
CREATE FOREIGN TABLE inspectdb_iris_foreign_table (
petal_length real,
petal_width real,
sepal_length real,
sepal_width real
) SERVER inspectdb_server OPTIONS (
filename %s
)
""",
[os.devnull],
)
out = StringIO()
foreign_table_model = "class InspectdbIrisForeignTable(models.Model):"
foreign_table_managed = "managed = False"
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_tables_only,
stdout=out,
)
output = out.getvalue()
self.assertIn(foreign_table_model, output)
self.assertIn(foreign_table_managed, output)
finally:
with connection.cursor() as cursor:
cursor.execute(
"DROP FOREIGN TABLE IF EXISTS inspectdb_iris_foreign_table"
)
cursor.execute("DROP SERVER IF EXISTS inspectdb_server")
cursor.execute("DROP EXTENSION IF EXISTS file_fdw")
|
5ad85da1c50ca3118d343f5099cec33455de509adfb2f58747e41880f9420348 | from django.db import connection, models
class People(models.Model):
name = models.CharField(max_length=255)
parent = models.ForeignKey("self", models.CASCADE)
class Message(models.Model):
from_field = models.ForeignKey(People, models.CASCADE, db_column="from_id")
class PeopleData(models.Model):
people_pk = models.ForeignKey(People, models.CASCADE, primary_key=True)
ssn = models.CharField(max_length=11)
class PeopleMoreData(models.Model):
people_unique = models.ForeignKey(People, models.CASCADE, unique=True)
message = models.ForeignKey(Message, models.CASCADE, blank=True, null=True)
license = models.CharField(max_length=255)
class ForeignKeyToField(models.Model):
to_field_fk = models.ForeignKey(
PeopleMoreData,
models.CASCADE,
to_field="people_unique",
)
class DigitsInColumnName(models.Model):
all_digits = models.CharField(max_length=11, db_column="123")
leading_digit = models.CharField(max_length=11, db_column="4extra")
leading_digits = models.CharField(max_length=11, db_column="45extra")
class SpecialName(models.Model):
field = models.IntegerField(db_column="field")
# Underscores
field_field_0 = models.IntegerField(db_column="Field_")
field_field_1 = models.IntegerField(db_column="Field__")
field_field_2 = models.IntegerField(db_column="__field")
# Other chars
prc_x = models.IntegerField(db_column="prc(%) x")
non_ascii = models.IntegerField(db_column="tamaño")
class Meta:
db_table = "inspectdb_special.table name"
class ColumnTypes(models.Model):
id = models.AutoField(primary_key=True)
big_int_field = models.BigIntegerField()
bool_field = models.BooleanField(default=False)
null_bool_field = models.BooleanField(null=True)
char_field = models.CharField(max_length=10)
null_char_field = models.CharField(max_length=10, blank=True, null=True)
date_field = models.DateField()
date_time_field = models.DateTimeField()
decimal_field = models.DecimalField(max_digits=6, decimal_places=1)
email_field = models.EmailField()
file_field = models.FileField(upload_to="unused")
file_path_field = models.FilePathField()
float_field = models.FloatField()
int_field = models.IntegerField()
gen_ip_address_field = models.GenericIPAddressField(protocol="ipv4")
pos_big_int_field = models.PositiveBigIntegerField()
pos_int_field = models.PositiveIntegerField()
pos_small_int_field = models.PositiveSmallIntegerField()
slug_field = models.SlugField()
small_int_field = models.SmallIntegerField()
text_field = models.TextField()
time_field = models.TimeField()
url_field = models.URLField()
uuid_field = models.UUIDField()
class JSONFieldColumnType(models.Model):
json_field = models.JSONField()
null_json_field = models.JSONField(blank=True, null=True)
class Meta:
required_db_features = {
"can_introspect_json_field",
"supports_json_field",
}
test_collation = connection.features.test_collations.get("non_default")
class CharFieldDbCollation(models.Model):
char_field = models.CharField(max_length=10, db_collation=test_collation)
class Meta:
required_db_features = {"supports_collation_on_charfield"}
class TextFieldDbCollation(models.Model):
text_field = models.TextField(db_collation=test_collation)
class Meta:
required_db_features = {"supports_collation_on_textfield"}
class UniqueTogether(models.Model):
field1 = models.IntegerField()
field2 = models.CharField(max_length=10)
from_field = models.IntegerField(db_column="from")
non_unique = models.IntegerField(db_column="non__unique_column")
non_unique_0 = models.IntegerField(db_column="non_unique__column")
class Meta:
unique_together = [
("field1", "field2"),
("from_field", "field1"),
("non_unique", "non_unique_0"),
]
|
bbc9cdc994ed57766ba23e30e0176696a70022293f3ab95990cfc932fc655ee6 | from django.test import SimpleTestCase, override_settings
from django.test.utils import require_jinja2
@override_settings(ROOT_URLCONF="shortcuts.urls")
class RenderTests(SimpleTestCase):
def test_render(self):
response = self.client.get("/render/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"FOO.BAR../render/\n")
self.assertEqual(response.headers["Content-Type"], "text/html; charset=utf-8")
self.assertFalse(hasattr(response.context.request, "current_app"))
def test_render_with_multiple_templates(self):
response = self.client.get("/render/multiple_templates/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"FOO.BAR../render/multiple_templates/\n")
def test_render_with_content_type(self):
response = self.client.get("/render/content_type/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"FOO.BAR../render/content_type/\n")
self.assertEqual(response.headers["Content-Type"], "application/x-rendertest")
def test_render_with_status(self):
response = self.client.get("/render/status/")
self.assertEqual(response.status_code, 403)
self.assertEqual(response.content, b"FOO.BAR../render/status/\n")
@require_jinja2
def test_render_with_using(self):
response = self.client.get("/render/using/")
self.assertEqual(response.content, b"DTL\n")
response = self.client.get("/render/using/?using=django")
self.assertEqual(response.content, b"DTL\n")
response = self.client.get("/render/using/?using=jinja2")
self.assertEqual(response.content, b"Jinja2\n")
|
35fbfceff990d86c3e0ef2030052904f6e53cf336fabcba18051592bf6cd8501 | from django.urls import path
from . import views
urlpatterns = [
path("render/", views.render_view),
path("render/multiple_templates/", views.render_view_with_multiple_templates),
path("render/content_type/", views.render_view_with_content_type),
path("render/status/", views.render_view_with_status),
path("render/using/", views.render_view_with_using),
]
|
b4ce4f9637b810e65283d29f6d82921543ba31a4fc12bc95db4d9d2ab0fd71c4 | from django.shortcuts import render
def render_view(request):
return render(
request,
"shortcuts/render_test.html",
{
"foo": "FOO",
"bar": "BAR",
},
)
def render_view_with_multiple_templates(request):
return render(
request,
[
"shortcuts/no_such_template.html",
"shortcuts/render_test.html",
],
{
"foo": "FOO",
"bar": "BAR",
},
)
def render_view_with_content_type(request):
return render(
request,
"shortcuts/render_test.html",
{
"foo": "FOO",
"bar": "BAR",
},
content_type="application/x-rendertest",
)
def render_view_with_status(request):
return render(
request,
"shortcuts/render_test.html",
{
"foo": "FOO",
"bar": "BAR",
},
status=403,
)
def render_view_with_using(request):
using = request.GET.get("using")
return render(request, "shortcuts/using.html", using=using)
|
d3b8dd29c13491b8ab6c94569bee254b88503d9dc582b4c495c39e6ef0b4b4f3 | import sys
import traceback
from io import BytesIO
from unittest import TestCase, mock
from wsgiref import simple_server
from django.core.servers.basehttp import get_internal_wsgi_application
from django.core.signals import request_finished
from django.test import RequestFactory, override_settings
from .views import FILE_RESPONSE_HOLDER
# If data is too large, socket will choke, so write chunks no larger than 32MB
# at a time. The rationale behind the 32MB can be found in #5596#comment:4.
MAX_SOCKET_CHUNK_SIZE = 32 * 1024 * 1024 # 32 MB
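# For example (mirroring ServerHandlerChunksProperly.test_chunked_data below):
# a body of MAX_SOCKET_CHUNK_SIZE * 1.5 bytes (48 MB) is written as two chunks,
# one full 32 MB chunk plus a 16 MB remainder.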
class ServerHandler(simple_server.ServerHandler):
error_status = "500 INTERNAL SERVER ERROR"
def write(self, data):
"""'write()' callable as specified by PEP 3333"""
assert isinstance(data, bytes), "write() argument must be bytestring"
if not self.status:
raise AssertionError("write() before start_response()")
elif not self.headers_sent:
# Before the first output, send the stored headers
self.bytes_sent = len(data) # make sure we know content-length
self.send_headers()
else:
self.bytes_sent += len(data)
# XXX check Content-Length and truncate if too many bytes written?
data = BytesIO(data)
for chunk in iter(lambda: data.read(MAX_SOCKET_CHUNK_SIZE), b""):
self._write(chunk)
self._flush()
def error_output(self, environ, start_response):
super().error_output(environ, start_response)
return ["\n".join(traceback.format_exception(*sys.exc_info()))]
class DummyHandler:
def log_request(self, *args, **kwargs):
pass
class FileWrapperHandler(ServerHandler):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.request_handler = DummyHandler()
self._used_sendfile = False
def sendfile(self):
self._used_sendfile = True
return True
def wsgi_app(environ, start_response):
start_response("200 OK", [("Content-Type", "text/plain")])
return [b"Hello World!"]
def wsgi_app_file_wrapper(environ, start_response):
start_response("200 OK", [("Content-Type", "text/plain")])
return environ["wsgi.file_wrapper"](BytesIO(b"foo"))
class WSGIFileWrapperTests(TestCase):
"""
The wsgi.file_wrapper works for the builtin server.
Tests for #9659: wsgi.file_wrapper in the builtin server.
We need to mock a couple of handlers and keep track of what
gets called when using a couple kinds of WSGI apps.
"""
def test_file_wrapper_uses_sendfile(self):
env = {"SERVER_PROTOCOL": "HTTP/1.0"}
handler = FileWrapperHandler(None, BytesIO(), BytesIO(), env)
handler.run(wsgi_app_file_wrapper)
self.assertTrue(handler._used_sendfile)
self.assertEqual(handler.stdout.getvalue(), b"")
self.assertEqual(handler.stderr.getvalue(), b"")
def test_file_wrapper_no_sendfile(self):
env = {"SERVER_PROTOCOL": "HTTP/1.0"}
handler = FileWrapperHandler(None, BytesIO(), BytesIO(), env)
handler.run(wsgi_app)
self.assertFalse(handler._used_sendfile)
self.assertEqual(handler.stdout.getvalue().splitlines()[-1], b"Hello World!")
self.assertEqual(handler.stderr.getvalue(), b"")
@override_settings(ROOT_URLCONF="builtin_server.urls")
def test_file_response_closing(self):
"""
View returning a FileResponse properly closes the file and http
response when file_wrapper is used.
"""
env = RequestFactory().get("/fileresponse/").environ
handler = FileWrapperHandler(None, BytesIO(), BytesIO(), env)
handler.run(get_internal_wsgi_application())
# Sendfile is used only when file_wrapper has been used.
self.assertTrue(handler._used_sendfile)
# Fetch the original response object.
self.assertIn("response", FILE_RESPONSE_HOLDER)
response = FILE_RESPONSE_HOLDER["response"]
# The response and file buffers are closed.
self.assertIs(response.closed, True)
buf1, buf2 = FILE_RESPONSE_HOLDER["buffers"]
self.assertIs(buf1.closed, True)
self.assertIs(buf2.closed, True)
FILE_RESPONSE_HOLDER.clear()
@override_settings(ROOT_URLCONF="builtin_server.urls")
def test_file_response_call_request_finished(self):
env = RequestFactory().get("/fileresponse/").environ
handler = FileWrapperHandler(None, BytesIO(), BytesIO(), env)
with mock.MagicMock() as signal_handler:
request_finished.connect(signal_handler)
handler.run(get_internal_wsgi_application())
self.assertEqual(signal_handler.call_count, 1)
class WriteChunkCounterHandler(ServerHandler):
"""
Server handler that counts the number of chunks written after headers were
sent. Used to make sure large response body chunking works properly.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.request_handler = DummyHandler()
self.headers_written = False
self.write_chunk_counter = 0
def send_headers(self):
super().send_headers()
self.headers_written = True
def _write(self, data):
if self.headers_written:
self.write_chunk_counter += 1
self.stdout.write(data)
def send_big_data_app(environ, start_response):
start_response("200 OK", [("Content-Type", "text/plain")])
# Return a blob of data that is 1.5 times the maximum chunk size.
return [b"x" * (MAX_SOCKET_CHUNK_SIZE + MAX_SOCKET_CHUNK_SIZE // 2)]
class ServerHandlerChunksProperly(TestCase):
"""
The ServerHandler chunks data properly.
Tests for #18972: The logic that performs the math to break data into
32MB (MAX_SOCKET_CHUNK_SIZE) chunks was flawed, BUT it didn't actually
cause any problems.
"""
def test_chunked_data(self):
env = {"SERVER_PROTOCOL": "HTTP/1.0"}
handler = WriteChunkCounterHandler(None, BytesIO(), BytesIO(), env)
handler.run(send_big_data_app)
self.assertEqual(handler.write_chunk_counter, 2)
|
ecc2e4dfa322235a057a0d62be1721db2f44a1456e9d4f132f28ee8eaf295cb4 | from django.urls import path
from . import views
urlpatterns = [
path("fileresponse/", views.file_response),
]
|
d6a1767f739b1917dd3dd088579993e62bfbe4b6f5ba30e574f1a75f534be4be | from io import BytesIO
from django.http import FileResponse
FILE_RESPONSE_HOLDER = {}
def file_response(request):
f1 = BytesIO(b"test1")
f2 = BytesIO(b"test2")
response = FileResponse(f1)
response._resource_closers.append(f2.close)
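    # f2 stands in for an extra resource tied to the response; registering its
    # close() in _resource_closers lets test_file_response_closing verify that
    # both buffers are closed along with the response.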
FILE_RESPONSE_HOLDER["response"] = response
FILE_RESPONSE_HOLDER["buffers"] = (f1, f2)
return response
|
e9c3370f7a4f18baeb8ad253dab8abbcb2917453c3cfbdde70ed474abc359667 | from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core import management
from django.test import TestCase, override_settings
from .models import Article
class SwappableModelTests(TestCase):
# Limit memory usage when calling 'migrate'.
available_apps = [
"swappable_models",
"django.contrib.auth",
"django.contrib.contenttypes",
]
@override_settings(TEST_ARTICLE_MODEL="swappable_models.AlternateArticle")
def test_generated_data(self):
"Permissions and content types are not created for a swapped model"
# Delete all permissions and content_types
Permission.objects.filter(content_type__app_label="swappable_models").delete()
ContentType.objects.filter(app_label="swappable_models").delete()
# Re-run migrate. This will re-build the permissions and content types.
management.call_command("migrate", interactive=False, verbosity=0)
# Content types and permissions exist for the swapped model,
# but not for the swappable model.
apps_models = [
(p.content_type.app_label, p.content_type.model)
for p in Permission.objects.all()
]
self.assertIn(("swappable_models", "alternatearticle"), apps_models)
self.assertNotIn(("swappable_models", "article"), apps_models)
apps_models = [(ct.app_label, ct.model) for ct in ContentType.objects.all()]
self.assertIn(("swappable_models", "alternatearticle"), apps_models)
self.assertNotIn(("swappable_models", "article"), apps_models)
@override_settings(TEST_ARTICLE_MODEL="swappable_models.article")
def test_case_insensitive(self):
"Model names are case insensitive. Model swapping honors this."
Article.objects.all()
self.assertIsNone(Article._meta.swapped)
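# A minimal sketch (an assumption, not part of this test module) of how code
# typically resolves a swappable model at runtime: _meta.swapped is the
# "app_label.ModelName" string when the model is swapped out, or None when it
# is active, as test_case_insensitive asserts above.
from django.apps import apps
def get_article_model():
    # Hypothetical helper, mirroring the pattern commonly used for swappable
    # settings such as AUTH_USER_MODEL.
    return apps.get_model(Article._meta.swapped or "swappable_models.Article")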
|
b0befc6c12f9ca988cf7050b4c0030ac1b4350e41901b1d5938d997bacaf7c0a | from django.db import models
class Article(models.Model):
title = models.CharField(max_length=100)
publication_date = models.DateField()
class Meta:
swappable = "TEST_ARTICLE_MODEL"
class AlternateArticle(models.Model):
title = models.CharField(max_length=100)
publication_date = models.DateField()
byline = models.CharField(max_length=100)
|
e13e64a8ae3226957f5ff13dcf40c575fc22670e73fb6653abced03a2cb1ba35 | from django.db import IntegrityError, connection, transaction
from django.test import TestCase
from .models import (
Bar,
Director,
Favorites,
HiddenPointer,
ManualPrimaryKey,
MultiModel,
Place,
Pointer,
RelatedModel,
Restaurant,
School,
Target,
ToFieldPointer,
UndergroundBar,
Waiter,
)
class OneToOneTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.p1 = Place.objects.create(name="Demon Dogs", address="944 W. Fullerton")
cls.p2 = Place.objects.create(name="Ace Hardware", address="1013 N. Ashland")
cls.r1 = Restaurant.objects.create(
place=cls.p1, serves_hot_dogs=True, serves_pizza=False
)
cls.b1 = Bar.objects.create(place=cls.p1, serves_cocktails=False)
def test_getter(self):
# A Restaurant can access its place.
self.assertEqual(repr(self.r1.place), "<Place: Demon Dogs the place>")
# A Place can access its restaurant, if available.
self.assertEqual(
repr(self.p1.restaurant), "<Restaurant: Demon Dogs the restaurant>"
)
# p2 doesn't have an associated restaurant.
with self.assertRaisesMessage(
Restaurant.DoesNotExist, "Place has no restaurant"
):
self.p2.restaurant
# The exception raised on attribute access when a related object
# doesn't exist should be an instance of a subclass of `AttributeError`
# refs #21563
self.assertFalse(hasattr(self.p2, "restaurant"))
def test_setter(self):
# Set the place using assignment notation. Because place is the primary
# key on Restaurant, the save will create a new restaurant
self.r1.place = self.p2
self.r1.save()
self.assertEqual(
repr(self.p2.restaurant), "<Restaurant: Ace Hardware the restaurant>"
)
self.assertEqual(repr(self.r1.place), "<Place: Ace Hardware the place>")
self.assertEqual(self.p2.pk, self.r1.pk)
# Set the place back again, using assignment in the reverse direction.
self.p1.restaurant = self.r1
self.assertEqual(
repr(self.p1.restaurant), "<Restaurant: Demon Dogs the restaurant>"
)
r = Restaurant.objects.get(pk=self.p1.id)
self.assertEqual(repr(r.place), "<Place: Demon Dogs the place>")
def test_manager_all(self):
# Restaurant.objects.all() just returns the Restaurants, not the Places.
self.assertSequenceEqual(Restaurant.objects.all(), [self.r1])
# Place.objects.all() returns all Places, regardless of whether they
# have Restaurants.
self.assertSequenceEqual(Place.objects.order_by("name"), [self.p2, self.p1])
def test_manager_get(self):
def assert_get_restaurant(**params):
self.assertEqual(
repr(Restaurant.objects.get(**params)),
"<Restaurant: Demon Dogs the restaurant>",
)
assert_get_restaurant(place__id__exact=self.p1.pk)
assert_get_restaurant(place__id=self.p1.pk)
assert_get_restaurant(place__exact=self.p1.pk)
assert_get_restaurant(place__exact=self.p1)
assert_get_restaurant(place=self.p1.pk)
assert_get_restaurant(place=self.p1)
assert_get_restaurant(pk=self.p1.pk)
assert_get_restaurant(place__pk__exact=self.p1.pk)
assert_get_restaurant(place__pk=self.p1.pk)
assert_get_restaurant(place__name__startswith="Demon")
def assert_get_place(**params):
self.assertEqual(
repr(Place.objects.get(**params)), "<Place: Demon Dogs the place>"
)
assert_get_place(restaurant__place__exact=self.p1.pk)
assert_get_place(restaurant__place__exact=self.p1)
assert_get_place(restaurant__place__pk=self.p1.pk)
assert_get_place(restaurant__exact=self.p1.pk)
assert_get_place(restaurant__exact=self.r1)
assert_get_place(restaurant__pk=self.p1.pk)
assert_get_place(restaurant=self.p1.pk)
assert_get_place(restaurant=self.r1)
assert_get_place(id__exact=self.p1.pk)
assert_get_place(pk=self.p1.pk)
def test_foreign_key(self):
# Add a Waiter to the Restaurant.
w = self.r1.waiter_set.create(name="Joe")
self.assertEqual(
repr(w), "<Waiter: Joe the waiter at Demon Dogs the restaurant>"
)
# Query the waiters
def assert_filter_waiters(**params):
self.assertSequenceEqual(Waiter.objects.filter(**params), [w])
assert_filter_waiters(restaurant__place__exact=self.p1.pk)
assert_filter_waiters(restaurant__place__exact=self.p1)
assert_filter_waiters(restaurant__place__pk=self.p1.pk)
assert_filter_waiters(restaurant__exact=self.r1.pk)
assert_filter_waiters(restaurant__exact=self.r1)
assert_filter_waiters(restaurant__pk=self.r1.pk)
assert_filter_waiters(restaurant=self.r1.pk)
assert_filter_waiters(restaurant=self.r1)
assert_filter_waiters(id__exact=w.pk)
assert_filter_waiters(pk=w.pk)
# Delete the restaurant; the waiter should also be removed
r = Restaurant.objects.get(pk=self.r1.pk)
r.delete()
self.assertEqual(Waiter.objects.count(), 0)
def test_multiple_o2o(self):
# One-to-one fields still work if you create your own primary key
o1 = ManualPrimaryKey(primary_key="abc123", name="primary")
o1.save()
o2 = RelatedModel(link=o1, name="secondary")
o2.save()
# You can have multiple one-to-one fields on a model, too.
x1 = MultiModel(link1=self.p1, link2=o1, name="x1")
x1.save()
self.assertEqual(repr(o1.multimodel), "<MultiModel: Multimodel x1>")
# This will fail because each one-to-one field must be unique (and
# link2=o1 was used for x1, above).
mm = MultiModel(link1=self.p2, link2=o1, name="x1")
with self.assertRaises(IntegrityError):
with transaction.atomic():
mm.save()
def test_unsaved_object(self):
"""
#10811 -- Assigning an unsaved object to a OneToOneField
should raise an exception.
"""
place = Place(name="User", address="London")
with self.assertRaises(Restaurant.DoesNotExist):
place.restaurant
msg = (
"save() prohibited to prevent data loss due to unsaved related object "
"'place'."
)
with self.assertRaisesMessage(ValueError, msg):
Restaurant.objects.create(
place=place, serves_hot_dogs=True, serves_pizza=False
)
# place should not cache restaurant
with self.assertRaises(Restaurant.DoesNotExist):
place.restaurant
def test_reverse_relationship_cache_cascade(self):
"""
Regression test for #9023: accessing the reverse relationship shouldn't
result in a cascading delete().
"""
bar = UndergroundBar.objects.create(place=self.p1, serves_cocktails=False)
# The bug in #9023: if you access the one-to-one relation *before*
# setting to None and deleting, the cascade happens anyway.
self.p1.undergroundbar
bar.place.name = "foo"
bar.place = None
bar.save()
self.p1.delete()
self.assertEqual(Place.objects.count(), 1)
self.assertEqual(UndergroundBar.objects.count(), 1)
def test_create_models_m2m(self):
"""
Models are created via the m2m relation if the remote model has a
OneToOneField (#1064, #1506).
"""
f = Favorites(name="Fred")
f.save()
f.restaurants.set([self.r1])
self.assertSequenceEqual(f.restaurants.all(), [self.r1])
def test_reverse_object_cache(self):
"""
The name of the cache for the reverse object is correct (#7173).
"""
self.assertEqual(self.p1.restaurant, self.r1)
self.assertEqual(self.p1.bar, self.b1)
def test_assign_none_reverse_relation(self):
p = Place.objects.get(name="Demon Dogs")
# Assigning None succeeds if field is null=True.
ug_bar = UndergroundBar.objects.create(place=p, serves_cocktails=False)
p.undergroundbar = None
self.assertIsNone(ug_bar.place)
ug_bar.save()
ug_bar.refresh_from_db()
self.assertIsNone(ug_bar.place)
def test_assign_none_null_reverse_relation(self):
p = Place.objects.get(name="Demon Dogs")
        # Assigning None doesn't raise AttributeError if there isn't a related
# UndergroundBar.
p.undergroundbar = None
def test_assign_none_to_null_cached_reverse_relation(self):
p = Place.objects.get(name="Demon Dogs")
# Prime the relation's cache with a value of None.
with self.assertRaises(Place.undergroundbar.RelatedObjectDoesNotExist):
getattr(p, "undergroundbar")
# Assigning None works if there isn't a related UndergroundBar and the
# reverse cache has a value of None.
p.undergroundbar = None
def test_assign_o2o_id_value(self):
b = UndergroundBar.objects.create(place=self.p1)
b.place_id = self.p2.pk
b.save()
self.assertEqual(b.place_id, self.p2.pk)
self.assertFalse(UndergroundBar.place.is_cached(b))
self.assertEqual(b.place, self.p2)
self.assertTrue(UndergroundBar.place.is_cached(b))
# Reassigning the same value doesn't clear a cached instance.
b.place_id = self.p2.pk
self.assertTrue(UndergroundBar.place.is_cached(b))
def test_assign_o2o_id_none(self):
b = UndergroundBar.objects.create(place=self.p1)
b.place_id = None
b.save()
self.assertIsNone(b.place_id)
self.assertFalse(UndergroundBar.place.is_cached(b))
self.assertIsNone(b.place)
self.assertTrue(UndergroundBar.place.is_cached(b))
def test_related_object_cache(self):
"""Regression test for #6886 (the related-object cache)"""
# Look up the objects again so that we get "fresh" objects
p = Place.objects.get(name="Demon Dogs")
r = p.restaurant
# Accessing the related object again returns the exactly same object
self.assertIs(p.restaurant, r)
# But if we kill the cache, we get a new object
del p._state.fields_cache["restaurant"]
self.assertIsNot(p.restaurant, r)
# Reassigning the Restaurant object results in an immediate cache update
# We can't use a new Restaurant because that'll violate one-to-one, but
        # with a new *instance*, the identity (is) check below will fail if
        # #6886 regresses.
r2 = Restaurant.objects.get(pk=r.pk)
p.restaurant = r2
self.assertIs(p.restaurant, r2)
# Assigning None succeeds if field is null=True.
ug_bar = UndergroundBar.objects.create(place=p, serves_cocktails=False)
ug_bar.place = None
self.assertIsNone(ug_bar.place)
# Assigning None will not fail: Place.restaurant is null=False
setattr(p, "restaurant", None)
# You also can't assign an object of the wrong type here
msg = (
'Cannot assign "<Place: Demon Dogs the place>": '
'"Place.restaurant" must be a "Restaurant" instance.'
)
with self.assertRaisesMessage(ValueError, msg):
setattr(p, "restaurant", p)
# Creation using keyword argument should cache the related object.
p = Place.objects.get(name="Demon Dogs")
r = Restaurant(place=p)
self.assertIs(r.place, p)
# Creation using keyword argument and unsaved related instance (#8070).
p = Place()
r = Restaurant(place=p)
self.assertIs(r.place, p)
# Creation using attname keyword argument and an id will cause the related
# object to be fetched.
p = Place.objects.get(name="Demon Dogs")
r = Restaurant(place_id=p.id)
self.assertIsNot(r.place, p)
self.assertEqual(r.place, p)
def test_filter_one_to_one_relations(self):
"""
Regression test for #9968
filtering reverse one-to-one relations with primary_key=True was
misbehaving. We test both (primary_key=True & False) cases here to
prevent any reappearance of the problem.
"""
target = Target.objects.create()
self.assertSequenceEqual(Target.objects.filter(pointer=None), [target])
self.assertSequenceEqual(Target.objects.exclude(pointer=None), [])
self.assertSequenceEqual(Target.objects.filter(second_pointer=None), [target])
self.assertSequenceEqual(Target.objects.exclude(second_pointer=None), [])
def test_o2o_primary_key_delete(self):
t = Target.objects.create(name="name")
Pointer.objects.create(other=t)
num_deleted, objs = Pointer.objects.filter(other__name="name").delete()
self.assertEqual(num_deleted, 1)
self.assertEqual(objs, {"one_to_one.Pointer": 1})
def test_save_nullable_o2o_after_parent(self):
place = Place(name="Rose tattoo")
bar = UndergroundBar(place=place)
place.save()
bar.save()
bar.refresh_from_db()
self.assertEqual(bar.place, place)
def test_reverse_object_does_not_exist_cache(self):
"""
Regression for #13839 and #17439.
DoesNotExist on a reverse one-to-one relation is cached.
"""
p = Place(name="Zombie Cats", address="Not sure")
p.save()
with self.assertNumQueries(1):
with self.assertRaises(Restaurant.DoesNotExist):
p.restaurant
with self.assertNumQueries(0):
with self.assertRaises(Restaurant.DoesNotExist):
p.restaurant
def test_reverse_object_cached_when_related_is_accessed(self):
"""
Regression for #13839 and #17439.
The target of a one-to-one relation is cached
when the origin is accessed through the reverse relation.
"""
# Use a fresh object without caches
r = Restaurant.objects.get(pk=self.r1.pk)
p = r.place
with self.assertNumQueries(0):
self.assertEqual(p.restaurant, r)
def test_related_object_cached_when_reverse_is_accessed(self):
"""
Regression for #13839 and #17439.
The origin of a one-to-one relation is cached
when the target is accessed through the reverse relation.
"""
# Use a fresh object without caches
p = Place.objects.get(pk=self.p1.pk)
r = p.restaurant
with self.assertNumQueries(0):
self.assertEqual(r.place, p)
def test_reverse_object_cached_when_related_is_set(self):
"""
Regression for #13839 and #17439.
The target of a one-to-one relation is always cached.
"""
p = Place(name="Zombie Cats", address="Not sure")
p.save()
self.r1.place = p
self.r1.save()
with self.assertNumQueries(0):
self.assertEqual(p.restaurant, self.r1)
def test_reverse_object_cached_when_related_is_unset(self):
"""
Regression for #13839 and #17439.
The target of a one-to-one relation is always cached.
"""
b = UndergroundBar(place=self.p1, serves_cocktails=True)
b.save()
with self.assertNumQueries(0):
self.assertEqual(self.p1.undergroundbar, b)
b.place = None
b.save()
with self.assertNumQueries(0):
with self.assertRaises(UndergroundBar.DoesNotExist):
self.p1.undergroundbar
def test_get_reverse_on_unsaved_object(self):
"""
Regression for #18153 and #19089.
Accessing the reverse relation on an unsaved object
always raises an exception.
"""
p = Place()
# When there's no instance of the origin of the one-to-one
with self.assertNumQueries(0):
with self.assertRaises(UndergroundBar.DoesNotExist):
p.undergroundbar
UndergroundBar.objects.create()
# When there's one instance of the origin
# (p.undergroundbar used to return that instance)
with self.assertNumQueries(0):
with self.assertRaises(UndergroundBar.DoesNotExist):
p.undergroundbar
# Several instances of the origin are only possible if database allows
# inserting multiple NULL rows for a unique constraint
if connection.features.supports_nullable_unique_constraints:
UndergroundBar.objects.create()
# When there are several instances of the origin
with self.assertNumQueries(0):
with self.assertRaises(UndergroundBar.DoesNotExist):
p.undergroundbar
def test_set_reverse_on_unsaved_object(self):
"""
Writing to the reverse relation on an unsaved object
is impossible too.
"""
p = Place()
b = UndergroundBar.objects.create()
# Assigning a reverse relation on an unsaved object is allowed.
p.undergroundbar = b
# However saving the object is not allowed.
msg = (
"save() prohibited to prevent data loss due to unsaved related object "
"'place'."
)
with self.assertNumQueries(0):
with self.assertRaisesMessage(ValueError, msg):
b.save()
def test_nullable_o2o_delete(self):
u = UndergroundBar.objects.create(place=self.p1)
u.place_id = None
u.save()
self.p1.delete()
self.assertTrue(UndergroundBar.objects.filter(pk=u.pk).exists())
self.assertIsNone(UndergroundBar.objects.get(pk=u.pk).place)
def test_hidden_accessor(self):
"""
        When a related name ending in '+' is specified, no reverse accessor
        should be added to the related model.
"""
self.assertFalse(
hasattr(
Target,
HiddenPointer._meta.get_field(
"target"
).remote_field.get_accessor_name(),
)
)
def test_related_object(self):
public_school = School.objects.create(is_public=True)
public_director = Director.objects.create(school=public_school, is_temp=False)
private_school = School.objects.create(is_public=False)
private_director = Director.objects.create(school=private_school, is_temp=True)
# Only one school is available via all() due to the custom default manager.
self.assertSequenceEqual(School.objects.all(), [public_school])
# Only one director is available via all() due to the custom default manager.
self.assertSequenceEqual(Director.objects.all(), [public_director])
self.assertEqual(public_director.school, public_school)
self.assertEqual(public_school.director, public_director)
        # Make sure the base manager is used so that the related object
        # is still accessible even if the default manager doesn't normally
        # allow it.
self.assertEqual(private_director.school, private_school)
        # Make sure the base manager is used so that a school can still access
        # its related director even if the default manager doesn't normally
        # allow it.
self.assertEqual(private_school.director, private_director)
School._meta.base_manager_name = "objects"
School._meta._expire_cache()
try:
private_director = Director._base_manager.get(pk=private_director.pk)
with self.assertRaises(School.DoesNotExist):
private_director.school
finally:
School._meta.base_manager_name = None
School._meta._expire_cache()
Director._meta.base_manager_name = "objects"
Director._meta._expire_cache()
try:
private_school = School._base_manager.get(pk=private_school.pk)
with self.assertRaises(Director.DoesNotExist):
private_school.director
finally:
Director._meta.base_manager_name = None
Director._meta._expire_cache()
def test_hasattr_related_object(self):
# The exception raised on attribute access when a related object
# doesn't exist should be an instance of a subclass of `AttributeError`
# refs #21563
self.assertFalse(hasattr(Director(), "director"))
self.assertFalse(hasattr(School(), "school"))
def test_update_one_to_one_pk(self):
p1 = Place.objects.create()
p2 = Place.objects.create()
r1 = Restaurant.objects.create(place=p1)
r2 = Restaurant.objects.create(place=p2)
w = Waiter.objects.create(restaurant=r1)
Waiter.objects.update(restaurant=r2)
w.refresh_from_db()
self.assertEqual(w.restaurant, r2)
def test_rel_pk_subquery(self):
r = Restaurant.objects.first()
q1 = Restaurant.objects.filter(place_id=r.pk)
# Subquery using primary key and a query against the
# same model works correctly.
q2 = Restaurant.objects.filter(place_id__in=q1)
self.assertSequenceEqual(q2, [r])
        # Subquery using 'pk__in' instead of 'place_id__in' works, too.
q2 = Restaurant.objects.filter(
pk__in=Restaurant.objects.filter(place__id=r.place.pk)
)
self.assertSequenceEqual(q2, [r])
q3 = Restaurant.objects.filter(place__in=Place.objects.all())
self.assertSequenceEqual(q3, [r])
q4 = Restaurant.objects.filter(place__in=Place.objects.filter(id=r.pk))
self.assertSequenceEqual(q4, [r])
def test_rel_pk_exact(self):
r = Restaurant.objects.first()
r2 = Restaurant.objects.filter(pk__exact=r).first()
self.assertEqual(r, r2)
def test_primary_key_to_field_filter(self):
target = Target.objects.create(name="foo")
pointer = ToFieldPointer.objects.create(target=target)
self.assertSequenceEqual(
ToFieldPointer.objects.filter(target=target), [pointer]
)
self.assertSequenceEqual(
ToFieldPointer.objects.filter(pk__exact=pointer), [pointer]
)
def test_cached_relation_invalidated_on_save(self):
"""
Model.save() invalidates stale OneToOneField relations after a primary
key assignment.
"""
self.assertEqual(self.b1.place, self.p1) # caches b1.place
self.b1.place_id = self.p2.pk
self.b1.save()
self.assertEqual(self.b1.place, self.p2)
|
522275056e0bc4f3c6d3d5de59089895a57f346803a18f50a658d8510f180769 | """
One-to-one relationships
To define a one-to-one relationship, use ``OneToOneField()``.
In this example, a ``Place`` optionally can be a ``Restaurant``.
"""
from django.db import models
class Place(models.Model):
name = models.CharField(max_length=50)
address = models.CharField(max_length=80)
def __str__(self):
return "%s the place" % self.name
class Restaurant(models.Model):
place = models.OneToOneField(Place, models.CASCADE, primary_key=True)
serves_hot_dogs = models.BooleanField(default=False)
serves_pizza = models.BooleanField(default=False)
def __str__(self):
return "%s the restaurant" % self.place.name
class Bar(models.Model):
place = models.OneToOneField(Place, models.CASCADE)
serves_cocktails = models.BooleanField(default=True)
class UndergroundBar(models.Model):
place = models.OneToOneField(Place, models.SET_NULL, null=True)
serves_cocktails = models.BooleanField(default=True)
class Waiter(models.Model):
restaurant = models.ForeignKey(Restaurant, models.CASCADE)
name = models.CharField(max_length=50)
def __str__(self):
return "%s the waiter at %s" % (self.name, self.restaurant)
class Favorites(models.Model):
name = models.CharField(max_length=50)
restaurants = models.ManyToManyField(Restaurant)
class ManualPrimaryKey(models.Model):
primary_key = models.CharField(max_length=10, primary_key=True)
name = models.CharField(max_length=50)
class RelatedModel(models.Model):
link = models.OneToOneField(ManualPrimaryKey, models.CASCADE)
name = models.CharField(max_length=50)
class MultiModel(models.Model):
link1 = models.OneToOneField(Place, models.CASCADE)
link2 = models.OneToOneField(ManualPrimaryKey, models.CASCADE)
name = models.CharField(max_length=50)
def __str__(self):
return "Multimodel %s" % self.name
class Target(models.Model):
name = models.CharField(max_length=50, unique=True)
class Pointer(models.Model):
other = models.OneToOneField(Target, models.CASCADE, primary_key=True)
class Pointer2(models.Model):
other = models.OneToOneField(Target, models.CASCADE, related_name="second_pointer")
class HiddenPointer(models.Model):
target = models.OneToOneField(Target, models.CASCADE, related_name="hidden+")
class ToFieldPointer(models.Model):
target = models.OneToOneField(
Target, models.CASCADE, to_field="name", primary_key=True
)
# Test related objects visibility.
class SchoolManager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(is_public=True)
class School(models.Model):
is_public = models.BooleanField(default=False)
objects = SchoolManager()
class DirectorManager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(is_temp=False)
class Director(models.Model):
is_temp = models.BooleanField(default=False)
school = models.OneToOneField(School, models.CASCADE)
objects = DirectorManager()
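# Reverse one-to-one access goes through each model's base manager, so a School
# or Director filtered out by its default manager above remains reachable
# through the relation; OneToOneTests.test_related_object exercises this.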
|
206ba77e932dd6c4ea30345880e71e4f6881b775e992e303f0c91a5bb8511c02 | import datetime
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings
from django.views.generic.base import View
from .models import Artist, Author, Book, Page
@override_settings(ROOT_URLCONF="generic_views.urls")
class ListViewTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.artist1 = Artist.objects.create(name="Rene Magritte")
cls.author1 = Author.objects.create(
name="Roberto Bolaño", slug="roberto-bolano"
)
cls.author2 = Author.objects.create(
name="Scott Rosenberg", slug="scott-rosenberg"
)
cls.book1 = Book.objects.create(
name="2066", slug="2066", pages=800, pubdate=datetime.date(2008, 10, 1)
)
cls.book1.authors.add(cls.author1)
cls.book2 = Book.objects.create(
name="Dreaming in Code",
slug="dreaming-in-code",
pages=300,
pubdate=datetime.date(2006, 5, 1),
)
cls.page1 = Page.objects.create(
content="I was once bitten by a moose.",
template="generic_views/page_template.html",
)
def test_items(self):
res = self.client.get("/list/dict/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/list.html")
self.assertEqual(res.context["object_list"][0]["first"], "John")
def test_queryset(self):
res = self.client.get("/list/authors/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/author_list.html")
self.assertEqual(list(res.context["object_list"]), list(Author.objects.all()))
self.assertIsInstance(res.context["view"], View)
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertIsNone(res.context["paginator"])
self.assertIsNone(res.context["page_obj"])
self.assertFalse(res.context["is_paginated"])
def test_paginated_queryset(self):
self._make_authors(100)
res = self.client.get("/list/authors/paginated/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/author_list.html")
self.assertEqual(len(res.context["object_list"]), 30)
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertTrue(res.context["is_paginated"])
self.assertEqual(res.context["page_obj"].number, 1)
self.assertEqual(res.context["paginator"].num_pages, 4)
self.assertEqual(res.context["author_list"][0].name, "Author 00")
self.assertEqual(list(res.context["author_list"])[-1].name, "Author 29")
def test_paginated_queryset_shortdata(self):
# Short datasets also result in a paginated view.
res = self.client.get("/list/authors/paginated/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/author_list.html")
self.assertEqual(list(res.context["object_list"]), list(Author.objects.all()))
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertEqual(res.context["page_obj"].number, 1)
self.assertEqual(res.context["paginator"].num_pages, 1)
self.assertFalse(res.context["is_paginated"])
def test_paginated_get_page_by_query_string(self):
self._make_authors(100)
res = self.client.get("/list/authors/paginated/", {"page": "2"})
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/author_list.html")
self.assertEqual(len(res.context["object_list"]), 30)
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertEqual(res.context["author_list"][0].name, "Author 30")
self.assertEqual(res.context["page_obj"].number, 2)
def test_paginated_get_last_page_by_query_string(self):
self._make_authors(100)
res = self.client.get("/list/authors/paginated/", {"page": "last"})
self.assertEqual(res.status_code, 200)
self.assertEqual(len(res.context["object_list"]), 10)
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertEqual(res.context["author_list"][0].name, "Author 90")
self.assertEqual(res.context["page_obj"].number, 4)
def test_paginated_get_page_by_urlvar(self):
self._make_authors(100)
res = self.client.get("/list/authors/paginated/3/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/author_list.html")
self.assertEqual(len(res.context["object_list"]), 30)
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertEqual(res.context["author_list"][0].name, "Author 60")
self.assertEqual(res.context["page_obj"].number, 3)
def test_paginated_page_out_of_range(self):
self._make_authors(100)
res = self.client.get("/list/authors/paginated/42/")
self.assertEqual(res.status_code, 404)
def test_paginated_invalid_page(self):
self._make_authors(100)
res = self.client.get("/list/authors/paginated/?page=frog")
self.assertEqual(res.status_code, 404)
def test_paginated_custom_paginator_class(self):
self._make_authors(7)
res = self.client.get("/list/authors/paginated/custom_class/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["paginator"].num_pages, 1)
# Custom pagination allows for 2 orphans on a page size of 5
self.assertEqual(len(res.context["object_list"]), 7)
def test_paginated_custom_page_kwarg(self):
self._make_authors(100)
res = self.client.get(
"/list/authors/paginated/custom_page_kwarg/", {"pagina": "2"}
)
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/author_list.html")
self.assertEqual(len(res.context["object_list"]), 30)
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertEqual(res.context["author_list"][0].name, "Author 30")
self.assertEqual(res.context["page_obj"].number, 2)
def test_paginated_custom_paginator_constructor(self):
self._make_authors(7)
res = self.client.get("/list/authors/paginated/custom_constructor/")
self.assertEqual(res.status_code, 200)
# Custom pagination allows for 2 orphans on a page size of 5
self.assertEqual(len(res.context["object_list"]), 7)
def test_paginated_orphaned_queryset(self):
self._make_authors(92)
res = self.client.get("/list/authors/paginated-orphaned/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["page_obj"].number, 1)
res = self.client.get("/list/authors/paginated-orphaned/", {"page": "last"})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["page_obj"].number, 3)
res = self.client.get("/list/authors/paginated-orphaned/", {"page": "3"})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["page_obj"].number, 3)
res = self.client.get("/list/authors/paginated-orphaned/", {"page": "4"})
self.assertEqual(res.status_code, 404)
def test_paginated_non_queryset(self):
res = self.client.get("/list/dict/paginated/")
self.assertEqual(res.status_code, 200)
self.assertEqual(len(res.context["object_list"]), 1)
def test_verbose_name(self):
res = self.client.get("/list/artists/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/list.html")
self.assertEqual(list(res.context["object_list"]), list(Artist.objects.all()))
self.assertIs(res.context["artist_list"], res.context["object_list"])
self.assertIsNone(res.context["paginator"])
self.assertIsNone(res.context["page_obj"])
self.assertFalse(res.context["is_paginated"])
def test_allow_empty_false(self):
res = self.client.get("/list/authors/notempty/")
self.assertEqual(res.status_code, 200)
Author.objects.all().delete()
res = self.client.get("/list/authors/notempty/")
self.assertEqual(res.status_code, 404)
def test_template_name(self):
res = self.client.get("/list/authors/template_name/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["object_list"]), list(Author.objects.all()))
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertTemplateUsed(res, "generic_views/list.html")
def test_template_name_suffix(self):
res = self.client.get("/list/authors/template_name_suffix/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["object_list"]), list(Author.objects.all()))
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertTemplateUsed(res, "generic_views/author_objects.html")
def test_context_object_name(self):
res = self.client.get("/list/authors/context_object_name/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["object_list"]), list(Author.objects.all()))
self.assertNotIn("authors", res.context)
self.assertIs(res.context["author_list"], res.context["object_list"])
self.assertTemplateUsed(res, "generic_views/author_list.html")
def test_duplicate_context_object_name(self):
res = self.client.get("/list/authors/dupe_context_object_name/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["object_list"]), list(Author.objects.all()))
self.assertNotIn("authors", res.context)
self.assertNotIn("author_list", res.context)
self.assertTemplateUsed(res, "generic_views/author_list.html")
def test_missing_items(self):
msg = (
"AuthorList is missing a QuerySet. Define AuthorList.model, "
"AuthorList.queryset, or override AuthorList.get_queryset()."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/list/authors/invalid/")
def test_invalid_get_queryset(self):
msg = (
"AuthorListGetQuerysetReturnsNone requires either a 'template_name' "
"attribute or a get_queryset() method that returns a QuerySet."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/list/authors/get_queryset/")
def test_paginated_list_view_does_not_load_entire_table(self):
# Regression test for #17535
self._make_authors(3)
# 1 query for authors
with self.assertNumQueries(1):
self.client.get("/list/authors/notempty/")
# same as above + 1 query to test if authors exist + 1 query for pagination
with self.assertNumQueries(3):
self.client.get("/list/authors/notempty/paginated/")
def test_explicitly_ordered_list_view(self):
Book.objects.create(
name="Zebras for Dummies", pages=800, pubdate=datetime.date(2006, 9, 1)
)
res = self.client.get("/list/books/sorted/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object_list"][0].name, "2066")
self.assertEqual(res.context["object_list"][1].name, "Dreaming in Code")
self.assertEqual(res.context["object_list"][2].name, "Zebras for Dummies")
res = self.client.get("/list/books/sortedbypagesandnamedec/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object_list"][0].name, "Dreaming in Code")
self.assertEqual(res.context["object_list"][1].name, "Zebras for Dummies")
self.assertEqual(res.context["object_list"][2].name, "2066")
@override_settings(DEBUG=True)
def test_paginated_list_view_returns_useful_message_on_invalid_page(self):
        # Test for #19240: the source exception's message is included in the page.
self._make_authors(1)
res = self.client.get("/list/authors/paginated/2/")
self.assertEqual(res.status_code, 404)
self.assertEqual(
res.context.get("reason"), "Invalid page (2): That page contains no results"
)
def _make_authors(self, n):
Author.objects.all().delete()
for i in range(n):
Author.objects.create(name="Author %02i" % i, slug="a%s" % i)
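# A minimal sketch of the kind of view "/list/authors/paginated/" could map to
# (the real view classes live in generic_views.views, not shown here; the class
# name below is hypothetical). paginate_by=30 would explain the 30-per-page and
# 4-pages-for-100-authors expectations asserted above.
from django.views.generic import ListView
class AuthorListSketch(ListView):
    model = Author
    paginate_by = 30
    template_name = "generic_views/author_list.html"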
|
dcaf98b0c5f242b9f5f4b6fb86e71003b91e976a2ffb78e0884d13e2a4284992 | import datetime
from unittest import mock
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings, skipUnlessDBFeature
from django.test.utils import requires_tz_support
from django.utils import timezone
from .models import Artist, Author, Book, BookSigning, Page
def _make_books(n, base_date):
for i in range(n):
Book.objects.create(
name="Book %d" % i,
slug="book-%d" % i,
pages=100 + i,
pubdate=base_date - datetime.timedelta(days=i),
)
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.artist1 = Artist.objects.create(name="Rene Magritte")
cls.author1 = Author.objects.create(
name="Roberto Bolaño", slug="roberto-bolano"
)
cls.author2 = Author.objects.create(
name="Scott Rosenberg", slug="scott-rosenberg"
)
cls.book1 = Book.objects.create(
name="2066", slug="2066", pages=800, pubdate=datetime.date(2008, 10, 1)
)
cls.book1.authors.add(cls.author1)
cls.book2 = Book.objects.create(
name="Dreaming in Code",
slug="dreaming-in-code",
pages=300,
pubdate=datetime.date(2006, 5, 1),
)
cls.page1 = Page.objects.create(
content="I was once bitten by a moose.",
template="generic_views/page_template.html",
)
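# A minimal sketch of the archive view these tests exercise (hypothetical name;
# the real classes live in generic_views.views). date_field is required, as
# test_archive_view_without_date_field asserts, and the model/queryset
# requirement matches the error message in test_archive_view_invalid.
from django.views.generic.dates import ArchiveIndexView
class BookArchiveSketch(ArchiveIndexView):
    model = Book
    date_field = "pubdate"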
@override_settings(ROOT_URLCONF="generic_views.urls")
class ArchiveIndexViewTests(TestDataMixin, TestCase):
def test_archive_view(self):
res = self.client.get("/dates/books/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()))
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_archive_view_context_object_name(self):
res = self.client.get("/dates/books/context_object_name/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["thingies"]), list(Book.objects.all()))
self.assertNotIn("latest", res.context)
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get("/dates/books/")
self.assertEqual(res.status_code, 404)
def test_allow_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get("/dates/books/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [])
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_archive_view_template(self):
res = self.client.get("/dates/books/template_name/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()))
self.assertTemplateUsed(res, "generic_views/list.html")
def test_archive_view_template_suffix(self):
res = self.client.get("/dates/books/template_name_suffix/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()))
self.assertTemplateUsed(res, "generic_views/book_detail.html")
def test_archive_view_invalid(self):
msg = (
"BookArchive is missing a QuerySet. Define BookArchive.model, "
"BookArchive.queryset, or override BookArchive.get_queryset()."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/dates/books/invalid/")
def test_archive_view_by_month(self):
res = self.client.get("/dates/books/by_month/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "month", "DESC")),
)
def test_paginated_archive_view(self):
_make_books(20, base_date=datetime.date.today())
res = self.client.get("/dates/books/paginated/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()[0:10]))
self.assertTemplateUsed(res, "generic_views/book_archive.html")
res = self.client.get("/dates/books/paginated/?page=2")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["page_obj"].number, 2)
self.assertEqual(list(res.context["latest"]), list(Book.objects.all()[10:20]))
def test_paginated_archive_view_does_not_load_entire_table(self):
# Regression test for #18087
_make_books(20, base_date=datetime.date.today())
# 1 query for years list + 1 query for books
with self.assertNumQueries(2):
self.client.get("/dates/books/")
# same as above + 1 query to test if books exist + 1 query to count them
with self.assertNumQueries(4):
self.client.get("/dates/books/paginated/")
def test_no_duplicate_query(self):
# Regression test for #18354
with self.assertNumQueries(2):
self.client.get("/dates/books/reverse/")
def test_datetime_archive_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/")
self.assertEqual(res.status_code, 200)
@requires_tz_support
@skipUnlessDBFeature("has_zoneinfo_database")
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_archive_view(self):
BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc)
)
res = self.client.get("/dates/booksignings/")
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted descending in index"""
_make_books(5, base_date=datetime.date(2011, 12, 25))
res = self.client.get("/dates/books/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(reversed(sorted(res.context["date_list"]))),
)
def test_archive_view_custom_sorting(self):
Book.objects.create(
name="Zebras for Dummies", pages=600, pubdate=datetime.date(2007, 5, 1)
)
res = self.client.get("/dates/books/sortedbyname/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(
list(res.context["latest"]), list(Book.objects.order_by("name").all())
)
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_archive_view_custom_sorting_dec(self):
Book.objects.create(
name="Zebras for Dummies", pages=600, pubdate=datetime.date(2007, 5, 1)
)
res = self.client.get("/dates/books/sortedbynamedec/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
list(Book.objects.dates("pubdate", "year", "DESC")),
)
self.assertEqual(
list(res.context["latest"]), list(Book.objects.order_by("-name").all())
)
self.assertTemplateUsed(res, "generic_views/book_archive.html")
def test_archive_view_without_date_field(self):
msg = "BookArchiveWithoutDateField.date_field is required."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/dates/books/without_date_field/")
@override_settings(ROOT_URLCONF="generic_views.urls")
class YearArchiveViewTests(TestDataMixin, TestCase):
def test_year_view(self):
res = self.client.get("/dates/books/2008/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [datetime.date(2008, 10, 1)])
self.assertEqual(res.context["year"], datetime.date(2008, 1, 1))
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
# Since allow_empty=False, next/prev years must be valid (#7164)
self.assertIsNone(res.context["next_year"])
self.assertEqual(res.context["previous_year"], datetime.date(2006, 1, 1))
def test_year_view_make_object_list(self):
res = self.client.get("/dates/books/2006/make_object_list/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [datetime.date(2006, 5, 1)])
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2006)),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2006)),
)
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
def test_year_view_empty(self):
res = self.client.get("/dates/books/1999/")
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/1999/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [])
self.assertEqual(list(res.context["book_list"]), [])
# Since allow_empty=True, next/prev are allowed to be empty years (#7164)
self.assertEqual(res.context["next_year"], datetime.date(2000, 1, 1))
self.assertEqual(res.context["previous_year"], datetime.date(1998, 1, 1))
def test_year_view_allow_future(self):
# Create a new book in the future
year = datetime.date.today().year + 1
Book.objects.create(
name="The New New Testement", pages=600, pubdate=datetime.date(year, 1, 1)
)
res = self.client.get("/dates/books/%s/" % year)
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/%s/allow_empty/" % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [])
res = self.client.get("/dates/books/%s/allow_future/" % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [datetime.date(year, 1, 1)])
def test_year_view_paginated(self):
res = self.client.get("/dates/books/2006/paginated/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2006)),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2006)),
)
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
def test_year_view_custom_sort_order(self):
# Zebras comes after Dreaming by name, but before on '-pubdate' which
# is the default sorting.
Book.objects.create(
name="Zebras for Dummies", pages=600, pubdate=datetime.date(2006, 9, 1)
)
res = self.client.get("/dates/books/2006/sortedbyname/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
[datetime.date(2006, 5, 1), datetime.date(2006, 9, 1)],
)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2006).order_by("name")),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2006).order_by("name")),
)
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
def test_year_view_two_custom_sort_orders(self):
Book.objects.create(
name="Zebras for Dummies", pages=300, pubdate=datetime.date(2006, 9, 1)
)
Book.objects.create(
name="Hunting Hippos", pages=400, pubdate=datetime.date(2006, 3, 1)
)
res = self.client.get("/dates/books/2006/sortedbypageandnamedec/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["date_list"]),
[
datetime.date(2006, 3, 1),
datetime.date(2006, 5, 1),
datetime.date(2006, 9, 1),
],
)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2006).order_by("pages", "-name")),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2006).order_by("pages", "-name")),
)
self.assertTemplateUsed(res, "generic_views/book_archive_year.html")
def test_year_view_invalid_pattern(self):
res = self.client.get("/dates/books/no_year/")
self.assertEqual(res.status_code, 404)
def test_no_duplicate_query(self):
# Regression test for #18354
with self.assertNumQueries(4):
self.client.get("/dates/books/2008/reverse/")
def test_datetime_year_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/2008/")
self.assertEqual(res.status_code, 200)
@skipUnlessDBFeature("has_zoneinfo_database")
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_year_view(self):
BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc)
)
res = self.client.get("/dates/booksignings/2008/")
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted ascending in year view"""
_make_books(10, base_date=datetime.date(2011, 12, 25))
res = self.client.get("/dates/books/2011/")
self.assertEqual(
list(res.context["date_list"]), list(sorted(res.context["date_list"]))
)
@mock.patch("django.views.generic.list.MultipleObjectMixin.get_context_data")
def test_get_context_data_receives_extra_context(self, mock):
"""
MultipleObjectMixin.get_context_data() receives the context set by
BaseYearArchiveView.get_dated_items(). This behavior is implemented in
BaseDateListView.get().
"""
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
with self.assertRaisesMessage(
TypeError, "context must be a dict rather than MagicMock."
):
self.client.get("/dates/booksignings/2008/")
args, kwargs = mock.call_args
# These are context values from get_dated_items().
self.assertEqual(kwargs["year"], datetime.date(2008, 1, 1))
self.assertIsNone(kwargs["previous_year"])
self.assertIsNone(kwargs["next_year"])
def test_get_dated_items_not_implemented(self):
msg = "A DateView must provide an implementation of get_dated_items()"
with self.assertRaisesMessage(NotImplementedError, msg):
self.client.get("/BaseDateListViewTest/")
@override_settings(ROOT_URLCONF="generic_views.urls")
class MonthArchiveViewTests(TestDataMixin, TestCase):
def test_month_view(self):
res = self.client.get("/dates/books/2008/oct/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_month.html")
self.assertEqual(list(res.context["date_list"]), [datetime.date(2008, 10, 1)])
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))),
)
self.assertEqual(res.context["month"], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev months must be valid (#7164)
self.assertIsNone(res.context["next_month"])
self.assertEqual(res.context["previous_month"], datetime.date(2006, 5, 1))
def test_month_view_allow_empty(self):
# allow_empty = False, empty month
res = self.client.get("/dates/books/2000/jan/")
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get("/dates/books/2000/jan/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["date_list"]), [])
self.assertEqual(list(res.context["book_list"]), [])
self.assertEqual(res.context["month"], datetime.date(2000, 1, 1))
# Since allow_empty=True, next/prev are allowed to be empty months (#7164)
self.assertEqual(res.context["next_month"], datetime.date(2000, 2, 1))
self.assertEqual(res.context["previous_month"], datetime.date(1999, 12, 1))
# allow_empty but not allow_future: next_month should be empty (#7164)
url = datetime.date.today().strftime("/dates/books/%Y/%b/allow_empty/").lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertIsNone(res.context["next_month"])
def test_month_view_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60)).replace(day=1)
urlbit = future.strftime("%Y/%b").lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
# allow_future = False, future month
res = self.client.get("/dates/books/%s/" % urlbit)
self.assertEqual(res.status_code, 404)
# allow_future = True, valid future month
res = self.client.get("/dates/books/%s/allow_future/" % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["date_list"][0], b.pubdate)
self.assertEqual(list(res.context["book_list"]), [b])
self.assertEqual(res.context["month"], future)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty months (#7164)
self.assertIsNone(res.context["next_month"])
self.assertEqual(res.context["previous_month"], datetime.date(2008, 10, 1))
# allow_future, but not allow_empty, with a current month. So next
# should be in the future (yup, #7164, again)
res = self.client.get("/dates/books/2008/oct/allow_future/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["next_month"], future)
self.assertEqual(res.context["previous_month"], datetime.date(2006, 5, 1))
def test_month_view_paginated(self):
res = self.client.get("/dates/books/2008/oct/paginated/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)),
)
self.assertTemplateUsed(res, "generic_views/book_archive_month.html")
def test_custom_month_format(self):
res = self.client.get("/dates/books/2008/10/")
self.assertEqual(res.status_code, 200)
def test_month_view_invalid_pattern(self):
res = self.client.get("/dates/books/2007/no_month/")
self.assertEqual(res.status_code, 404)
def test_previous_month_without_content(self):
"Content can exist on any day of the previous month. Refs #14711"
self.pubdate_list = [
datetime.date(2010, month, day) for month, day in ((9, 1), (10, 2), (11, 3))
]
for pubdate in self.pubdate_list:
name = str(pubdate)
Book.objects.create(name=name, slug=name, pages=100, pubdate=pubdate)
res = self.client.get("/dates/books/2010/nov/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["previous_month"], datetime.date(2010, 10, 1))
# The following test demonstrates the bug
res = self.client.get("/dates/books/2010/nov/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["previous_month"], datetime.date(2010, 10, 1))
# The bug does not occur here because a Book with pubdate of Sep 1 exists
res = self.client.get("/dates/books/2010/oct/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["previous_month"], datetime.date(2010, 9, 1))
def test_datetime_month_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 2, 1, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 6, 3, 12, 0))
res = self.client.get("/dates/booksignings/2008/apr/")
self.assertEqual(res.status_code, 200)
def test_month_view_get_month_from_request(self):
oct1 = datetime.date(2008, 10, 1)
res = self.client.get("/dates/books/without_month/2008/?month=oct")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_month.html")
self.assertEqual(list(res.context["date_list"]), [oct1])
self.assertEqual(
list(res.context["book_list"]), list(Book.objects.filter(pubdate=oct1))
)
self.assertEqual(res.context["month"], oct1)
def test_month_view_without_month_in_url(self):
res = self.client.get("/dates/books/without_month/2008/")
self.assertEqual(res.status_code, 404)
self.assertEqual(res.context["exception"], "No month specified")
@skipUnlessDBFeature("has_zoneinfo_database")
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_month_view(self):
BookSigning.objects.create(
event_date=datetime.datetime(2008, 2, 1, 12, 0, tzinfo=timezone.utc)
)
BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc)
)
BookSigning.objects.create(
event_date=datetime.datetime(2008, 6, 3, 12, 0, tzinfo=timezone.utc)
)
res = self.client.get("/dates/booksignings/2008/apr/")
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted ascending in month view"""
_make_books(10, base_date=datetime.date(2011, 12, 25))
res = self.client.get("/dates/books/2011/dec/")
self.assertEqual(
list(res.context["date_list"]), list(sorted(res.context["date_list"]))
)
@override_settings(ROOT_URLCONF="generic_views.urls")
class WeekArchiveViewTests(TestDataMixin, TestCase):
def test_week_view(self):
res = self.client.get("/dates/books/2008/week/39/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_week.html")
self.assertEqual(
res.context["book_list"][0],
Book.objects.get(pubdate=datetime.date(2008, 10, 1)),
)
self.assertEqual(res.context["week"], datetime.date(2008, 9, 28))
# Since allow_empty=False, next/prev weeks must be valid
self.assertIsNone(res.context["next_week"])
self.assertEqual(res.context["previous_week"], datetime.date(2006, 4, 30))
def test_week_view_allow_empty(self):
# allow_empty = False, empty week
res = self.client.get("/dates/books/2008/week/12/")
self.assertEqual(res.status_code, 404)
        # allow_empty = True, empty week
res = self.client.get("/dates/books/2008/week/12/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [])
self.assertEqual(res.context["week"], datetime.date(2008, 3, 23))
# Since allow_empty=True, next/prev are allowed to be empty weeks
self.assertEqual(res.context["next_week"], datetime.date(2008, 3, 30))
self.assertEqual(res.context["previous_week"], datetime.date(2008, 3, 16))
# allow_empty but not allow_future: next_week should be empty
url = (
datetime.date.today()
.strftime("/dates/books/%Y/week/%U/allow_empty/")
.lower()
)
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertIsNone(res.context["next_week"])
def test_week_view_allow_future(self):
# January 7th always falls in week 1, given Python's definition of week numbers
future = datetime.date(datetime.date.today().year + 1, 1, 7)
future_sunday = future - datetime.timedelta(days=(future.weekday() + 1) % 7)
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
res = self.client.get("/dates/books/%s/week/1/" % future.year)
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/%s/week/1/allow_future/" % future.year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [b])
self.assertEqual(res.context["week"], future_sunday)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty weeks
self.assertIsNone(res.context["next_week"])
self.assertEqual(res.context["previous_week"], datetime.date(2008, 9, 28))
# allow_future, but not allow_empty, with a current week. So next
# should be in the future
res = self.client.get("/dates/books/2008/week/39/allow_future/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["next_week"], future_sunday)
self.assertEqual(res.context["previous_week"], datetime.date(2006, 4, 30))
def test_week_view_paginated(self):
week_start = datetime.date(2008, 9, 28)
week_end = week_start + datetime.timedelta(days=7)
res = self.client.get("/dates/books/2008/week/39/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)),
)
self.assertEqual(
list(res.context["object_list"]),
list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)),
)
self.assertTemplateUsed(res, "generic_views/book_archive_week.html")
def test_week_view_invalid_pattern(self):
res = self.client.get("/dates/books/2007/week/no_week/")
self.assertEqual(res.status_code, 404)
def test_week_start_Monday(self):
# Regression for #14752
res = self.client.get("/dates/books/2008/week/39/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["week"], datetime.date(2008, 9, 28))
res = self.client.get("/dates/books/2008/week/39/monday/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["week"], datetime.date(2008, 9, 29))
def test_week_iso_format(self):
res = self.client.get("/dates/books/2008/week/40/iso_format/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_week.html")
self.assertEqual(
list(res.context["book_list"]),
[Book.objects.get(pubdate=datetime.date(2008, 10, 1))],
)
self.assertEqual(res.context["week"], datetime.date(2008, 9, 29))
def test_unknown_week_format(self):
msg = "Unknown week format '%T'. Choices are: %U, %V, %W"
with self.assertRaisesMessage(ValueError, msg):
self.client.get("/dates/books/2008/week/39/unknown_week_format/")
def test_incompatible_iso_week_format_view(self):
msg = (
"ISO week directive '%V' is incompatible with the year directive "
"'%Y'. Use the ISO year '%G' instead."
)
with self.assertRaisesMessage(ValueError, msg):
self.client.get("/dates/books/2008/week/40/invalid_iso_week_year_format/")
def test_datetime_week_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/2008/week/13/")
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_week_view(self):
BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc)
)
res = self.client.get("/dates/booksignings/2008/week/13/")
self.assertEqual(res.status_code, 200)
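# --- Illustrative sketch (not part of Django's test suite) -------------------
# The %U / %W / %V directives exercised above differ in their first day of the
# week and, for %V, in the year directive they must be paired with (%G, not
# %Y). The helper name below is hypothetical; it only demonstrates the
# standard-library parsing that underlies the week URLs:
def _week_format_examples():
    """Map week 39/40 of 2008 to dates under the three week directives."""
    from datetime import datetime

    # %U: Sunday-based week numbers (the WeekArchiveView default).
    sunday_based = datetime.strptime("2008-39-0", "%Y-%U-%w").date()  # 2008-09-28
    # %W: Monday-based week numbers (the ".../monday/" URL above).
    monday_based = datetime.strptime("2008-39-1", "%Y-%W-%w").date()  # 2008-09-29
    # %V: ISO week numbers; must be combined with the ISO year %G.
    iso_week = datetime.strptime("2008-40-1", "%G-%V-%u").date()  # 2008-09-29
    return sunday_based, monday_based, iso_week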
@override_settings(ROOT_URLCONF="generic_views.urls")
class DayArchiveViewTests(TestDataMixin, TestCase):
def test_day_view(self):
res = self.client.get("/dates/books/2008/oct/01/")
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/book_archive_day.html")
self.assertEqual(
list(res.context["book_list"]),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))),
)
self.assertEqual(res.context["day"], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev days must be valid.
self.assertIsNone(res.context["next_day"])
self.assertEqual(res.context["previous_day"], datetime.date(2006, 5, 1))
def test_day_view_allow_empty(self):
        # allow_empty = False, empty day
res = self.client.get("/dates/books/2000/jan/1/")
self.assertEqual(res.status_code, 404)
        # allow_empty = True, empty day
res = self.client.get("/dates/books/2000/jan/1/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [])
self.assertEqual(res.context["day"], datetime.date(2000, 1, 1))
        # Since allow_empty=True, next/prev are allowed to be empty days (#7164)
self.assertEqual(res.context["next_day"], datetime.date(2000, 1, 2))
self.assertEqual(res.context["previous_day"], datetime.date(1999, 12, 31))
        # allow_empty but not allow_future: next_day should be empty (#7164)
url = (
datetime.date.today().strftime("/dates/books/%Y/%b/%d/allow_empty/").lower()
)
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertIsNone(res.context["next_day"])
def test_day_view_allow_future(self):
future = datetime.date.today() + datetime.timedelta(days=60)
urlbit = future.strftime("%Y/%b/%d").lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
        # allow_future = False, future day
res = self.client.get("/dates/books/%s/" % urlbit)
self.assertEqual(res.status_code, 404)
        # allow_future = True, valid future day
res = self.client.get("/dates/books/%s/allow_future/" % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context["book_list"]), [b])
self.assertEqual(res.context["day"], future)
# allow_future but not allow_empty, next/prev must be valid
self.assertIsNone(res.context["next_day"])
self.assertEqual(res.context["previous_day"], datetime.date(2008, 10, 1))
        # allow_future, but not allow_empty, with a non-empty day.
res = self.client.get("/dates/books/2008/oct/01/allow_future/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["next_day"], future)
self.assertEqual(res.context["previous_day"], datetime.date(2006, 5, 1))
# allow_future for yesterday, next_day is today (#17192)
today = datetime.date.today()
yesterday = today - datetime.timedelta(days=1)
res = self.client.get(
"/dates/books/%s/allow_empty_and_future/"
% yesterday.strftime("%Y/%b/%d").lower()
)
self.assertEqual(res.context["next_day"], today)
def test_day_view_paginated(self):
res = self.client.get("/dates/books/2008/oct/1/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
list(res.context["book_list"]),
list(
Book.objects.filter(
pubdate__year=2008, pubdate__month=10, pubdate__day=1
)
),
)
self.assertEqual(
list(res.context["object_list"]),
list(
Book.objects.filter(
pubdate__year=2008, pubdate__month=10, pubdate__day=1
)
),
)
self.assertTemplateUsed(res, "generic_views/book_archive_day.html")
def test_next_prev_context(self):
res = self.client.get("/dates/books/2008/oct/01/")
self.assertEqual(
res.content, b"Archive for Oct. 1, 2008. Previous day is May 1, 2006\n"
)
def test_custom_month_format(self):
res = self.client.get("/dates/books/2008/10/01/")
self.assertEqual(res.status_code, 200)
def test_day_view_invalid_pattern(self):
res = self.client.get("/dates/books/2007/oct/no_day/")
self.assertEqual(res.status_code, 404)
def test_today_view(self):
res = self.client.get("/dates/books/today/")
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/today/allow_empty/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["day"], datetime.date.today())
def test_datetime_day_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/2008/apr/2/")
self.assertEqual(res.status_code, 200)
@requires_tz_support
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_day_view(self):
bs = BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc)
)
res = self.client.get("/dates/booksignings/2008/apr/2/")
self.assertEqual(res.status_code, 200)
        # The event at 2008-04-01T22:00:00+00:00 is 2008-04-02T01:00:00+03:00
        # in Africa/Nairobi, so it still falls within April 2 locally.
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get("/dates/booksignings/2008/apr/2/")
self.assertEqual(res.status_code, 200)
        # The event at 2008-04-02T22:00:00+00:00 is 2008-04-03T01:00:00+03:00
        # in Africa/Nairobi, so it falls outside April 2 locally.
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get("/dates/booksignings/2008/apr/2/")
self.assertEqual(res.status_code, 404)
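# --- Illustrative sketch (not part of Django's test suite) -------------------
# The aware-datetime assertions above depend on day boundaries being computed
# in the active time zone (Africa/Nairobi, UTC+3). The helper name is
# hypothetical and assumes Python 3.9+'s zoneinfo; it only shows the
# conversion the comments above refer to:
def _nairobi_day_example():
    """Show why 2008-04-01T22:00 UTC still counts as April 2 in Nairobi."""
    import datetime
    import zoneinfo

    nairobi = zoneinfo.ZoneInfo("Africa/Nairobi")
    event = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=datetime.timezone.utc)
    return event.astimezone(nairobi).date()  # datetime.date(2008, 4, 2)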
@override_settings(ROOT_URLCONF="generic_views.urls")
class DateDetailViewTests(TestDataMixin, TestCase):
def test_date_detail_by_pk(self):
res = self.client.get("/dates/books/2008/oct/01/%s/" % self.book1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.book1)
self.assertEqual(res.context["book"], self.book1)
self.assertTemplateUsed(res, "generic_views/book_detail.html")
def test_date_detail_by_slug(self):
res = self.client.get("/dates/books/2006/may/01/byslug/dreaming-in-code/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["book"], Book.objects.get(slug="dreaming-in-code"))
def test_date_detail_custom_month_format(self):
res = self.client.get("/dates/books/2008/10/01/%s/" % self.book1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["book"], self.book1)
def test_date_detail_allow_future(self):
future = datetime.date.today() + datetime.timedelta(days=60)
urlbit = future.strftime("%Y/%b/%d").lower()
b = Book.objects.create(
name="The New New Testement", slug="new-new", pages=600, pubdate=future
)
res = self.client.get("/dates/books/%s/new-new/" % urlbit)
self.assertEqual(res.status_code, 404)
res = self.client.get("/dates/books/%s/%s/allow_future/" % (urlbit, b.id))
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["book"], b)
self.assertTemplateUsed(res, "generic_views/book_detail.html")
def test_year_out_of_range(self):
urls = [
"/dates/books/9999/",
"/dates/books/9999/12/",
"/dates/books/9999/week/52/",
]
for url in urls:
with self.subTest(url=url):
res = self.client.get(url)
self.assertEqual(res.status_code, 404)
self.assertEqual(res.context["exception"], "Date out of range")
def test_invalid_url(self):
msg = (
"Generic detail view BookDetail must be called with either an "
"object pk or a slug in the URLconf."
)
with self.assertRaisesMessage(AttributeError, msg):
self.client.get("/dates/books/2008/oct/01/nopk/")
def test_get_object_custom_queryset(self):
"""
Custom querysets are used when provided to
BaseDateDetailView.get_object().
"""
res = self.client.get(
"/dates/books/get_object_custom_queryset/2006/may/01/%s/" % self.book2.pk
)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.book2)
self.assertEqual(res.context["book"], self.book2)
self.assertTemplateUsed(res, "generic_views/book_detail.html")
res = self.client.get(
"/dates/books/get_object_custom_queryset/2008/oct/01/9999999/"
)
self.assertEqual(res.status_code, 404)
def test_get_object_custom_queryset_numqueries(self):
with self.assertNumQueries(1):
self.client.get("/dates/books/get_object_custom_queryset/2006/may/01/2/")
def test_datetime_date_detail(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get("/dates/booksignings/2008/apr/2/%d/" % bs.pk)
self.assertEqual(res.status_code, 200)
@requires_tz_support
@override_settings(USE_TZ=True, TIME_ZONE="Africa/Nairobi")
def test_aware_datetime_date_detail(self):
bs = BookSigning.objects.create(
event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc)
)
res = self.client.get("/dates/booksignings/2008/apr/2/%d/" % bs.pk)
self.assertEqual(res.status_code, 200)
        # The event at 2008-04-01T22:00:00+00:00 is 2008-04-02T01:00:00+03:00
        # in Africa/Nairobi, so it still falls within April 2 locally.
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get("/dates/booksignings/2008/apr/2/%d/" % bs.pk)
self.assertEqual(res.status_code, 200)
        # The event at 2008-04-02T22:00:00+00:00 is 2008-04-03T01:00:00+03:00
        # in Africa/Nairobi, so it falls outside April 2 locally.
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get("/dates/booksignings/2008/apr/2/%d/" % bs.pk)
self.assertEqual(res.status_code, 404)
|
97d7a39aa1ef65ffba5515367688ee0f8c111b2ab0e9b4764e64dc2d0de22e02 | from django.db import models
from django.db.models import QuerySet
from django.db.models.manager import BaseManager
from django.urls import reverse
class Artist(models.Model):
name = models.CharField(max_length=100)
class Meta:
ordering = ["name"]
verbose_name = "professional artist"
verbose_name_plural = "professional artists"
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse("artist_detail", kwargs={"pk": self.id})
class Author(models.Model):
name = models.CharField(max_length=100)
slug = models.SlugField()
class Meta:
ordering = ["name"]
def __str__(self):
return self.name
class DoesNotExistQuerySet(QuerySet):
def get(self, *args, **kwargs):
raise Author.DoesNotExist
DoesNotExistBookManager = BaseManager.from_queryset(DoesNotExistQuerySet)
class Book(models.Model):
name = models.CharField(max_length=255)
slug = models.SlugField()
pages = models.IntegerField()
authors = models.ManyToManyField(Author)
pubdate = models.DateField()
objects = models.Manager()
does_not_exist = DoesNotExistBookManager()
class Meta:
ordering = ["-pubdate"]
def __str__(self):
return self.name
class Page(models.Model):
content = models.TextField()
template = models.CharField(max_length=255)
class BookSigning(models.Model):
event_date = models.DateTimeField()
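# --- Illustrative sketch (not part of the original module) -------------------
# BaseManager.from_queryset() copies the custom QuerySet's methods onto the
# generated manager class, so the "does_not_exist" manager above behaves
# roughly like this (the calls are shown only as an assumed usage example):
#
#     Book.objects.get(pk=1)          # normal lookup via the default manager
#     Book.does_not_exist.get(pk=1)   # raises Author.DoesNotExist, because the
#                                     # call is routed through
#                                     # DoesNotExistQuerySet.get()
#
# This is presumably what the "/detail/doesnotexist/<pk>/" detail-view test
# exercises: a view whose queryset raises an unrelated model's DoesNotExist.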
|
afac2428a0d1f7e3ecfa64cb78a91873da47144cec8f59eb2e77bcdcbce70ed7 | import datetime
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from django.views.generic.base import View
from django.views.generic.detail import SingleObjectTemplateResponseMixin
from django.views.generic.edit import ModelFormMixin
from .models import Artist, Author, Book, Page
@override_settings(ROOT_URLCONF="generic_views.urls")
class DetailViewTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.artist1 = Artist.objects.create(name="Rene Magritte")
cls.author1 = Author.objects.create(
name="Roberto Bolaño", slug="roberto-bolano"
)
cls.author2 = Author.objects.create(
name="Scott Rosenberg", slug="scott-rosenberg"
)
cls.book1 = Book.objects.create(
name="2066", slug="2066", pages=800, pubdate=datetime.date(2008, 10, 1)
)
cls.book1.authors.add(cls.author1)
cls.book2 = Book.objects.create(
name="Dreaming in Code",
slug="dreaming-in-code",
pages=300,
pubdate=datetime.date(2006, 5, 1),
)
cls.page1 = Page.objects.create(
content="I was once bitten by a moose.",
template="generic_views/page_template.html",
)
def test_simple_object(self):
res = self.client.get("/detail/obj/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], {"foo": "bar"})
self.assertIsInstance(res.context["view"], View)
self.assertTemplateUsed(res, "generic_views/detail.html")
def test_detail_by_pk(self):
res = self.client.get("/detail/author/%s/" % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author1)
self.assertEqual(res.context["author"], self.author1)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_detail_missing_object(self):
res = self.client.get("/detail/author/500/")
self.assertEqual(res.status_code, 404)
def test_detail_object_does_not_exist(self):
with self.assertRaises(ObjectDoesNotExist):
self.client.get("/detail/doesnotexist/1/")
def test_detail_by_custom_pk(self):
res = self.client.get("/detail/author/bycustompk/%s/" % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author1)
self.assertEqual(res.context["author"], self.author1)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_detail_by_slug(self):
res = self.client.get("/detail/author/byslug/scott-rosenberg/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
res.context["object"], Author.objects.get(slug="scott-rosenberg")
)
self.assertEqual(
res.context["author"], Author.objects.get(slug="scott-rosenberg")
)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_detail_by_custom_slug(self):
res = self.client.get("/detail/author/bycustomslug/scott-rosenberg/")
self.assertEqual(res.status_code, 200)
self.assertEqual(
res.context["object"], Author.objects.get(slug="scott-rosenberg")
)
self.assertEqual(
res.context["author"], Author.objects.get(slug="scott-rosenberg")
)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_detail_by_pk_ignore_slug(self):
res = self.client.get(
"/detail/author/bypkignoreslug/%s-roberto-bolano/" % self.author1.pk
)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author1)
self.assertEqual(res.context["author"], self.author1)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_detail_by_pk_ignore_slug_mismatch(self):
res = self.client.get(
"/detail/author/bypkignoreslug/%s-scott-rosenberg/" % self.author1.pk
)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author1)
self.assertEqual(res.context["author"], self.author1)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_detail_by_pk_and_slug(self):
res = self.client.get(
"/detail/author/bypkandslug/%s-roberto-bolano/" % self.author1.pk
)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author1)
self.assertEqual(res.context["author"], self.author1)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_detail_by_pk_and_slug_mismatch_404(self):
res = self.client.get(
"/detail/author/bypkandslug/%s-scott-rosenberg/" % self.author1.pk
)
self.assertEqual(res.status_code, 404)
def test_verbose_name(self):
res = self.client.get("/detail/artist/%s/" % self.artist1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.artist1)
self.assertEqual(res.context["artist"], self.artist1)
self.assertTemplateUsed(res, "generic_views/artist_detail.html")
def test_template_name(self):
res = self.client.get("/detail/author/%s/template_name/" % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author1)
self.assertEqual(res.context["author"], self.author1)
self.assertTemplateUsed(res, "generic_views/about.html")
def test_template_name_suffix(self):
res = self.client.get(
"/detail/author/%s/template_name_suffix/" % self.author1.pk
)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author1)
self.assertEqual(res.context["author"], self.author1)
self.assertTemplateUsed(res, "generic_views/author_view.html")
def test_template_name_field(self):
res = self.client.get("/detail/page/%s/field/" % self.page1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.page1)
self.assertEqual(res.context["page"], self.page1)
self.assertTemplateUsed(res, "generic_views/page_template.html")
def test_context_object_name(self):
res = self.client.get(
"/detail/author/%s/context_object_name/" % self.author1.pk
)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author1)
self.assertEqual(res.context["thingy"], self.author1)
self.assertNotIn("author", res.context)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_duplicated_context_object_name(self):
res = self.client.get(
"/detail/author/%s/dupe_context_object_name/" % self.author1.pk
)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author1)
self.assertNotIn("author", res.context)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_custom_detail(self):
"""
AuthorCustomDetail overrides get() and ensures that
SingleObjectMixin.get_context_object_name() always uses the obj
parameter instead of self.object.
"""
res = self.client.get("/detail/author/%s/custom_detail/" % self.author1.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["custom_author"], self.author1)
self.assertNotIn("author", res.context)
self.assertNotIn("object", res.context)
self.assertTemplateUsed(res, "generic_views/author_detail.html")
def test_deferred_queryset_template_name(self):
class FormContext(SingleObjectTemplateResponseMixin):
request = RequestFactory().get("/")
model = Author
object = Author.objects.defer("name").get(pk=self.author1.pk)
self.assertEqual(
FormContext().get_template_names()[0], "generic_views/author_detail.html"
)
def test_deferred_queryset_context_object_name(self):
class FormContext(ModelFormMixin):
request = RequestFactory().get("/")
model = Author
object = Author.objects.defer("name").get(pk=self.author1.pk)
fields = ("name",)
form_context_data = FormContext().get_context_data()
self.assertEqual(form_context_data["object"], self.author1)
self.assertEqual(form_context_data["author"], self.author1)
def test_invalid_url(self):
with self.assertRaises(AttributeError):
self.client.get("/detail/author/invalid/url/")
def test_invalid_queryset(self):
msg = (
"AuthorDetail is missing a QuerySet. Define AuthorDetail.model, "
"AuthorDetail.queryset, or override AuthorDetail.get_queryset()."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/detail/author/invalid/qs/")
def test_non_model_object_with_meta(self):
res = self.client.get("/detail/nonmodel/1/")
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"].id, "non_model_1")
|
340a2f940cacf32cfd79a51c6623fe10ec2e7cb7b5bc74ee9dcf79a3b89ccd10 | import time
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import require_jinja2
from django.urls import resolve
from django.views.generic import RedirectView, TemplateView, View
from . import views
class SimpleView(View):
"""
A simple view with a docstring.
"""
def get(self, request):
return HttpResponse("This is a simple view")
class SimplePostView(SimpleView):
post = SimpleView.get
class PostOnlyView(View):
def post(self, request):
return HttpResponse("This view only accepts POST")
class CustomizableView(SimpleView):
parameter = {}
def decorator(view):
view.is_decorated = True
return view
class DecoratedDispatchView(SimpleView):
@decorator
def dispatch(self, request, *args, **kwargs):
return super().dispatch(request, *args, **kwargs)
class AboutTemplateView(TemplateView):
def get(self, request):
return self.render_to_response({})
def get_template_names(self):
return ["generic_views/about.html"]
class AboutTemplateAttributeView(TemplateView):
template_name = "generic_views/about.html"
def get(self, request):
return self.render_to_response(context={})
class InstanceView(View):
def get(self, request):
return self
class ViewTest(SimpleTestCase):
rf = RequestFactory()
def _assert_simple(self, response):
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"This is a simple view")
def test_no_init_kwargs(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = "This method is available only on the class, not on instances."
with self.assertRaisesMessage(AttributeError, msg):
SimpleView(key="value").as_view()
def test_no_init_args(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = "as_view() takes 1 positional argument but 2 were given"
with self.assertRaisesMessage(TypeError, msg):
SimpleView.as_view("value")
def test_pathological_http_method(self):
"""
The edge case of an HTTP request that spoofs an existing method name is
caught.
"""
self.assertEqual(
SimpleView.as_view()(
self.rf.get("/", REQUEST_METHOD="DISPATCH")
).status_code,
405,
)
def test_get_only(self):
"""
        Test that a view which only allows GET doesn't allow other methods.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get("/")))
self.assertEqual(SimpleView.as_view()(self.rf.post("/")).status_code, 405)
self.assertEqual(
SimpleView.as_view()(self.rf.get("/", REQUEST_METHOD="FAKE")).status_code,
405,
)
def test_get_and_head(self):
"""
        Test that a view which supplies a GET method also responds correctly to HEAD.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get("/")))
response = SimpleView.as_view()(self.rf.head("/"))
self.assertEqual(response.status_code, 200)
def test_setup_get_and_head(self):
view_instance = SimpleView()
self.assertFalse(hasattr(view_instance, "head"))
view_instance.setup(self.rf.get("/"))
self.assertTrue(hasattr(view_instance, "head"))
self.assertEqual(view_instance.head, view_instance.get)
def test_head_no_get(self):
"""
        Test that a view which supplies no GET method responds to HEAD with HTTP 405.
"""
response = PostOnlyView.as_view()(self.rf.head("/"))
self.assertEqual(response.status_code, 405)
def test_get_and_post(self):
"""
        Test a view which allows both GET and POST.
"""
self._assert_simple(SimplePostView.as_view()(self.rf.get("/")))
self._assert_simple(SimplePostView.as_view()(self.rf.post("/")))
self.assertEqual(
SimplePostView.as_view()(
self.rf.get("/", REQUEST_METHOD="FAKE")
).status_code,
405,
)
def test_invalid_keyword_argument(self):
"""
View arguments must be predefined on the class and can't
be named like an HTTP method.
"""
msg = (
"The method name %s is not accepted as a keyword argument to "
"SimpleView()."
)
# Check each of the allowed method names
for method in SimpleView.http_method_names:
with self.assertRaisesMessage(TypeError, msg % method):
SimpleView.as_view(**{method: "value"})
# Check the case view argument is ok if predefined on the class...
CustomizableView.as_view(parameter="value")
# ...but raises errors otherwise.
msg = (
"CustomizableView() received an invalid keyword 'foobar'. "
"as_view only accepts arguments that are already attributes of "
"the class."
)
with self.assertRaisesMessage(TypeError, msg):
CustomizableView.as_view(foobar="value")
def test_calling_more_than_once(self):
"""
        Test that each call to the view callable uses a fresh view instance.
"""
request = self.rf.get("/")
view = InstanceView.as_view()
self.assertNotEqual(view(request), view(request))
def test_class_attributes(self):
"""
The callable returned from as_view() has proper special attributes.
"""
cls = SimpleView
view = cls.as_view()
self.assertEqual(view.__doc__, cls.__doc__)
self.assertEqual(view.__name__, "view")
self.assertEqual(view.__module__, cls.__module__)
self.assertEqual(view.__qualname__, f"{cls.as_view.__qualname__}.<locals>.view")
self.assertEqual(view.__annotations__, cls.dispatch.__annotations__)
self.assertFalse(hasattr(view, "__wrapped__"))
def test_dispatch_decoration(self):
"""
Attributes set by decorators on the dispatch method
are also present on the closure.
"""
self.assertTrue(DecoratedDispatchView.as_view().is_decorated)
def test_options(self):
"""
Views respond to HTTP OPTIONS requests with an Allow header
appropriate for the methods implemented by the view class.
"""
request = self.rf.options("/")
view = SimpleView.as_view()
response = view(request)
self.assertEqual(200, response.status_code)
self.assertTrue(response.headers["Allow"])
def test_options_for_get_view(self):
"""
A view implementing GET allows GET and HEAD.
"""
request = self.rf.options("/")
view = SimpleView.as_view()
response = view(request)
self._assert_allows(response, "GET", "HEAD")
def test_options_for_get_and_post_view(self):
"""
A view implementing GET and POST allows GET, HEAD, and POST.
"""
request = self.rf.options("/")
view = SimplePostView.as_view()
response = view(request)
self._assert_allows(response, "GET", "HEAD", "POST")
def test_options_for_post_view(self):
"""
A view implementing POST allows POST.
"""
request = self.rf.options("/")
view = PostOnlyView.as_view()
response = view(request)
self._assert_allows(response, "POST")
def _assert_allows(self, response, *expected_methods):
"Assert allowed HTTP methods reported in the Allow response header"
response_allows = set(response.headers["Allow"].split(", "))
self.assertEqual(set(expected_methods + ("OPTIONS",)), response_allows)
def test_args_kwargs_request_on_self(self):
"""
        Test that a view only has args, kwargs & request once `as_view`
        has been called.
"""
bare_view = InstanceView()
view = InstanceView.as_view()(self.rf.get("/"))
for attribute in ("args", "kwargs", "request"):
self.assertNotIn(attribute, dir(bare_view))
self.assertIn(attribute, dir(view))
def test_overridden_setup(self):
class SetAttributeMixin:
def setup(self, request, *args, **kwargs):
self.attr = True
super().setup(request, *args, **kwargs)
class CheckSetupView(SetAttributeMixin, SimpleView):
def dispatch(self, request, *args, **kwargs):
assert hasattr(self, "attr")
return super().dispatch(request, *args, **kwargs)
response = CheckSetupView.as_view()(self.rf.get("/"))
self.assertEqual(response.status_code, 200)
def test_not_calling_parent_setup_error(self):
class TestView(View):
def setup(self, request, *args, **kwargs):
pass # Not calling super().setup()
msg = (
"TestView instance has no 'request' attribute. Did you override "
"setup() and forget to call super()?"
)
with self.assertRaisesMessage(AttributeError, msg):
TestView.as_view()(self.rf.get("/"))
def test_setup_adds_args_kwargs_request(self):
request = self.rf.get("/")
args = ("arg 1", "arg 2")
kwargs = {"kwarg_1": 1, "kwarg_2": "year"}
view = View()
view.setup(request, *args, **kwargs)
self.assertEqual(request, view.request)
self.assertEqual(args, view.args)
self.assertEqual(kwargs, view.kwargs)
def test_direct_instantiation(self):
"""
It should be possible to use the view by directly instantiating it
without going through .as_view() (#21564).
"""
view = PostOnlyView()
response = view.dispatch(self.rf.head("/"))
self.assertEqual(response.status_code, 405)
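# --- Illustrative note (not part of Django's test suite) ---------------------
# The OPTIONS/Allow assertions above follow from View.options(), which reports
# every entry of http_method_names that the instance actually implements. For
# SimpleView (only get()), View.setup() also aliases head to get, so GET, HEAD,
# and OPTIONS are all reported, while PostOnlyView reports POST and OPTIONS.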
@override_settings(ROOT_URLCONF="generic_views.urls")
class TemplateViewTest(SimpleTestCase):
rf = RequestFactory()
def _assert_about(self, response):
response.render()
self.assertContains(response, "<h1>About</h1>")
def test_get(self):
"""
Test a view that simply renders a template on GET
"""
self._assert_about(AboutTemplateView.as_view()(self.rf.get("/about/")))
def test_head(self):
"""
        Test that a TemplateView responds correctly to HEAD
"""
response = AboutTemplateView.as_view()(self.rf.head("/about/"))
self.assertEqual(response.status_code, 200)
def test_get_template_attribute(self):
"""
Test a view that renders a template on GET with the template name as
an attribute on the class.
"""
self._assert_about(AboutTemplateAttributeView.as_view()(self.rf.get("/about/")))
def test_get_generic_template(self):
"""
Test a completely generic view that renders a template on GET
with the template name as an argument at instantiation.
"""
self._assert_about(
TemplateView.as_view(template_name="generic_views/about.html")(
self.rf.get("/about/")
)
)
def test_template_name_required(self):
"""
A template view must provide a template name.
"""
msg = (
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/template/no_template/")
@require_jinja2
def test_template_engine(self):
"""
A template view may provide a template engine.
"""
request = self.rf.get("/using/")
view = TemplateView.as_view(template_name="generic_views/using.html")
self.assertEqual(view(request).render().content, b"DTL\n")
view = TemplateView.as_view(
template_name="generic_views/using.html", template_engine="django"
)
self.assertEqual(view(request).render().content, b"DTL\n")
view = TemplateView.as_view(
template_name="generic_views/using.html", template_engine="jinja2"
)
self.assertEqual(view(request).render().content, b"Jinja2\n")
def test_template_params(self):
"""
A generic template view passes kwargs as context.
"""
response = self.client.get("/template/simple/bar/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["foo"], "bar")
self.assertIsInstance(response.context["view"], View)
def test_extra_template_params(self):
"""
A template view can be customized to return extra context.
"""
response = self.client.get("/template/custom/bar/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["foo"], "bar")
self.assertEqual(response.context["key"], "value")
self.assertIsInstance(response.context["view"], View)
def test_cached_views(self):
"""
A template view can be cached
"""
response = self.client.get("/template/cached/bar/")
self.assertEqual(response.status_code, 200)
time.sleep(1.0)
response2 = self.client.get("/template/cached/bar/")
self.assertEqual(response2.status_code, 200)
self.assertEqual(response.content, response2.content)
time.sleep(2.0)
# Let the cache expire and test again
response2 = self.client.get("/template/cached/bar/")
self.assertEqual(response2.status_code, 200)
self.assertNotEqual(response.content, response2.content)
def test_content_type(self):
response = self.client.get("/template/content_type/")
self.assertEqual(response.headers["Content-Type"], "text/plain")
def test_resolve_view(self):
match = resolve("/template/content_type/")
self.assertIs(match.func.view_class, TemplateView)
self.assertEqual(match.func.view_initkwargs["content_type"], "text/plain")
def test_resolve_login_required_view(self):
match = resolve("/template/login_required/")
self.assertIs(match.func.view_class, TemplateView)
def test_extra_context(self):
response = self.client.get("/template/extra_context/")
self.assertEqual(response.context["title"], "Title")
@override_settings(ROOT_URLCONF="generic_views.urls")
class RedirectViewTest(SimpleTestCase):
rf = RequestFactory()
def test_no_url(self):
"Without any configuration, returns HTTP 410 GONE"
response = RedirectView.as_view()(self.rf.get("/foo/"))
self.assertEqual(response.status_code, 410)
def test_default_redirect(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.get("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_permanent_redirect(self):
"Permanent redirects are an option"
response = RedirectView.as_view(url="/bar/", permanent=True)(
self.rf.get("/foo/")
)
self.assertEqual(response.status_code, 301)
self.assertEqual(response.url, "/bar/")
def test_temporary_redirect(self):
"Temporary redirects are an option"
response = RedirectView.as_view(url="/bar/", permanent=False)(
self.rf.get("/foo/")
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_include_args(self):
"GET arguments can be included in the redirected URL"
response = RedirectView.as_view(url="/bar/")(self.rf.get("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
response = RedirectView.as_view(url="/bar/", query_string=True)(
self.rf.get("/foo/?pork=spam")
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/?pork=spam")
def test_include_urlencoded_args(self):
"GET arguments can be URL-encoded when included in the redirected URL"
response = RedirectView.as_view(url="/bar/", query_string=True)(
self.rf.get("/foo/?unicode=%E2%9C%93")
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/?unicode=%E2%9C%93")
def test_parameter_substitution(self):
"Redirection URLs can be parameterized"
response = RedirectView.as_view(url="/bar/%(object_id)d/")(
self.rf.get("/foo/42/"), object_id=42
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/42/")
def test_named_url_pattern(self):
"Named pattern parameter should reverse to the matching pattern"
response = RedirectView.as_view(pattern_name="artist_detail")(
self.rf.get("/foo/"), pk=1
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.headers["Location"], "/detail/artist/1/")
def test_named_url_pattern_using_args(self):
response = RedirectView.as_view(pattern_name="artist_detail")(
self.rf.get("/foo/"), 1
)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.headers["Location"], "/detail/artist/1/")
def test_redirect_POST(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.post("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_HEAD(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.head("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_OPTIONS(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.options("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_PUT(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.put("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_PATCH(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.patch("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_DELETE(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url="/bar/")(self.rf.delete("/foo/"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, "/bar/")
def test_redirect_when_meta_contains_no_query_string(self):
"regression for #16705"
# we can't use self.rf.get because it always sets QUERY_STRING
response = RedirectView.as_view(url="/bar/")(self.rf.request(PATH_INFO="/foo/"))
self.assertEqual(response.status_code, 302)
def test_direct_instantiation(self):
"""
It should be possible to use the view without going through .as_view()
(#21564).
"""
view = RedirectView()
response = view.dispatch(self.rf.head("/foo/"))
self.assertEqual(response.status_code, 410)
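# --- Illustrative sketch (not part of Django's test suite) -------------------
# RedirectView builds its target either from `pattern_name` (via reverse()) or,
# when `url` is set, by %-interpolating the captured URL kwargs into it, which
# is what the parameter-substitution test above relies on. The helper name is
# hypothetical:
def _redirect_url_interpolation_example():
    """Show the %-style interpolation RedirectView applies to `url`."""
    url_template = "/bar/%(object_id)d/"
    return url_template % {"object_id": 42}  # "/bar/42/"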
class GetContextDataTest(SimpleTestCase):
def test_get_context_data_super(self):
test_view = views.CustomContextView()
context = test_view.get_context_data(kwarg_test="kwarg_value")
        # the test_name key is inserted by the test class's parent
self.assertIn("test_name", context)
self.assertEqual(context["kwarg_test"], "kwarg_value")
self.assertEqual(context["custom_key"], "custom_value")
# test that kwarg overrides values assigned higher up
context = test_view.get_context_data(test_name="test_value")
self.assertEqual(context["test_name"], "test_value")
def test_object_at_custom_name_in_context_data(self):
        # Checks 'pony' key presence in dict returned by get_context_data()
test_view = views.CustomSingleObjectView()
test_view.context_object_name = "pony"
context = test_view.get_context_data()
self.assertEqual(context["pony"], test_view.object)
def test_object_in_get_context_data(self):
        # Checks 'object' key presence in dict returned by get_context_data() (#20234)
test_view = views.CustomSingleObjectView()
context = test_view.get_context_data()
self.assertEqual(context["object"], test_view.object)
class UseMultipleObjectMixinTest(SimpleTestCase):
rf = RequestFactory()
def test_use_queryset_from_view(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get("/"))
# Don't pass queryset as argument
context = test_view.get_context_data()
self.assertEqual(context["object_list"], test_view.queryset)
def test_overwrite_queryset(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get("/"))
queryset = [{"name": "Lennon"}, {"name": "Ono"}]
self.assertNotEqual(test_view.queryset, queryset)
# Overwrite the view's queryset with queryset from kwarg
context = test_view.get_context_data(object_list=queryset)
self.assertEqual(context["object_list"], queryset)
class SingleObjectTemplateResponseMixinTest(SimpleTestCase):
def test_template_mixin_without_template(self):
"""
        Make sure that if you use a template mixin but forget the template, it
        raises ImproperlyConfigured instead of TemplateDoesNotExist.
"""
view = views.TemplateResponseWithoutTemplate()
msg = (
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
view.get_template_names()
|
3274fad03f481452f508e892250ae9f469b2450daf531ac540d6ceaae851221b | from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.client import RequestFactory
from django.urls import reverse
from django.views.generic.base import View
from django.views.generic.edit import (
CreateView,
DeleteView,
DeleteViewCustomDeleteWarning,
FormMixin,
ModelFormMixin,
)
from . import views
from .forms import AuthorForm
from .models import Artist, Author
class FormMixinTests(SimpleTestCase):
request_factory = RequestFactory()
def test_initial_data(self):
"""Test instance independence of initial data dict (see #16138)"""
initial_1 = FormMixin().get_initial()
initial_1["foo"] = "bar"
initial_2 = FormMixin().get_initial()
self.assertNotEqual(initial_1, initial_2)
def test_get_prefix(self):
"""Test prefix can be set (see #18872)"""
test_string = "test"
get_request = self.request_factory.get("/")
class TestFormMixin(FormMixin):
request = get_request
default_kwargs = TestFormMixin().get_form_kwargs()
self.assertIsNone(default_kwargs.get("prefix"))
set_mixin = TestFormMixin()
set_mixin.prefix = test_string
set_kwargs = set_mixin.get_form_kwargs()
self.assertEqual(test_string, set_kwargs.get("prefix"))
def test_get_form(self):
class TestFormMixin(FormMixin):
request = self.request_factory.get("/")
self.assertIsInstance(
TestFormMixin().get_form(forms.Form),
forms.Form,
"get_form() should use provided form class.",
)
class FormClassTestFormMixin(TestFormMixin):
form_class = forms.Form
self.assertIsInstance(
FormClassTestFormMixin().get_form(),
forms.Form,
"get_form() should fallback to get_form_class() if none is provided.",
)
def test_get_context_data(self):
class FormContext(FormMixin):
request = self.request_factory.get("/")
form_class = forms.Form
self.assertIsInstance(FormContext().get_context_data()["form"], forms.Form)
@override_settings(ROOT_URLCONF="generic_views.urls")
class BasicFormTests(TestCase):
def test_post_data(self):
res = self.client.post("/contact/", {"name": "Me", "message": "Hello"})
self.assertRedirects(res, "/list/authors/")
def test_late_form_validation(self):
"""
A form can be marked invalid in the form_valid() method (#25548).
"""
res = self.client.post("/late-validation/", {"name": "Me", "message": "Hello"})
self.assertFalse(res.context["form"].is_valid())
class ModelFormMixinTests(SimpleTestCase):
def test_get_form(self):
form_class = views.AuthorGetQuerySetFormView().get_form_class()
self.assertEqual(form_class._meta.model, Author)
def test_get_form_checks_for_object(self):
mixin = ModelFormMixin()
mixin.request = RequestFactory().get("/")
self.assertEqual({"initial": {}, "prefix": None}, mixin.get_form_kwargs())
@override_settings(ROOT_URLCONF="generic_views.urls")
class CreateViewTests(TestCase):
def test_create(self):
res = self.client.get("/edit/authors/create/")
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context["form"], forms.ModelForm)
self.assertIsInstance(res.context["view"], View)
self.assertNotIn("object", res.context)
self.assertNotIn("author", res.context)
self.assertTemplateUsed(res, "generic_views/author_form.html")
res = self.client.post(
"/edit/authors/create/",
{"name": "Randall Munroe", "slug": "randall-munroe"},
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/list/authors/")
self.assertQuerysetEqual(
Author.objects.values_list("name", flat=True), ["Randall Munroe"]
)
def test_create_invalid(self):
res = self.client.post(
"/edit/authors/create/", {"name": "A" * 101, "slug": "randall-munroe"}
)
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/author_form.html")
self.assertEqual(len(res.context["form"].errors), 1)
self.assertEqual(Author.objects.count(), 0)
def test_create_with_object_url(self):
res = self.client.post("/edit/artists/create/", {"name": "Rene Magritte"})
self.assertEqual(res.status_code, 302)
artist = Artist.objects.get(name="Rene Magritte")
self.assertRedirects(res, "/detail/artist/%d/" % artist.pk)
self.assertQuerysetEqual(Artist.objects.all(), [artist])
def test_create_with_redirect(self):
res = self.client.post(
"/edit/authors/create/redirect/",
{"name": "Randall Munroe", "slug": "randall-munroe"},
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/edit/authors/create/")
self.assertQuerysetEqual(
Author.objects.values_list("name", flat=True), ["Randall Munroe"]
)
def test_create_with_interpolated_redirect(self):
res = self.client.post(
"/edit/authors/create/interpolate_redirect/",
{"name": "Randall Munroe", "slug": "randall-munroe"},
)
self.assertQuerysetEqual(
Author.objects.values_list("name", flat=True), ["Randall Munroe"]
)
self.assertEqual(res.status_code, 302)
pk = Author.objects.first().pk
self.assertRedirects(res, "/edit/author/%d/update/" % pk)
# Also test with escaped chars in URL
res = self.client.post(
"/edit/authors/create/interpolate_redirect_nonascii/",
{"name": "John Doe", "slug": "john-doe"},
)
self.assertEqual(res.status_code, 302)
pk = Author.objects.get(name="John Doe").pk
self.assertRedirects(res, "/%C3%A9dit/author/{}/update/".format(pk))
def test_create_with_special_properties(self):
res = self.client.get("/edit/authors/create/special/")
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context["form"], views.AuthorForm)
self.assertNotIn("object", res.context)
self.assertNotIn("author", res.context)
self.assertTemplateUsed(res, "generic_views/form.html")
res = self.client.post(
"/edit/authors/create/special/",
{"name": "Randall Munroe", "slug": "randall-munroe"},
)
self.assertEqual(res.status_code, 302)
obj = Author.objects.get(slug="randall-munroe")
self.assertRedirects(res, reverse("author_detail", kwargs={"pk": obj.pk}))
self.assertQuerysetEqual(Author.objects.all(), [obj])
def test_create_without_redirect(self):
msg = (
"No URL to redirect to. Either provide a url or define a "
"get_absolute_url method on the Model."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.post(
"/edit/authors/create/naive/",
{"name": "Randall Munroe", "slug": "randall-munroe"},
)
def test_create_restricted(self):
res = self.client.post(
"/edit/authors/create/restricted/",
{"name": "Randall Munroe", "slug": "randall-munroe"},
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(
res, "/accounts/login/?next=/edit/authors/create/restricted/"
)
def test_create_view_with_restricted_fields(self):
class MyCreateView(CreateView):
model = Author
fields = ["name"]
self.assertEqual(list(MyCreateView().get_form_class().base_fields), ["name"])
def test_create_view_all_fields(self):
class MyCreateView(CreateView):
model = Author
fields = "__all__"
self.assertEqual(
list(MyCreateView().get_form_class().base_fields), ["name", "slug"]
)
def test_create_view_without_explicit_fields(self):
class MyCreateView(CreateView):
model = Author
message = (
"Using ModelFormMixin (base class of MyCreateView) without the "
"'fields' attribute is prohibited."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
MyCreateView().get_form_class()
def test_define_both_fields_and_form_class(self):
class MyCreateView(CreateView):
model = Author
form_class = AuthorForm
fields = ["name"]
message = "Specifying both 'fields' and 'form_class' is not permitted."
with self.assertRaisesMessage(ImproperlyConfigured, message):
MyCreateView().get_form_class()
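    # For reference, a minimal sketch of a configuration that satisfies both
    # checks above: set exactly one of "fields" or "form_class". The view name
    # below is illustrative only and is not used by these tests.
    #
    #     class AuthorFormClassCreateView(CreateView):
    #         model = Author
    #         form_class = AuthorForm
    #         success_url = "/list/authors/"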
@override_settings(ROOT_URLCONF="generic_views.urls")
class UpdateViewTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.author = Author.objects.create(
pk=1, # Required for OneAuthorUpdate.
name="Randall Munroe",
slug="randall-munroe",
)
def test_update_post(self):
res = self.client.get("/edit/author/%d/update/" % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context["form"], forms.ModelForm)
self.assertEqual(res.context["object"], self.author)
self.assertEqual(res.context["author"], self.author)
self.assertTemplateUsed(res, "generic_views/author_form.html")
self.assertEqual(res.context["view"].get_form_called_count, 1)
# Modification with both POST and PUT (browser compatible)
res = self.client.post(
"/edit/author/%d/update/" % self.author.pk,
{"name": "Randall Munroe (xkcd)", "slug": "randall-munroe"},
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/list/authors/")
self.assertQuerysetEqual(
Author.objects.values_list("name", flat=True), ["Randall Munroe (xkcd)"]
)
def test_update_invalid(self):
res = self.client.post(
"/edit/author/%d/update/" % self.author.pk,
{"name": "A" * 101, "slug": "randall-munroe"},
)
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, "generic_views/author_form.html")
self.assertEqual(len(res.context["form"].errors), 1)
self.assertQuerysetEqual(Author.objects.all(), [self.author])
self.assertEqual(res.context["view"].get_form_called_count, 1)
def test_update_with_object_url(self):
a = Artist.objects.create(name="Rene Magritte")
res = self.client.post(
"/edit/artists/%d/update/" % a.pk, {"name": "Rene Magritte"}
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/detail/artist/%d/" % a.pk)
self.assertQuerysetEqual(Artist.objects.all(), [a])
def test_update_with_redirect(self):
res = self.client.post(
"/edit/author/%d/update/redirect/" % self.author.pk,
{"name": "Randall Munroe (author of xkcd)", "slug": "randall-munroe"},
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/edit/authors/create/")
self.assertQuerysetEqual(
Author.objects.values_list("name", flat=True),
["Randall Munroe (author of xkcd)"],
)
def test_update_with_interpolated_redirect(self):
res = self.client.post(
"/edit/author/%d/update/interpolate_redirect/" % self.author.pk,
{"name": "Randall Munroe (author of xkcd)", "slug": "randall-munroe"},
)
self.assertQuerysetEqual(
Author.objects.values_list("name", flat=True),
["Randall Munroe (author of xkcd)"],
)
self.assertEqual(res.status_code, 302)
pk = Author.objects.first().pk
self.assertRedirects(res, "/edit/author/%d/update/" % pk)
# Also test with escaped chars in URL
res = self.client.post(
"/edit/author/%d/update/interpolate_redirect_nonascii/" % self.author.pk,
{"name": "John Doe", "slug": "john-doe"},
)
self.assertEqual(res.status_code, 302)
pk = Author.objects.get(name="John Doe").pk
self.assertRedirects(res, "/%C3%A9dit/author/{}/update/".format(pk))
def test_update_with_special_properties(self):
res = self.client.get("/edit/author/%d/update/special/" % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context["form"], views.AuthorForm)
self.assertEqual(res.context["object"], self.author)
self.assertEqual(res.context["thingy"], self.author)
self.assertNotIn("author", res.context)
self.assertTemplateUsed(res, "generic_views/form.html")
res = self.client.post(
"/edit/author/%d/update/special/" % self.author.pk,
{"name": "Randall Munroe (author of xkcd)", "slug": "randall-munroe"},
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/detail/author/%d/" % self.author.pk)
self.assertQuerysetEqual(
Author.objects.values_list("name", flat=True),
["Randall Munroe (author of xkcd)"],
)
def test_update_without_redirect(self):
msg = (
"No URL to redirect to. Either provide a url or define a "
"get_absolute_url method on the Model."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.post(
"/edit/author/%d/update/naive/" % self.author.pk,
{"name": "Randall Munroe (author of xkcd)", "slug": "randall-munroe"},
)
def test_update_get_object(self):
res = self.client.get("/edit/author/update/")
self.assertEqual(res.status_code, 200)
self.assertIsInstance(res.context["form"], forms.ModelForm)
self.assertIsInstance(res.context["view"], View)
self.assertEqual(res.context["object"], self.author)
self.assertEqual(res.context["author"], self.author)
self.assertTemplateUsed(res, "generic_views/author_form.html")
# Modification with both POST and PUT (browser compatible)
res = self.client.post(
"/edit/author/update/",
{"name": "Randall Munroe (xkcd)", "slug": "randall-munroe"},
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/list/authors/")
self.assertQuerysetEqual(
Author.objects.values_list("name", flat=True), ["Randall Munroe (xkcd)"]
)
@override_settings(ROOT_URLCONF="generic_views.urls")
class DeleteViewTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.author = Author.objects.create(
name="Randall Munroe",
slug="randall-munroe",
)
def test_delete_by_post(self):
res = self.client.get("/edit/author/%d/delete/" % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author)
self.assertEqual(res.context["author"], self.author)
self.assertTemplateUsed(res, "generic_views/author_confirm_delete.html")
# Deletion with POST
res = self.client.post("/edit/author/%d/delete/" % self.author.pk)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/list/authors/")
self.assertQuerysetEqual(Author.objects.all(), [])
def test_delete_by_delete(self):
# Deletion with browser compatible DELETE method
res = self.client.delete("/edit/author/%d/delete/" % self.author.pk)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/list/authors/")
self.assertQuerysetEqual(Author.objects.all(), [])
def test_delete_with_redirect(self):
res = self.client.post("/edit/author/%d/delete/redirect/" % self.author.pk)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/edit/authors/create/")
self.assertQuerysetEqual(Author.objects.all(), [])
def test_delete_with_interpolated_redirect(self):
res = self.client.post(
"/edit/author/%d/delete/interpolate_redirect/" % self.author.pk
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/edit/authors/create/?deleted=%d" % self.author.pk)
self.assertQuerysetEqual(Author.objects.all(), [])
# Also test with escaped chars in URL
a = Author.objects.create(
**{"name": "Randall Munroe", "slug": "randall-munroe"}
)
res = self.client.post(
"/edit/author/{}/delete/interpolate_redirect_nonascii/".format(a.pk)
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/%C3%A9dit/authors/create/?deleted={}".format(a.pk))
def test_delete_with_special_properties(self):
res = self.client.get("/edit/author/%d/delete/special/" % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author)
self.assertEqual(res.context["thingy"], self.author)
self.assertNotIn("author", res.context)
self.assertTemplateUsed(res, "generic_views/confirm_delete.html")
res = self.client.post("/edit/author/%d/delete/special/" % self.author.pk)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/list/authors/")
self.assertQuerysetEqual(Author.objects.all(), [])
def test_delete_without_redirect(self):
msg = "No URL to redirect to. Provide a success_url."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.post("/edit/author/%d/delete/naive/" % self.author.pk)
def test_delete_with_form_as_post(self):
res = self.client.get("/edit/author/%d/delete/form/" % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author)
self.assertEqual(res.context["author"], self.author)
self.assertTemplateUsed(res, "generic_views/author_confirm_delete.html")
res = self.client.post(
"/edit/author/%d/delete/form/" % self.author.pk, data={"confirm": True}
)
self.assertEqual(res.status_code, 302)
self.assertRedirects(res, "/list/authors/")
self.assertSequenceEqual(Author.objects.all(), [])
def test_delete_with_form_as_post_with_validation_error(self):
res = self.client.get("/edit/author/%d/delete/form/" % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context["object"], self.author)
self.assertEqual(res.context["author"], self.author)
self.assertTemplateUsed(res, "generic_views/author_confirm_delete.html")
res = self.client.post("/edit/author/%d/delete/form/" % self.author.pk)
self.assertEqual(res.status_code, 200)
self.assertEqual(len(res.context_data["form"].errors), 2)
self.assertEqual(
res.context_data["form"].errors["__all__"],
["You must confirm the delete."],
)
self.assertEqual(
res.context_data["form"].errors["confirm"],
["This field is required."],
)
# RemovedInDjango50Warning.
def test_delete_with_custom_delete(self):
class AuthorDeleteView(DeleteView):
model = Author
def delete(self, request, *args, **kwargs):
# Custom logic.
pass
msg = (
"DeleteView uses FormMixin to handle POST requests. As a "
"consequence, any custom deletion logic in "
"AuthorDeleteView.delete() handler should be moved to "
"form_valid()."
)
        with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
AuthorDeleteView()
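# A minimal sketch of the replacement pattern the warning above points to:
# custom deletion logic implemented in form_valid() rather than delete(). The
# view below is illustrative only and is not exercised by the tests in this
# module.
class LoggingAuthorDeleteView(DeleteView):
    model = Author
    success_url = "/list/authors/"
    deleted_slugs = []
    def form_valid(self, form):
        # Record the slug before DeleteView's own form_valid() deletes the
        # object and redirects to success_url.
        self.deleted_slugs.append(self.object.slug)
        return super().form_valid(form)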
|
6683ec066013d2b907035e534be8fcfadf3bcb1bde3a985370803a54cf8166aa | from django.contrib.auth import views as auth_views
from django.contrib.auth.decorators import login_required
from django.urls import path, re_path
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView, dates
from . import views
from .models import Book
urlpatterns = [
# TemplateView
path("template/no_template/", TemplateView.as_view()),
path("template/login_required/", login_required(TemplateView.as_view())),
path(
"template/simple/<foo>/",
TemplateView.as_view(template_name="generic_views/about.html"),
),
path(
"template/custom/<foo>/",
views.CustomTemplateView.as_view(template_name="generic_views/about.html"),
),
path(
"template/content_type/",
TemplateView.as_view(
template_name="generic_views/robots.txt", content_type="text/plain"
),
),
path(
"template/cached/<foo>/",
cache_page(2.0)(TemplateView.as_view(template_name="generic_views/about.html")),
),
path(
"template/extra_context/",
TemplateView.as_view(
template_name="generic_views/about.html", extra_context={"title": "Title"}
),
),
# DetailView
path("detail/obj/", views.ObjectDetail.as_view()),
path("detail/artist/<int:pk>/", views.ArtistDetail.as_view(), name="artist_detail"),
path("detail/author/<int:pk>/", views.AuthorDetail.as_view(), name="author_detail"),
path(
"detail/author/bycustompk/<foo>/",
views.AuthorDetail.as_view(pk_url_kwarg="foo"),
),
path("detail/author/byslug/<slug>/", views.AuthorDetail.as_view()),
path(
"detail/author/bycustomslug/<foo>/",
views.AuthorDetail.as_view(slug_url_kwarg="foo"),
),
path("detail/author/bypkignoreslug/<int:pk>-<slug>/", views.AuthorDetail.as_view()),
path(
"detail/author/bypkandslug/<int:pk>-<slug>/",
views.AuthorDetail.as_view(query_pk_and_slug=True),
),
path(
"detail/author/<int:pk>/template_name_suffix/",
views.AuthorDetail.as_view(template_name_suffix="_view"),
),
path(
"detail/author/<int:pk>/template_name/",
views.AuthorDetail.as_view(template_name="generic_views/about.html"),
),
path(
"detail/author/<int:pk>/context_object_name/",
views.AuthorDetail.as_view(context_object_name="thingy"),
),
path("detail/author/<int:pk>/custom_detail/", views.AuthorCustomDetail.as_view()),
path(
"detail/author/<int:pk>/dupe_context_object_name/",
views.AuthorDetail.as_view(context_object_name="object"),
),
path("detail/page/<int:pk>/field/", views.PageDetail.as_view()),
path(r"detail/author/invalid/url/", views.AuthorDetail.as_view()),
path("detail/author/invalid/qs/", views.AuthorDetail.as_view(queryset=None)),
path("detail/nonmodel/1/", views.NonModelDetail.as_view()),
path("detail/doesnotexist/<pk>/", views.ObjectDoesNotExistDetail.as_view()),
# FormView
path("contact/", views.ContactView.as_view()),
path("late-validation/", views.LateValidationView.as_view()),
# Create/UpdateView
path("edit/artists/create/", views.ArtistCreate.as_view()),
path("edit/artists/<int:pk>/update/", views.ArtistUpdate.as_view()),
path("edit/authors/create/naive/", views.NaiveAuthorCreate.as_view()),
path(
"edit/authors/create/redirect/",
views.NaiveAuthorCreate.as_view(success_url="/edit/authors/create/"),
),
path(
"edit/authors/create/interpolate_redirect/",
views.NaiveAuthorCreate.as_view(success_url="/edit/author/{id}/update/"),
),
path(
"edit/authors/create/interpolate_redirect_nonascii/",
views.NaiveAuthorCreate.as_view(success_url="/%C3%A9dit/author/{id}/update/"),
),
path("edit/authors/create/restricted/", views.AuthorCreateRestricted.as_view()),
re_path("^[eé]dit/authors/create/$", views.AuthorCreate.as_view()),
path("edit/authors/create/special/", views.SpecializedAuthorCreate.as_view()),
path("edit/author/<int:pk>/update/naive/", views.NaiveAuthorUpdate.as_view()),
path(
"edit/author/<int:pk>/update/redirect/",
views.NaiveAuthorUpdate.as_view(success_url="/edit/authors/create/"),
),
path(
"edit/author/<int:pk>/update/interpolate_redirect/",
views.NaiveAuthorUpdate.as_view(success_url="/edit/author/{id}/update/"),
),
path(
"edit/author/<int:pk>/update/interpolate_redirect_nonascii/",
views.NaiveAuthorUpdate.as_view(success_url="/%C3%A9dit/author/{id}/update/"),
),
re_path("^[eé]dit/author/(?P<pk>[0-9]+)/update/$", views.AuthorUpdate.as_view()),
path("edit/author/update/", views.OneAuthorUpdate.as_view()),
path(
"edit/author/<int:pk>/update/special/", views.SpecializedAuthorUpdate.as_view()
),
path("edit/author/<int:pk>/delete/naive/", views.NaiveAuthorDelete.as_view()),
path(
"edit/author/<int:pk>/delete/redirect/",
views.NaiveAuthorDelete.as_view(success_url="/edit/authors/create/"),
),
path(
"edit/author/<int:pk>/delete/interpolate_redirect/",
views.NaiveAuthorDelete.as_view(
success_url="/edit/authors/create/?deleted={id}"
),
),
path(
"edit/author/<int:pk>/delete/interpolate_redirect_nonascii/",
views.NaiveAuthorDelete.as_view(
success_url="/%C3%A9dit/authors/create/?deleted={id}"
),
),
path("edit/author/<int:pk>/delete/", views.AuthorDelete.as_view()),
path(
"edit/author/<int:pk>/delete/special/", views.SpecializedAuthorDelete.as_view()
),
path("edit/author/<int:pk>/delete/form/", views.AuthorDeleteFormView.as_view()),
# ArchiveIndexView
path("dates/books/", views.BookArchive.as_view()),
path(
"dates/books/context_object_name/",
views.BookArchive.as_view(context_object_name="thingies"),
),
path("dates/books/allow_empty/", views.BookArchive.as_view(allow_empty=True)),
path(
"dates/books/template_name/",
views.BookArchive.as_view(template_name="generic_views/list.html"),
),
path(
"dates/books/template_name_suffix/",
views.BookArchive.as_view(template_name_suffix="_detail"),
),
path("dates/books/invalid/", views.BookArchive.as_view(queryset=None)),
path("dates/books/paginated/", views.BookArchive.as_view(paginate_by=10)),
path(
"dates/books/reverse/",
views.BookArchive.as_view(queryset=Book.objects.order_by("pubdate")),
),
path("dates/books/by_month/", views.BookArchive.as_view(date_list_period="month")),
path("dates/booksignings/", views.BookSigningArchive.as_view()),
path("dates/books/sortedbyname/", views.BookArchive.as_view(ordering="name")),
path("dates/books/sortedbynamedec/", views.BookArchive.as_view(ordering="-name")),
path(
"dates/books/without_date_field/", views.BookArchiveWithoutDateField.as_view()
),
# ListView
path("list/dict/", views.DictList.as_view()),
path("list/dict/paginated/", views.DictList.as_view(paginate_by=1)),
path("list/artists/", views.ArtistList.as_view(), name="artists_list"),
path("list/authors/", views.AuthorList.as_view(), name="authors_list"),
path("list/authors/paginated/", views.AuthorList.as_view(paginate_by=30)),
path(
"list/authors/paginated/<int:page>/", views.AuthorList.as_view(paginate_by=30)
),
path(
"list/authors/paginated-orphaned/",
views.AuthorList.as_view(paginate_by=30, paginate_orphans=2),
),
path("list/authors/notempty/", views.AuthorList.as_view(allow_empty=False)),
path(
"list/authors/notempty/paginated/",
views.AuthorList.as_view(allow_empty=False, paginate_by=2),
),
path(
"list/authors/template_name/",
views.AuthorList.as_view(template_name="generic_views/list.html"),
),
path(
"list/authors/template_name_suffix/",
views.AuthorList.as_view(template_name_suffix="_objects"),
),
path(
"list/authors/context_object_name/",
views.AuthorList.as_view(context_object_name="author_list"),
),
path(
"list/authors/dupe_context_object_name/",
views.AuthorList.as_view(context_object_name="object_list"),
),
path("list/authors/invalid/", views.AuthorList.as_view(queryset=None)),
path(
"list/authors/get_queryset/",
views.AuthorListGetQuerysetReturnsNone.as_view(),
),
path(
"list/authors/paginated/custom_class/",
views.AuthorList.as_view(paginate_by=5, paginator_class=views.CustomPaginator),
),
path(
"list/authors/paginated/custom_page_kwarg/",
views.AuthorList.as_view(paginate_by=30, page_kwarg="pagina"),
),
path(
"list/authors/paginated/custom_constructor/",
views.AuthorListCustomPaginator.as_view(),
),
path("list/books/sorted/", views.BookList.as_view(ordering="name")),
path(
"list/books/sortedbypagesandnamedec/",
views.BookList.as_view(ordering=("pages", "-name")),
),
# YearArchiveView
# Mixing keyword and positional captures below is intentional; the views
# ought to be able to accept either.
path("dates/books/<int:year>/", views.BookYearArchive.as_view()),
path(
"dates/books/<int:year>/make_object_list/",
views.BookYearArchive.as_view(make_object_list=True),
),
path(
"dates/books/<int:year>/allow_empty/",
views.BookYearArchive.as_view(allow_empty=True),
),
path(
"dates/books/<int:year>/allow_future/",
views.BookYearArchive.as_view(allow_future=True),
),
path(
"dates/books/<int:year>/paginated/",
views.BookYearArchive.as_view(make_object_list=True, paginate_by=30),
),
path(
"dates/books/<int:year>/sortedbyname/",
views.BookYearArchive.as_view(make_object_list=True, ordering="name"),
),
path(
"dates/books/<int:year>/sortedbypageandnamedec/",
views.BookYearArchive.as_view(
make_object_list=True, ordering=("pages", "-name")
),
),
path("dates/books/no_year/", views.BookYearArchive.as_view()),
path(
"dates/books/<int:year>/reverse/",
views.BookYearArchive.as_view(queryset=Book.objects.order_by("pubdate")),
),
path("dates/booksignings/<int:year>/", views.BookSigningYearArchive.as_view()),
# MonthArchiveView
path(
"dates/books/<int:year>/<int:month>/",
views.BookMonthArchive.as_view(month_format="%m"),
),
path("dates/books/<int:year>/<month>/", views.BookMonthArchive.as_view()),
path("dates/books/without_month/<int:year>/", views.BookMonthArchive.as_view()),
path(
"dates/books/<int:year>/<month>/allow_empty/",
views.BookMonthArchive.as_view(allow_empty=True),
),
path(
"dates/books/<int:year>/<month>/allow_future/",
views.BookMonthArchive.as_view(allow_future=True),
),
path(
"dates/books/<int:year>/<month>/paginated/",
views.BookMonthArchive.as_view(paginate_by=30),
),
path("dates/books/<int:year>/no_month/", views.BookMonthArchive.as_view()),
path(
"dates/booksignings/<int:year>/<month>/",
views.BookSigningMonthArchive.as_view(),
),
# WeekArchiveView
path("dates/books/<int:year>/week/<int:week>/", views.BookWeekArchive.as_view()),
path(
"dates/books/<int:year>/week/<int:week>/allow_empty/",
views.BookWeekArchive.as_view(allow_empty=True),
),
path(
"dates/books/<int:year>/week/<int:week>/allow_future/",
views.BookWeekArchive.as_view(allow_future=True),
),
path(
"dates/books/<int:year>/week/<int:week>/paginated/",
views.BookWeekArchive.as_view(paginate_by=30),
),
path("dates/books/<int:year>/week/no_week/", views.BookWeekArchive.as_view()),
path(
"dates/books/<int:year>/week/<int:week>/monday/",
views.BookWeekArchive.as_view(week_format="%W"),
),
path(
"dates/books/<int:year>/week/<int:week>/unknown_week_format/",
views.BookWeekArchive.as_view(week_format="%T"),
),
path(
"dates/books/<int:year>/week/<int:week>/iso_format/",
views.BookWeekArchive.as_view(year_format="%G", week_format="%V"),
),
path(
"dates/books/<int:year>/week/<int:week>/invalid_iso_week_year_format/",
views.BookWeekArchive.as_view(week_format="%V"),
),
path(
"dates/booksignings/<int:year>/week/<int:week>/",
views.BookSigningWeekArchive.as_view(),
),
# DayArchiveView
path(
"dates/books/<int:year>/<int:month>/<int:day>/",
views.BookDayArchive.as_view(month_format="%m"),
),
path("dates/books/<int:year>/<month>/<int:day>/", views.BookDayArchive.as_view()),
path(
"dates/books/<int:year>/<month>/<int:day>/allow_empty/",
views.BookDayArchive.as_view(allow_empty=True),
),
path(
"dates/books/<int:year>/<month>/<int:day>/allow_future/",
views.BookDayArchive.as_view(allow_future=True),
),
path(
"dates/books/<int:year>/<month>/<int:day>/allow_empty_and_future/",
views.BookDayArchive.as_view(allow_empty=True, allow_future=True),
),
path(
"dates/books/<int:year>/<month>/<int:day>/paginated/",
views.BookDayArchive.as_view(paginate_by=True),
),
path("dates/books/<int:year>/<month>/no_day/", views.BookDayArchive.as_view()),
path(
"dates/booksignings/<int:year>/<month>/<int:day>/",
views.BookSigningDayArchive.as_view(),
),
# TodayArchiveView
path("dates/books/today/", views.BookTodayArchive.as_view()),
path(
"dates/books/today/allow_empty/",
views.BookTodayArchive.as_view(allow_empty=True),
),
path("dates/booksignings/today/", views.BookSigningTodayArchive.as_view()),
# DateDetailView
path(
"dates/books/<int:year>/<int:month>/<day>/<int:pk>/",
views.BookDetail.as_view(month_format="%m"),
),
path("dates/books/<int:year>/<month>/<day>/<int:pk>/", views.BookDetail.as_view()),
path(
"dates/books/<int:year>/<month>/<int:day>/<int:pk>/allow_future/",
views.BookDetail.as_view(allow_future=True),
),
path("dates/books/<int:year>/<month>/<int:day>/nopk/", views.BookDetail.as_view()),
path(
"dates/books/<int:year>/<month>/<int:day>/byslug/<slug:slug>/",
views.BookDetail.as_view(),
),
path(
"dates/books/get_object_custom_queryset/<int:year>/<month>/<int:day>/<int:pk>/",
views.BookDetailGetObjectCustomQueryset.as_view(),
),
path(
"dates/booksignings/<int:year>/<month>/<int:day>/<int:pk>/",
views.BookSigningDetail.as_view(),
),
# Useful for testing redirects
path("accounts/login/", auth_views.LoginView.as_view()),
path("BaseDateListViewTest/", dates.BaseDateListView.as_view()),
]
|
355c9dccedb5785b8a9032888ee3a8eeb513b3c546deb38b5ccd6110fee50470 | from django import forms
from .models import Author
class AuthorForm(forms.ModelForm):
name = forms.CharField()
slug = forms.SlugField()
class Meta:
model = Author
fields = ["name", "slug"]
class ContactForm(forms.Form):
name = forms.CharField()
message = forms.CharField(widget=forms.Textarea)
class ConfirmDeleteForm(forms.Form):
confirm = forms.BooleanField()
def clean(self):
cleaned_data = super().clean()
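        # BooleanField(required=True) already adds "This field is required."
        # when the checkbox is left unchecked; the non-field error below is
        # raised on top of it, so an unconfirmed submission carries two errors.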
if "confirm" not in cleaned_data:
raise forms.ValidationError("You must confirm the delete.")
|
98e4569a77aa779ff39b13c39d67557fbae0a803b9d3a7f6336abe8356693ed8 | from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator
from django.urls import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.views import generic
from .forms import AuthorForm, ConfirmDeleteForm, ContactForm
from .models import Artist, Author, Book, BookSigning, Page
class CustomTemplateView(generic.TemplateView):
template_name = "generic_views/about.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({"key": "value"})
return context
class ObjectDetail(generic.DetailView):
template_name = "generic_views/detail.html"
def get_object(self):
return {"foo": "bar"}
class ArtistDetail(generic.DetailView):
queryset = Artist.objects.all()
class AuthorDetail(generic.DetailView):
queryset = Author.objects.all()
class AuthorCustomDetail(generic.DetailView):
template_name = "generic_views/author_detail.html"
queryset = Author.objects.all()
def get(self, request, *args, **kwargs):
# Ensures get_context_object_name() doesn't reference self.object.
author = self.get_object()
context = {"custom_" + self.get_context_object_name(author): author}
return self.render_to_response(context)
class PageDetail(generic.DetailView):
queryset = Page.objects.all()
template_name_field = "template"
class DictList(generic.ListView):
"""A ListView that doesn't use a model."""
queryset = [{"first": "John", "last": "Lennon"}, {"first": "Yoko", "last": "Ono"}]
template_name = "generic_views/list.html"
class ArtistList(generic.ListView):
template_name = "generic_views/list.html"
queryset = Artist.objects.all()
class AuthorList(generic.ListView):
queryset = Author.objects.all()
class AuthorListGetQuerysetReturnsNone(AuthorList):
def get_queryset(self):
return None
class BookList(generic.ListView):
model = Book
class CustomPaginator(Paginator):
def __init__(self, queryset, page_size, orphans=0, allow_empty_first_page=True):
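        # Always use two orphans, regardless of the value the view passes in.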
super().__init__(
queryset,
page_size,
orphans=2,
allow_empty_first_page=allow_empty_first_page,
)
class AuthorListCustomPaginator(AuthorList):
paginate_by = 5
def get_paginator(
self, queryset, page_size, orphans=0, allow_empty_first_page=True
):
return super().get_paginator(
queryset,
page_size,
orphans=2,
allow_empty_first_page=allow_empty_first_page,
)
class ContactView(generic.FormView):
form_class = ContactForm
success_url = reverse_lazy("authors_list")
template_name = "generic_views/form.html"
class ArtistCreate(generic.CreateView):
model = Artist
fields = "__all__"
class NaiveAuthorCreate(generic.CreateView):
queryset = Author.objects.all()
fields = "__all__"
class TemplateResponseWithoutTemplate(
generic.detail.SingleObjectTemplateResponseMixin, generic.View
):
# we don't define the usual template_name here
def __init__(self):
        # Dummy object, but attr is required by get_template_names()
self.object = None
class AuthorCreate(generic.CreateView):
model = Author
success_url = "/list/authors/"
fields = "__all__"
class SpecializedAuthorCreate(generic.CreateView):
model = Author
form_class = AuthorForm
template_name = "generic_views/form.html"
context_object_name = "thingy"
def get_success_url(self):
return reverse("author_detail", args=[self.object.id])
class AuthorCreateRestricted(AuthorCreate):
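    # Decorate only the post handler, so unauthenticated POSTs are redirected
    # to the login page rather than creating an author.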
post = method_decorator(login_required)(AuthorCreate.post)
class ArtistUpdate(generic.UpdateView):
model = Artist
fields = "__all__"
class NaiveAuthorUpdate(generic.UpdateView):
queryset = Author.objects.all()
fields = "__all__"
class AuthorUpdate(generic.UpdateView):
get_form_called_count = 0 # Used to ensure get_form() is called once.
model = Author
success_url = "/list/authors/"
fields = "__all__"
def get_form(self, *args, **kwargs):
self.get_form_called_count += 1
return super().get_form(*args, **kwargs)
class OneAuthorUpdate(generic.UpdateView):
success_url = "/list/authors/"
fields = "__all__"
def get_object(self):
return Author.objects.get(pk=1)
class SpecializedAuthorUpdate(generic.UpdateView):
model = Author
form_class = AuthorForm
template_name = "generic_views/form.html"
context_object_name = "thingy"
def get_success_url(self):
return reverse("author_detail", args=[self.object.id])
class NaiveAuthorDelete(generic.DeleteView):
queryset = Author.objects.all()
class AuthorDelete(generic.DeleteView):
model = Author
success_url = "/list/authors/"
class AuthorDeleteFormView(generic.DeleteView):
model = Author
form_class = ConfirmDeleteForm
def get_success_url(self):
return reverse("authors_list")
class SpecializedAuthorDelete(generic.DeleteView):
queryset = Author.objects.all()
template_name = "generic_views/confirm_delete.html"
context_object_name = "thingy"
success_url = reverse_lazy("authors_list")
class BookConfig:
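    # Shared queryset/date_field configuration mixed into each date-based
    # generic view below.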
queryset = Book.objects.all()
date_field = "pubdate"
class BookArchive(BookConfig, generic.ArchiveIndexView):
pass
class BookYearArchive(BookConfig, generic.YearArchiveView):
pass
class BookMonthArchive(BookConfig, generic.MonthArchiveView):
pass
class BookWeekArchive(BookConfig, generic.WeekArchiveView):
pass
class BookDayArchive(BookConfig, generic.DayArchiveView):
pass
class BookTodayArchive(BookConfig, generic.TodayArchiveView):
pass
class BookDetail(BookConfig, generic.DateDetailView):
pass
class AuthorGetQuerySetFormView(generic.edit.ModelFormMixin):
fields = "__all__"
def get_queryset(self):
return Author.objects.all()
class BookDetailGetObjectCustomQueryset(BookDetail):
def get_object(self, queryset=None):
return super().get_object(queryset=Book.objects.filter(pk=self.kwargs["pk"]))
class CustomMultipleObjectMixinView(generic.list.MultipleObjectMixin, generic.View):
queryset = [
{"name": "John"},
{"name": "Yoko"},
]
def get(self, request):
self.object_list = self.get_queryset()
class CustomContextView(generic.detail.SingleObjectMixin, generic.View):
model = Book
object = Book(name="dummy")
def get_object(self):
return Book(name="dummy")
def get_context_data(self, **kwargs):
context = {"custom_key": "custom_value"}
context.update(kwargs)
return super().get_context_data(**context)
def get_context_object_name(self, obj):
return "test_name"
class CustomSingleObjectView(generic.detail.SingleObjectMixin, generic.View):
model = Book
object = Book(name="dummy")
class BookSigningConfig:
model = BookSigning
date_field = "event_date"
# use the same templates as for books
def get_template_names(self):
return ["generic_views/book%s.html" % self.template_name_suffix]
class BookSigningArchive(BookSigningConfig, generic.ArchiveIndexView):
pass
class BookSigningYearArchive(BookSigningConfig, generic.YearArchiveView):
pass
class BookSigningMonthArchive(BookSigningConfig, generic.MonthArchiveView):
pass
class BookSigningWeekArchive(BookSigningConfig, generic.WeekArchiveView):
pass
class BookSigningDayArchive(BookSigningConfig, generic.DayArchiveView):
pass
class BookSigningTodayArchive(BookSigningConfig, generic.TodayArchiveView):
pass
class BookArchiveWithoutDateField(generic.ArchiveIndexView):
queryset = Book.objects.all()
class BookSigningDetail(BookSigningConfig, generic.DateDetailView):
context_object_name = "book"
class NonModel:
id = "non_model_1"
_meta = None
class NonModelDetail(generic.DetailView):
template_name = "generic_views/detail.html"
model = NonModel
def get_object(self, queryset=None):
return NonModel()
class ObjectDoesNotExistDetail(generic.DetailView):
def get_queryset(self):
return Book.does_not_exist.all()
class LateValidationView(generic.FormView):
form_class = ContactForm
success_url = reverse_lazy("authors_list")
template_name = "generic_views/form.html"
def form_valid(self, form):
form.add_error(None, "There is an error")
return self.form_invalid(form)
|
32e62d2967484c5ca007c5ea15483abc7f567c45c657d705e50adcee7edaf071 | from django.http import HttpResponse
from django.urls import path
urlpatterns = [
path("", lambda request: HttpResponse("root is here")),
]
|
85e8c394dffa47ce99270a7f0947e966d59892d424668075b3917af26af35f1f | import gzip
import random
import re
import struct
from io import BytesIO
from urllib.parse import quote
from django.conf import settings
from django.core import mail
from django.core.exceptions import PermissionDenied
from django.http import (
FileResponse,
HttpRequest,
HttpResponse,
HttpResponseNotFound,
HttpResponsePermanentRedirect,
HttpResponseRedirect,
StreamingHttpResponse,
)
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.middleware.common import BrokenLinkEmailsMiddleware, CommonMiddleware
from django.middleware.gzip import GZipMiddleware
from django.middleware.http import ConditionalGetMiddleware
from django.test import RequestFactory, SimpleTestCase, override_settings
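# Pack a single integer in the range 0-255 into one byte.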
int2byte = struct.Struct(">B").pack
def get_response_empty(request):
return HttpResponse()
def get_response_404(request):
return HttpResponseNotFound()
@override_settings(ROOT_URLCONF="middleware.urls")
class CommonMiddlewareTest(SimpleTestCase):
rf = RequestFactory()
@override_settings(APPEND_SLASH=True)
def test_append_slash_have_slash(self):
"""
URLs with slashes should go unmolested.
"""
request = self.rf.get("/slash/")
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_resource(self):
"""
Matches to explicit slashless URLs should go unmolested.
"""
def get_response(req):
return HttpResponse("Here's the text of the web page.")
request = self.rf.get("/noslash")
self.assertIsNone(CommonMiddleware(get_response).process_request(request))
self.assertEqual(
CommonMiddleware(get_response)(request).content,
b"Here's the text of the web page.",
)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_unknown(self):
"""
APPEND_SLASH should not redirect to unknown resources.
"""
request = self.rf.get("/unknown")
response = CommonMiddleware(get_response_404)(request)
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect(self):
"""
APPEND_SLASH should redirect slashless URLs to a valid pattern.
"""
request = self.rf.get("/slash")
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_querystring(self):
"""
APPEND_SLASH should preserve querystrings when redirecting.
"""
request = self.rf.get("/slash?test=1")
resp = CommonMiddleware(get_response_404)(request)
self.assertEqual(resp.url, "/slash/?test=1")
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_querystring_have_slash(self):
"""
        APPEND_SLASH should append a slash to the path when redirecting a
        request with a querystring ending with a slash.
"""
request = self.rf.get("/slash?test=slash/")
resp = CommonMiddleware(get_response_404)(request)
self.assertIsInstance(resp, HttpResponsePermanentRedirect)
self.assertEqual(resp.url, "/slash/?test=slash/")
@override_settings(APPEND_SLASH=True, DEBUG=True)
def test_append_slash_no_redirect_on_POST_in_DEBUG(self):
"""
While in debug mode, an exception is raised with a warning
        when a failed attempt is made to POST, PUT, or PATCH to a URL which
would normally be redirected to a slashed version.
"""
msg = "maintaining %s data. Change your form to point to testserver/slash/"
request = self.rf.get("/slash")
request.method = "POST"
with self.assertRaisesMessage(RuntimeError, msg % request.method):
CommonMiddleware(get_response_404)(request)
request = self.rf.get("/slash")
request.method = "PUT"
with self.assertRaisesMessage(RuntimeError, msg % request.method):
CommonMiddleware(get_response_404)(request)
request = self.rf.get("/slash")
request.method = "PATCH"
with self.assertRaisesMessage(RuntimeError, msg % request.method):
CommonMiddleware(get_response_404)(request)
@override_settings(APPEND_SLASH=False)
def test_append_slash_disabled(self):
"""
Disabling append slash functionality should leave slashless URLs alone.
"""
request = self.rf.get("/slash")
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_opt_out(self):
"""
Views marked with @no_append_slash should be left alone.
"""
request = self.rf.get("/sensitive_fbv")
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
request = self.rf.get("/sensitive_cbv")
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_quoted(self):
"""
URLs which require quoting should be redirected to their slash version.
"""
request = self.rf.get(quote("/needsquoting#"))
r = CommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/needsquoting%23/")
@override_settings(APPEND_SLASH=True)
def test_append_slash_leading_slashes(self):
"""
Paths starting with two slashes are escaped to prevent open redirects.
If there's a URL pattern that allows paths to start with two slashes, a
request with path //evil.com must not redirect to //evil.com/ (appended
slash) which is a schemaless absolute URL. The browser would navigate
to evil.com/.
"""
# Use 4 slashes because of RequestFactory behavior.
request = self.rf.get("////evil.com/security")
r = CommonMiddleware(get_response_404).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/%2Fevil.com/security/")
r = CommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/%2Fevil.com/security/")
@override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
def test_prepend_www(self):
request = self.rf.get("/path/")
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/path/")
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_have_slash(self):
request = self.rf.get("/slash/")
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/slash/")
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_slashless(self):
request = self.rf.get("/slash")
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/slash/")
# The following tests examine expected behavior given a custom URLconf that
# overrides the default one through the request object.
@override_settings(APPEND_SLASH=True)
def test_append_slash_have_slash_custom_urlconf(self):
"""
URLs with slashes should go unmolested.
"""
request = self.rf.get("/customurlconf/slash/")
request.urlconf = "middleware.extra_urls"
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_resource_custom_urlconf(self):
"""
Matches to explicit slashless URLs should go unmolested.
"""
def get_response(req):
return HttpResponse("web content")
request = self.rf.get("/customurlconf/noslash")
request.urlconf = "middleware.extra_urls"
self.assertIsNone(CommonMiddleware(get_response).process_request(request))
self.assertEqual(
CommonMiddleware(get_response)(request).content, b"web content"
)
@override_settings(APPEND_SLASH=True)
def test_append_slash_slashless_unknown_custom_urlconf(self):
"""
APPEND_SLASH should not redirect to unknown resources.
"""
request = self.rf.get("/customurlconf/unknown")
request.urlconf = "middleware.extra_urls"
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_redirect_custom_urlconf(self):
"""
APPEND_SLASH should redirect slashless URLs to a valid pattern.
"""
request = self.rf.get("/customurlconf/slash")
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_404)(request)
self.assertIsNotNone(
r,
"CommonMiddleware failed to return APPEND_SLASH redirect using "
"request.urlconf",
)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/customurlconf/slash/")
@override_settings(APPEND_SLASH=True, DEBUG=True)
def test_append_slash_no_redirect_on_POST_in_DEBUG_custom_urlconf(self):
"""
While in debug mode, an exception is raised with a warning
        when a failed attempt is made to POST to a URL which would normally be
redirected to a slashed version.
"""
request = self.rf.get("/customurlconf/slash")
request.urlconf = "middleware.extra_urls"
request.method = "POST"
with self.assertRaisesMessage(RuntimeError, "end in a slash"):
CommonMiddleware(get_response_404)(request)
@override_settings(APPEND_SLASH=False)
def test_append_slash_disabled_custom_urlconf(self):
"""
Disabling append slash functionality should leave slashless URLs alone.
"""
request = self.rf.get("/customurlconf/slash")
request.urlconf = "middleware.extra_urls"
self.assertIsNone(CommonMiddleware(get_response_404).process_request(request))
self.assertEqual(CommonMiddleware(get_response_404)(request).status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_append_slash_quoted_custom_urlconf(self):
"""
URLs which require quoting should be redirected to their slash version.
"""
request = self.rf.get(quote("/customurlconf/needsquoting#"))
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_404)(request)
self.assertIsNotNone(
r,
"CommonMiddleware failed to return APPEND_SLASH redirect using "
"request.urlconf",
)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/customurlconf/needsquoting%23/")
@override_settings(APPEND_SLASH=False, PREPEND_WWW=True)
def test_prepend_www_custom_urlconf(self):
request = self.rf.get("/customurlconf/path/")
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/customurlconf/path/")
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_have_slash_custom_urlconf(self):
request = self.rf.get("/customurlconf/slash/")
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/customurlconf/slash/")
@override_settings(APPEND_SLASH=True, PREPEND_WWW=True)
def test_prepend_www_append_slash_slashless_custom_urlconf(self):
request = self.rf.get("/customurlconf/slash")
request.urlconf = "middleware.extra_urls"
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "http://www.testserver/customurlconf/slash/")
# Tests for the Content-Length header
def test_content_length_header_added(self):
def get_response(req):
response = HttpResponse("content")
self.assertNotIn("Content-Length", response)
return response
response = CommonMiddleware(get_response)(self.rf.get("/"))
self.assertEqual(int(response.headers["Content-Length"]), len(response.content))
def test_content_length_header_not_added_for_streaming_response(self):
def get_response(req):
response = StreamingHttpResponse("content")
self.assertNotIn("Content-Length", response)
return response
response = CommonMiddleware(get_response)(self.rf.get("/"))
self.assertNotIn("Content-Length", response)
def test_content_length_header_not_changed(self):
bad_content_length = 500
def get_response(req):
response = HttpResponse()
response.headers["Content-Length"] = bad_content_length
return response
response = CommonMiddleware(get_response)(self.rf.get("/"))
self.assertEqual(int(response.headers["Content-Length"]), bad_content_length)
# Other tests
@override_settings(DISALLOWED_USER_AGENTS=[re.compile(r"foo")])
def test_disallowed_user_agents(self):
request = self.rf.get("/slash")
request.META["HTTP_USER_AGENT"] = "foo"
with self.assertRaisesMessage(PermissionDenied, "Forbidden user agent"):
CommonMiddleware(get_response_empty).process_request(request)
def test_non_ascii_query_string_does_not_crash(self):
"""Regression test for #15152"""
request = self.rf.get("/slash")
request.META["QUERY_STRING"] = "drink=café"
r = CommonMiddleware(get_response_empty).process_request(request)
self.assertEqual(r.status_code, 301)
def test_response_redirect_class(self):
request = self.rf.get("/slash")
r = CommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 301)
self.assertEqual(r.url, "/slash/")
self.assertIsInstance(r, HttpResponsePermanentRedirect)
def test_response_redirect_class_subclass(self):
class MyCommonMiddleware(CommonMiddleware):
response_redirect_class = HttpResponseRedirect
request = self.rf.get("/slash")
r = MyCommonMiddleware(get_response_404)(request)
self.assertEqual(r.status_code, 302)
self.assertEqual(r.url, "/slash/")
self.assertIsInstance(r, HttpResponseRedirect)
@override_settings(
IGNORABLE_404_URLS=[re.compile(r"foo")],
MANAGERS=[("PHD", "[email protected]")],
)
class BrokenLinkEmailsMiddlewareTest(SimpleTestCase):
rf = RequestFactory()
def setUp(self):
self.req = self.rf.get("/regular_url/that/does/not/exist")
def get_response(self, req):
return self.client.get(req.path)
def test_404_error_reporting(self):
self.req.META["HTTP_REFERER"] = "/another/url/"
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("Broken", mail.outbox[0].subject)
def test_404_error_reporting_no_referer(self):
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
def test_404_error_reporting_ignored_url(self):
self.req.path = self.req.path_info = "foo_url/that/does/not/exist"
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
def test_custom_request_checker(self):
class SubclassedMiddleware(BrokenLinkEmailsMiddleware):
ignored_user_agent_patterns = (
re.compile(r"Spider.*"),
re.compile(r"Robot.*"),
)
def is_ignorable_request(self, request, uri, domain, referer):
"""Check user-agent in addition to normal checks."""
if super().is_ignorable_request(request, uri, domain, referer):
return True
user_agent = request.META["HTTP_USER_AGENT"]
return any(
pattern.search(user_agent)
for pattern in self.ignored_user_agent_patterns
)
self.req.META["HTTP_REFERER"] = "/another/url/"
self.req.META["HTTP_USER_AGENT"] = "Spider machine 3.4"
SubclassedMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
self.req.META["HTTP_USER_AGENT"] = "My user agent"
SubclassedMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
def test_referer_equal_to_requested_url(self):
"""
Some bots set the referer to the current URL to avoid being blocked by
        a referer check (#25302).
"""
self.req.META["HTTP_REFERER"] = self.req.path
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
# URL with scheme and domain should also be ignored
self.req.META["HTTP_REFERER"] = "http://testserver%s" % self.req.path
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
# URL with a different scheme should be ignored as well because bots
# tend to use http:// in referers even when browsing HTTPS websites.
self.req.META["HTTP_X_PROTO"] = "https"
self.req.META["SERVER_PORT"] = 443
with self.settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_PROTO", "https")):
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
def test_referer_equal_to_requested_url_on_another_domain(self):
self.req.META["HTTP_REFERER"] = "http://anotherserver%s" % self.req.path
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
@override_settings(APPEND_SLASH=True)
def test_referer_equal_to_requested_url_without_trailing_slash_with_append_slash(
self,
):
self.req.path = self.req.path_info = "/regular_url/that/does/not/exist/"
self.req.META["HTTP_REFERER"] = self.req.path_info[:-1]
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 0)
@override_settings(APPEND_SLASH=False)
def test_referer_equal_to_requested_url_without_trailing_slash_with_no_append_slash(
self,
):
self.req.path = self.req.path_info = "/regular_url/that/does/not/exist/"
self.req.META["HTTP_REFERER"] = self.req.path_info[:-1]
BrokenLinkEmailsMiddleware(self.get_response)(self.req)
self.assertEqual(len(mail.outbox), 1)
@override_settings(ROOT_URLCONF="middleware.cond_get_urls")
class ConditionalGetMiddlewareTest(SimpleTestCase):
request_factory = RequestFactory()
def setUp(self):
self.req = self.request_factory.get("/")
self.resp_headers = {}
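        # Headers placed in resp_headers are copied onto every response
        # produced by self.get_response() below.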
def get_response(self, req):
resp = self.client.get(req.path_info)
for key, value in self.resp_headers.items():
resp[key] = value
return resp
# Tests for the ETag header
def test_middleware_calculates_etag(self):
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
self.assertNotEqual("", resp["ETag"])
def test_middleware_wont_overwrite_etag(self):
self.resp_headers["ETag"] = "eggs"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
self.assertEqual("eggs", resp["ETag"])
def test_no_etag_streaming_response(self):
def get_response(req):
return StreamingHttpResponse(["content"])
self.assertFalse(
ConditionalGetMiddleware(get_response)(self.req).has_header("ETag")
)
def test_no_etag_response_empty_content(self):
def get_response(req):
return HttpResponse()
self.assertFalse(
ConditionalGetMiddleware(get_response)(self.req).has_header("ETag")
)
def test_no_etag_no_store_cache(self):
self.resp_headers["Cache-Control"] = "No-Cache, No-Store, Max-age=0"
self.assertFalse(
ConditionalGetMiddleware(self.get_response)(self.req).has_header("ETag")
)
def test_etag_extended_cache_control(self):
self.resp_headers["Cache-Control"] = 'my-directive="my-no-store"'
self.assertTrue(
ConditionalGetMiddleware(self.get_response)(self.req).has_header("ETag")
)
def test_if_none_match_and_no_etag(self):
self.req.META["HTTP_IF_NONE_MATCH"] = "spam"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_no_if_none_match_and_etag(self):
self.resp_headers["ETag"] = "eggs"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_none_match_and_same_etag(self):
self.req.META["HTTP_IF_NONE_MATCH"] = '"spam"'
self.resp_headers["ETag"] = '"spam"'
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 304)
def test_if_none_match_and_different_etag(self):
self.req.META["HTTP_IF_NONE_MATCH"] = "spam"
self.resp_headers["ETag"] = "eggs"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_none_match_and_redirect(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp["ETag"] = "spam"
resp["Location"] = "/"
resp.status_code = 301
return resp
self.req.META["HTTP_IF_NONE_MATCH"] = "spam"
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 301)
def test_if_none_match_and_client_error(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp["ETag"] = "spam"
resp.status_code = 400
return resp
self.req.META["HTTP_IF_NONE_MATCH"] = "spam"
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 400)
# Tests for the Last-Modified header
def test_if_modified_since_and_no_last_modified(self):
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_no_if_modified_since_and_last_modified(self):
self.resp_headers["Last-Modified"] = "Sat, 12 Feb 2011 17:38:44 GMT"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 200)
def test_if_modified_since_and_same_last_modified(self):
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
self.resp_headers["Last-Modified"] = "Sat, 12 Feb 2011 17:38:44 GMT"
self.resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(self.resp.status_code, 304)
def test_if_modified_since_and_last_modified_in_the_past(self):
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
self.resp_headers["Last-Modified"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(resp.status_code, 304)
def test_if_modified_since_and_last_modified_in_the_future(self):
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
self.resp_headers["Last-Modified"] = "Sat, 12 Feb 2011 17:41:44 GMT"
self.resp = ConditionalGetMiddleware(self.get_response)(self.req)
self.assertEqual(self.resp.status_code, 200)
def test_if_modified_since_and_redirect(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp["Last-Modified"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp["Location"] = "/"
resp.status_code = 301
return resp
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 301)
def test_if_modified_since_and_client_error(self):
def get_response(req):
resp = self.client.get(req.path_info)
resp["Last-Modified"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp.status_code = 400
return resp
self.req.META["HTTP_IF_MODIFIED_SINCE"] = "Sat, 12 Feb 2011 17:38:44 GMT"
resp = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(resp.status_code, 400)
def test_not_modified_headers(self):
"""
The 304 Not Modified response should include only the headers required
by section 4.1 of RFC 7232, Last-Modified, and the cookies.
"""
def get_response(req):
resp = self.client.get(req.path_info)
resp["Date"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp["Last-Modified"] = "Sat, 12 Feb 2011 17:35:44 GMT"
resp["Expires"] = "Sun, 13 Feb 2011 17:35:44 GMT"
resp["Vary"] = "Cookie"
resp["Cache-Control"] = "public"
resp["Content-Location"] = "/alt"
resp["Content-Language"] = "en" # shouldn't be preserved
resp["ETag"] = '"spam"'
resp.set_cookie("key", "value")
return resp
self.req.META["HTTP_IF_NONE_MATCH"] = '"spam"'
new_response = ConditionalGetMiddleware(get_response)(self.req)
self.assertEqual(new_response.status_code, 304)
base_response = get_response(self.req)
for header in (
"Cache-Control",
"Content-Location",
"Date",
"ETag",
"Expires",
"Last-Modified",
"Vary",
):
self.assertEqual(
new_response.headers[header], base_response.headers[header]
)
self.assertEqual(new_response.cookies, base_response.cookies)
self.assertNotIn("Content-Language", new_response)
def test_no_unsafe(self):
"""
ConditionalGetMiddleware shouldn't return a conditional response on an
unsafe request. A response has already been generated by the time
ConditionalGetMiddleware is called, so it's too late to return a 412
Precondition Failed.
"""
def get_200_response(req):
return HttpResponse(status=200)
response = ConditionalGetMiddleware(self.get_response)(self.req)
etag = response.headers["ETag"]
put_request = self.request_factory.put("/", HTTP_IF_MATCH=etag)
conditional_get_response = ConditionalGetMiddleware(get_200_response)(
put_request
)
self.assertEqual(
conditional_get_response.status_code, 200
) # should never be a 412
def test_no_head(self):
"""
ConditionalGetMiddleware shouldn't compute and return an ETag on a
HEAD request since it can't do so accurately without access to the
response body of the corresponding GET.
"""
def get_200_response(req):
return HttpResponse(status=200)
request = self.request_factory.head("/")
conditional_get_response = ConditionalGetMiddleware(get_200_response)(request)
self.assertNotIn("ETag", conditional_get_response)
class XFrameOptionsMiddlewareTest(SimpleTestCase):
"""
Tests for the X-Frame-Options clickjacking prevention middleware.
"""
def test_same_origin(self):
"""
The X_FRAME_OPTIONS setting can be set to SAMEORIGIN to have the
middleware use that value for the HTTP header.
"""
with override_settings(X_FRAME_OPTIONS="SAMEORIGIN"):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
with override_settings(X_FRAME_OPTIONS="sameorigin"):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
def test_deny(self):
"""
The X_FRAME_OPTIONS setting can be set to DENY to have the middleware
use that value for the HTTP header.
"""
with override_settings(X_FRAME_OPTIONS="DENY"):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
with override_settings(X_FRAME_OPTIONS="deny"):
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
def test_defaults_sameorigin(self):
"""
If the X_FRAME_OPTIONS setting is not set then it defaults to
DENY.
"""
with override_settings(X_FRAME_OPTIONS=None):
del settings.X_FRAME_OPTIONS # restored by override_settings
r = XFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
def test_dont_set_if_set(self):
"""
If the X-Frame-Options header is already set then the middleware does
not attempt to override it.
"""
def same_origin_response(request):
response = HttpResponse()
response.headers["X-Frame-Options"] = "SAMEORIGIN"
return response
def deny_response(request):
response = HttpResponse()
response.headers["X-Frame-Options"] = "DENY"
return response
with override_settings(X_FRAME_OPTIONS="DENY"):
r = XFrameOptionsMiddleware(same_origin_response)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
with override_settings(X_FRAME_OPTIONS="SAMEORIGIN"):
r = XFrameOptionsMiddleware(deny_response)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
def test_response_exempt(self):
"""
If the response has an xframe_options_exempt attribute set to False
then it still sets the header, but if it's set to True then it doesn't.
"""
def xframe_exempt_response(request):
response = HttpResponse()
response.xframe_options_exempt = True
return response
def xframe_not_exempt_response(request):
response = HttpResponse()
response.xframe_options_exempt = False
return response
with override_settings(X_FRAME_OPTIONS="SAMEORIGIN"):
r = XFrameOptionsMiddleware(xframe_not_exempt_response)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
r = XFrameOptionsMiddleware(xframe_exempt_response)(HttpRequest())
self.assertIsNone(r.headers.get("X-Frame-Options"))
def test_is_extendable(self):
"""
The XFrameOptionsMiddleware method that determines the X-Frame-Options
header value can be overridden based on something in the request or
response.
"""
class OtherXFrameOptionsMiddleware(XFrameOptionsMiddleware):
# This is just an example for testing purposes...
def get_xframe_options_value(self, request, response):
if getattr(request, "sameorigin", False):
return "SAMEORIGIN"
if getattr(response, "sameorigin", False):
return "SAMEORIGIN"
return "DENY"
def same_origin_response(request):
response = HttpResponse()
response.sameorigin = True
return response
with override_settings(X_FRAME_OPTIONS="DENY"):
r = OtherXFrameOptionsMiddleware(same_origin_response)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
request = HttpRequest()
request.sameorigin = True
r = OtherXFrameOptionsMiddleware(get_response_empty)(request)
self.assertEqual(r.headers["X-Frame-Options"], "SAMEORIGIN")
with override_settings(X_FRAME_OPTIONS="SAMEORIGIN"):
r = OtherXFrameOptionsMiddleware(get_response_empty)(HttpRequest())
self.assertEqual(r.headers["X-Frame-Options"], "DENY")
class GZipMiddlewareTest(SimpleTestCase):
"""
Tests the GZipMiddleware.
"""
short_string = b"This string is too short to be worth compressing."
compressible_string = b"a" * 500
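    # ~500 random bytes: gzip cannot shrink this payload, so the middleware is
    # expected to leave such responses uncompressed.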
incompressible_string = b"".join(
int2byte(random.randint(0, 255)) for _ in range(500)
)
sequence = [b"a" * 500, b"b" * 200, b"a" * 300]
sequence_unicode = ["a" * 500, "é" * 200, "a" * 300]
request_factory = RequestFactory()
def setUp(self):
self.req = self.request_factory.get("/")
self.req.META["HTTP_ACCEPT_ENCODING"] = "gzip, deflate"
self.req.META[
"HTTP_USER_AGENT"
] = "Mozilla/5.0 (Windows NT 5.1; rv:9.0.1) Gecko/20100101 Firefox/9.0.1"
self.resp = HttpResponse()
self.resp.status_code = 200
self.resp.content = self.compressible_string
self.resp["Content-Type"] = "text/html; charset=UTF-8"
def get_response(self, request):
return self.resp
@staticmethod
def decompress(gzipped_string):
with gzip.GzipFile(mode="rb", fileobj=BytesIO(gzipped_string)) as f:
return f.read()
@staticmethod
def get_mtime(gzipped_string):
with gzip.GzipFile(mode="rb", fileobj=BytesIO(gzipped_string)) as f:
f.read() # must read the data before accessing the header
return f.mtime
def test_compress_response(self):
"""
Compression is performed on responses with compressible content.
"""
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(self.decompress(r.content), self.compressible_string)
self.assertEqual(r.get("Content-Encoding"), "gzip")
self.assertEqual(r.get("Content-Length"), str(len(r.content)))
def test_compress_streaming_response(self):
"""
Compression is performed on responses with streaming content.
"""
def get_stream_response(request):
resp = StreamingHttpResponse(self.sequence)
resp["Content-Type"] = "text/html; charset=UTF-8"
return resp
r = GZipMiddleware(get_stream_response)(self.req)
self.assertEqual(self.decompress(b"".join(r)), b"".join(self.sequence))
self.assertEqual(r.get("Content-Encoding"), "gzip")
self.assertFalse(r.has_header("Content-Length"))
def test_compress_streaming_response_unicode(self):
"""
Compression is performed on responses with streaming Unicode content.
"""
def get_stream_response_unicode(request):
resp = StreamingHttpResponse(self.sequence_unicode)
resp["Content-Type"] = "text/html; charset=UTF-8"
return resp
r = GZipMiddleware(get_stream_response_unicode)(self.req)
self.assertEqual(
self.decompress(b"".join(r)),
b"".join(x.encode() for x in self.sequence_unicode),
)
self.assertEqual(r.get("Content-Encoding"), "gzip")
self.assertFalse(r.has_header("Content-Length"))
def test_compress_file_response(self):
"""
Compression is performed on FileResponse.
"""
with open(__file__, "rb") as file1:
def get_response(req):
file_resp = FileResponse(file1)
file_resp["Content-Type"] = "text/html; charset=UTF-8"
return file_resp
r = GZipMiddleware(get_response)(self.req)
with open(__file__, "rb") as file2:
self.assertEqual(self.decompress(b"".join(r)), file2.read())
self.assertEqual(r.get("Content-Encoding"), "gzip")
self.assertIsNot(r.file_to_stream, file1)
def test_compress_non_200_response(self):
"""
Compression is performed on responses with a status other than 200
(#10762).
"""
self.resp.status_code = 404
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(self.decompress(r.content), self.compressible_string)
self.assertEqual(r.get("Content-Encoding"), "gzip")
def test_no_compress_short_response(self):
"""
Compression isn't performed on responses with short content.
"""
self.resp.content = self.short_string
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r.content, self.short_string)
self.assertIsNone(r.get("Content-Encoding"))
def test_no_compress_compressed_response(self):
"""
Compression isn't performed on responses that are already compressed.
"""
self.resp["Content-Encoding"] = "deflate"
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r.content, self.compressible_string)
self.assertEqual(r.get("Content-Encoding"), "deflate")
def test_no_compress_incompressible_response(self):
"""
Compression isn't performed on responses with incompressible content.
"""
self.resp.content = self.incompressible_string
r = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r.content, self.incompressible_string)
self.assertIsNone(r.get("Content-Encoding"))
def test_compress_deterministic(self):
"""
Compression results are the same for the same content and don't
include a modification time (since that would make the results
of compression non-deterministic and prevent
ConditionalGetMiddleware from recognizing conditional matches
on gzipped content).
"""
r1 = GZipMiddleware(self.get_response)(self.req)
r2 = GZipMiddleware(self.get_response)(self.req)
self.assertEqual(r1.content, r2.content)
self.assertEqual(self.get_mtime(r1.content), 0)
self.assertEqual(self.get_mtime(r2.content), 0)
class ETagGZipMiddlewareTest(SimpleTestCase):
"""
ETags are handled properly by GZipMiddleware.
"""
rf = RequestFactory()
compressible_string = b"a" * 500
def test_strong_etag_modified(self):
"""
GZipMiddleware makes a strong ETag weak.
"""
def get_response(req):
response = HttpResponse(self.compressible_string)
response.headers["ETag"] = '"eggs"'
return response
request = self.rf.get("/", HTTP_ACCEPT_ENCODING="gzip, deflate")
gzip_response = GZipMiddleware(get_response)(request)
self.assertEqual(gzip_response.headers["ETag"], 'W/"eggs"')
def test_weak_etag_not_modified(self):
"""
GZipMiddleware doesn't modify a weak ETag.
"""
def get_response(req):
response = HttpResponse(self.compressible_string)
response.headers["ETag"] = 'W/"eggs"'
return response
request = self.rf.get("/", HTTP_ACCEPT_ENCODING="gzip, deflate")
gzip_response = GZipMiddleware(get_response)(request)
self.assertEqual(gzip_response.headers["ETag"], 'W/"eggs"')
def test_etag_match(self):
"""
GZipMiddleware allows 304 Not Modified responses.
"""
def get_response(req):
response = HttpResponse(self.compressible_string)
return response
def get_cond_response(req):
return ConditionalGetMiddleware(get_response)(req)
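        # The first request computes an ETag on the gzipped body; a follow-up
        # conditional request carrying that ETag must still match and get a 304.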
request = self.rf.get("/", HTTP_ACCEPT_ENCODING="gzip, deflate")
response = GZipMiddleware(get_cond_response)(request)
gzip_etag = response.headers["ETag"]
next_request = self.rf.get(
"/", HTTP_ACCEPT_ENCODING="gzip, deflate", HTTP_IF_NONE_MATCH=gzip_etag
)
next_response = ConditionalGetMiddleware(get_response)(next_request)
self.assertEqual(next_response.status_code, 304)
|
b0f501268e5a569ee8114031f7ac78bce1531cde0efc1e2ed6e490b50d83bd3d | from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase
from django.test.utils import override_settings
class SecurityMiddlewareTest(SimpleTestCase):
def middleware(self, *args, **kwargs):
from django.middleware.security import SecurityMiddleware
return SecurityMiddleware(self.response(*args, **kwargs))
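    # Extra WSGI environ entries that make RequestFactory build an https://
    # request, so request.is_secure() returns True in the tests below.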
@property
def secure_request_kwargs(self):
return {"wsgi.url_scheme": "https"}
def response(self, *args, headers=None, **kwargs):
def get_response(req):
response = HttpResponse(*args, **kwargs)
if headers:
for k, v in headers.items():
response.headers[k] = v
return response
return get_response
def process_response(self, *args, secure=False, request=None, **kwargs):
request_kwargs = {}
if secure:
request_kwargs.update(self.secure_request_kwargs)
if request is None:
request = self.request.get("/some/url", **request_kwargs)
ret = self.middleware(*args, **kwargs).process_request(request)
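        # process_request() may short-circuit with a redirect (e.g. the SSL
        # redirect); if it does, return that response without running the
        # response phase.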
if ret:
return ret
return self.middleware(*args, **kwargs)(request)
request = RequestFactory()
def process_request(self, method, *args, secure=False, **kwargs):
if secure:
kwargs.update(self.secure_request_kwargs)
req = getattr(self.request, method.lower())(*args, **kwargs)
return self.middleware().process_request(req)
@override_settings(SECURE_HSTS_SECONDS=3600)
def test_sts_on(self):
"""
With SECURE_HSTS_SECONDS=3600, the middleware adds
"Strict-Transport-Security: max-age=3600" to the response.
"""
self.assertEqual(
self.process_response(secure=True).headers["Strict-Transport-Security"],
"max-age=3600",
)
@override_settings(SECURE_HSTS_SECONDS=3600)
def test_sts_already_present(self):
"""
The middleware will not override a "Strict-Transport-Security" header
already present in the response.
"""
response = self.process_response(
secure=True, headers={"Strict-Transport-Security": "max-age=7200"}
)
self.assertEqual(response.headers["Strict-Transport-Security"], "max-age=7200")
@override_settings(SECURE_HSTS_SECONDS=3600)
def test_sts_only_if_secure(self):
"""
The "Strict-Transport-Security" header is not added to responses going
over an insecure connection.
"""
self.assertNotIn(
"Strict-Transport-Security",
self.process_response(secure=False).headers,
)
@override_settings(SECURE_HSTS_SECONDS=0)
def test_sts_off(self):
"""
With SECURE_HSTS_SECONDS=0, the middleware does not add a
"Strict-Transport-Security" header to the response.
"""
self.assertNotIn(
"Strict-Transport-Security",
self.process_response(secure=True).headers,
)
@override_settings(SECURE_HSTS_SECONDS=600, SECURE_HSTS_INCLUDE_SUBDOMAINS=True)
def test_sts_include_subdomains(self):
"""
With SECURE_HSTS_SECONDS non-zero and SECURE_HSTS_INCLUDE_SUBDOMAINS
True, the middleware adds a "Strict-Transport-Security" header with the
"includeSubDomains" directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(
response.headers["Strict-Transport-Security"],
"max-age=600; includeSubDomains",
)
@override_settings(SECURE_HSTS_SECONDS=600, SECURE_HSTS_INCLUDE_SUBDOMAINS=False)
def test_sts_no_include_subdomains(self):
"""
With SECURE_HSTS_SECONDS non-zero and SECURE_HSTS_INCLUDE_SUBDOMAINS
False, the middleware adds a "Strict-Transport-Security" header without
the "includeSubDomains" directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(response.headers["Strict-Transport-Security"], "max-age=600")
@override_settings(SECURE_HSTS_SECONDS=10886400, SECURE_HSTS_PRELOAD=True)
def test_sts_preload(self):
"""
With SECURE_HSTS_SECONDS non-zero and SECURE_HSTS_PRELOAD True, the
middleware adds a "Strict-Transport-Security" header with the "preload"
directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(
response.headers["Strict-Transport-Security"],
"max-age=10886400; preload",
)
@override_settings(
SECURE_HSTS_SECONDS=10886400,
SECURE_HSTS_INCLUDE_SUBDOMAINS=True,
SECURE_HSTS_PRELOAD=True,
)
def test_sts_subdomains_and_preload(self):
"""
With SECURE_HSTS_SECONDS non-zero, SECURE_HSTS_INCLUDE_SUBDOMAINS and
SECURE_HSTS_PRELOAD True, the middleware adds a "Strict-Transport-Security"
header containing both the "includeSubDomains" and "preload" directives
to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(
response.headers["Strict-Transport-Security"],
"max-age=10886400; includeSubDomains; preload",
)
@override_settings(SECURE_HSTS_SECONDS=10886400, SECURE_HSTS_PRELOAD=False)
def test_sts_no_preload(self):
"""
With SECURE_HSTS_SECONDS non-zero and SECURE_HSTS_PRELOAD
False, the middleware adds a "Strict-Transport-Security" header without
the "preload" directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(
response.headers["Strict-Transport-Security"],
"max-age=10886400",
)
@override_settings(SECURE_CONTENT_TYPE_NOSNIFF=True)
def test_content_type_on(self):
"""
With SECURE_CONTENT_TYPE_NOSNIFF set to True, the middleware adds
"X-Content-Type-Options: nosniff" header to the response.
"""
self.assertEqual(
self.process_response().headers["X-Content-Type-Options"],
"nosniff",
)
@override_settings(SECURE_CONTENT_TYPE_NOSNIFF=True)
def test_content_type_already_present(self):
"""
The middleware will not override an "X-Content-Type-Options" header
already present in the response.
"""
response = self.process_response(
secure=True, headers={"X-Content-Type-Options": "foo"}
)
self.assertEqual(response.headers["X-Content-Type-Options"], "foo")
@override_settings(SECURE_CONTENT_TYPE_NOSNIFF=False)
def test_content_type_off(self):
"""
With SECURE_CONTENT_TYPE_NOSNIFF False, the middleware does not add an
"X-Content-Type-Options" header to the response.
"""
self.assertNotIn("X-Content-Type-Options", self.process_response().headers)
@override_settings(SECURE_SSL_REDIRECT=True)
def test_ssl_redirect_on(self):
"""
With SECURE_SSL_REDIRECT True, the middleware redirects any non-secure
requests to the https:// version of the same URL.
"""
ret = self.process_request("get", "/some/url?query=string")
self.assertEqual(ret.status_code, 301)
self.assertEqual(ret["Location"], "https://testserver/some/url?query=string")
@override_settings(SECURE_SSL_REDIRECT=True)
def test_no_redirect_ssl(self):
"""
The middleware does not redirect secure requests.
"""
ret = self.process_request("get", "/some/url", secure=True)
self.assertIsNone(ret)
@override_settings(SECURE_SSL_REDIRECT=True, SECURE_REDIRECT_EXEMPT=["^insecure/"])
def test_redirect_exempt(self):
"""
The middleware does not redirect requests with URL path matching an
exempt pattern.
"""
ret = self.process_request("get", "/insecure/page")
self.assertIsNone(ret)
@override_settings(SECURE_SSL_REDIRECT=True, SECURE_SSL_HOST="secure.example.com")
def test_redirect_ssl_host(self):
"""
The middleware redirects to SECURE_SSL_HOST if given.
"""
ret = self.process_request("get", "/some/url")
self.assertEqual(ret.status_code, 301)
self.assertEqual(ret["Location"], "https://secure.example.com/some/url")
@override_settings(SECURE_SSL_REDIRECT=False)
def test_ssl_redirect_off(self):
"""
With SECURE_SSL_REDIRECT False, the middleware does not redirect.
"""
ret = self.process_request("get", "/some/url")
self.assertIsNone(ret)
@override_settings(SECURE_REFERRER_POLICY=None)
def test_referrer_policy_off(self):
"""
With SECURE_REFERRER_POLICY set to None, the middleware does not add a
"Referrer-Policy" header to the response.
"""
self.assertNotIn("Referrer-Policy", self.process_response().headers)
def test_referrer_policy_on(self):
"""
With SECURE_REFERRER_POLICY set to a valid value, the middleware adds a
"Referrer-Policy" header to the response.
"""
tests = (
("strict-origin", "strict-origin"),
("strict-origin,origin", "strict-origin,origin"),
("strict-origin, origin", "strict-origin,origin"),
(["strict-origin", "origin"], "strict-origin,origin"),
(("strict-origin", "origin"), "strict-origin,origin"),
)
for value, expected in tests:
with self.subTest(value=value), override_settings(
SECURE_REFERRER_POLICY=value
):
self.assertEqual(
self.process_response().headers["Referrer-Policy"],
expected,
)
@override_settings(SECURE_REFERRER_POLICY="strict-origin")
def test_referrer_policy_already_present(self):
"""
The middleware will not override a "Referrer-Policy" header already
present in the response.
"""
response = self.process_response(headers={"Referrer-Policy": "unsafe-url"})
self.assertEqual(response.headers["Referrer-Policy"], "unsafe-url")
@override_settings(SECURE_CROSS_ORIGIN_OPENER_POLICY=None)
def test_coop_off(self):
"""
With SECURE_CROSS_ORIGIN_OPENER_POLICY set to None, the middleware does
not add a "Cross-Origin-Opener-Policy" header to the response.
"""
self.assertNotIn("Cross-Origin-Opener-Policy", self.process_response())
def test_coop_default(self):
"""SECURE_CROSS_ORIGIN_OPENER_POLICY defaults to same-origin."""
self.assertEqual(
self.process_response().headers["Cross-Origin-Opener-Policy"],
"same-origin",
)
def test_coop_on(self):
"""
With SECURE_CROSS_ORIGIN_OPENER_POLICY set to a valid value, the
middleware adds a "Cross-Origin_Opener-Policy" header to the response.
"""
tests = ["same-origin", "same-origin-allow-popups", "unsafe-none"]
for value in tests:
with self.subTest(value=value), override_settings(
SECURE_CROSS_ORIGIN_OPENER_POLICY=value,
):
self.assertEqual(
self.process_response().headers["Cross-Origin-Opener-Policy"],
value,
)
@override_settings(SECURE_CROSS_ORIGIN_OPENER_POLICY="unsafe-none")
def test_coop_already_present(self):
"""
The middleware doesn't override a "Cross-Origin-Opener-Policy" header
already present in the response.
"""
response = self.process_response(
headers={"Cross-Origin-Opener-Policy": "same-origin"}
)
self.assertEqual(response.headers["Cross-Origin-Opener-Policy"], "same-origin")
|
52b8dbdbc1da6b4a275d7b2e7f54cad471296171f58d8e8afda5147ccc998930 | from django.urls import path, re_path
from . import views
urlpatterns = [
path("noslash", views.empty_view),
path("slash/", views.empty_view),
path("needsquoting#/", views.empty_view),
# Accepts paths with two leading slashes.
re_path(r"^(.+)/security/$", views.empty_view),
# Should not append slash.
path("sensitive_fbv/", views.sensitive_fbv),
path("sensitive_cbv/", views.SensitiveCBV.as_view()),
]
|
0e87128cdad6c7e77c4051b8eb4bd87eea7870a0d521ba353c2b82a73fd3fd3f | from django.urls import path
from . import views
urlpatterns = [
path("customurlconf/noslash", views.empty_view),
path("customurlconf/slash/", views.empty_view),
path("customurlconf/needsquoting#/", views.empty_view),
]
|
894e3ccdc5e1af4ad880f8ef34c1ebc5ed03d8e8f35e0fe037d011ca2ffa9127 | from django.http import HttpResponse
from django.utils.decorators import method_decorator
from django.views.decorators.common import no_append_slash
from django.views.generic import View
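# The sensitive_* views below are decorated with no_append_slash, which marks
# them so CommonMiddleware's APPEND_SLASH handling does not redirect them.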
def empty_view(request, *args, **kwargs):
return HttpResponse()
@no_append_slash
def sensitive_fbv(request, *args, **kwargs):
return HttpResponse()
@method_decorator(no_append_slash, name="dispatch")
class SensitiveCBV(View):
def get(self, *args, **kwargs):
return HttpResponse()
|
d669f2392212157d8d03b1b625cd74255d324042d714f9f40488c6a6c55069da | from django.template import TemplateDoesNotExist
from django.template.loader import get_template, render_to_string, select_template
from django.test import SimpleTestCase, override_settings
from django.test.client import RequestFactory
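# Two engines are configured: the dummy "template strings" backend first and
# DjangoTemplates second, so by default template lookups try the dummy backend
# before falling through to the Django backend.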
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.dummy.TemplateStrings",
"APP_DIRS": True,
},
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
],
"loaders": [
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
},
},
]
)
class TemplateLoaderTests(SimpleTestCase):
def test_get_template_first_engine(self):
template = get_template("template_loader/hello.html")
self.assertEqual(template.render(), "Hello! (template strings)\n")
def test_get_template_second_engine(self):
template = get_template("template_loader/goodbye.html")
self.assertEqual(template.render(), "Goodbye! (Django templates)\n")
def test_get_template_using_engine(self):
template = get_template("template_loader/hello.html", using="django")
self.assertEqual(template.render(), "Hello! (Django templates)\n")
def test_get_template_not_found(self):
with self.assertRaises(TemplateDoesNotExist) as e:
get_template("template_loader/unknown.html")
self.assertEqual(
e.exception.chain[-1].tried[0][0].template_name,
"template_loader/unknown.html",
)
self.assertEqual(e.exception.chain[-1].backend.name, "django")
def test_select_template_first_engine(self):
template = select_template(
["template_loader/unknown.html", "template_loader/hello.html"]
)
self.assertEqual(template.render(), "Hello! (template strings)\n")
def test_select_template_second_engine(self):
template = select_template(
["template_loader/unknown.html", "template_loader/goodbye.html"]
)
self.assertEqual(template.render(), "Goodbye! (Django templates)\n")
def test_select_template_using_engine(self):
template = select_template(
["template_loader/unknown.html", "template_loader/hello.html"],
using="django",
)
self.assertEqual(template.render(), "Hello! (Django templates)\n")
def test_select_template_empty(self):
with self.assertRaises(TemplateDoesNotExist):
select_template([])
def test_select_template_string(self):
with self.assertRaisesMessage(
TypeError,
"select_template() takes an iterable of template names but got a "
"string: 'template_loader/hello.html'. Use get_template() if you "
"want to load a single template by name.",
):
select_template("template_loader/hello.html")
def test_select_template_not_found(self):
with self.assertRaises(TemplateDoesNotExist) as e:
select_template(
["template_loader/unknown.html", "template_loader/missing.html"]
)
self.assertEqual(
e.exception.chain[0].tried[0][0].template_name,
"template_loader/unknown.html",
)
self.assertEqual(e.exception.chain[0].backend.name, "dummy")
self.assertEqual(
e.exception.chain[-1].tried[0][0].template_name,
"template_loader/missing.html",
)
self.assertEqual(e.exception.chain[-1].backend.name, "django")
def test_select_template_tries_all_engines_before_names(self):
template = select_template(
["template_loader/goodbye.html", "template_loader/hello.html"]
)
self.assertEqual(template.render(), "Goodbye! (Django templates)\n")
def test_render_to_string_first_engine(self):
content = render_to_string("template_loader/hello.html")
self.assertEqual(content, "Hello! (template strings)\n")
def test_render_to_string_second_engine(self):
content = render_to_string("template_loader/goodbye.html")
self.assertEqual(content, "Goodbye! (Django templates)\n")
def test_render_to_string_with_request(self):
request = RequestFactory().get("/foobar/")
content = render_to_string("template_loader/request.html", request=request)
self.assertEqual(content, "/foobar/\n")
def test_render_to_string_using_engine(self):
content = render_to_string("template_loader/hello.html", using="django")
self.assertEqual(content, "Hello! (Django templates)\n")
def test_render_to_string_not_found(self):
with self.assertRaises(TemplateDoesNotExist) as e:
render_to_string("template_loader/unknown.html")
self.assertEqual(
e.exception.chain[-1].tried[0][0].template_name,
"template_loader/unknown.html",
)
self.assertEqual(e.exception.chain[-1].backend.name, "django")
def test_render_to_string_with_list_first_engine(self):
content = render_to_string(
["template_loader/unknown.html", "template_loader/hello.html"]
)
self.assertEqual(content, "Hello! (template strings)\n")
def test_render_to_string_with_list_second_engine(self):
content = render_to_string(
["template_loader/unknown.html", "template_loader/goodbye.html"]
)
self.assertEqual(content, "Goodbye! (Django templates)\n")
def test_render_to_string_with_list_using_engine(self):
content = render_to_string(
["template_loader/unknown.html", "template_loader/hello.html"],
using="django",
)
self.assertEqual(content, "Hello! (Django templates)\n")
def test_render_to_string_with_list_empty(self):
with self.assertRaises(TemplateDoesNotExist):
render_to_string([])
def test_render_to_string_with_list_not_found(self):
with self.assertRaises(TemplateDoesNotExist) as e:
render_to_string(
["template_loader/unknown.html", "template_loader/missing.html"]
)
self.assertEqual(
e.exception.chain[0].tried[0][0].template_name,
"template_loader/unknown.html",
)
self.assertEqual(e.exception.chain[0].backend.name, "dummy")
self.assertEqual(
e.exception.chain[1].tried[0][0].template_name,
"template_loader/unknown.html",
)
self.assertEqual(e.exception.chain[1].backend.name, "django")
self.assertEqual(
e.exception.chain[2].tried[0][0].template_name,
"template_loader/missing.html",
)
self.assertEqual(e.exception.chain[2].backend.name, "dummy")
self.assertEqual(
e.exception.chain[3].tried[0][0].template_name,
"template_loader/missing.html",
)
self.assertEqual(e.exception.chain[3].backend.name, "django")
def test_render_to_string_with_list_tries_all_engines_before_names(self):
content = render_to_string(
["template_loader/goodbye.html", "template_loader/hello.html"]
)
self.assertEqual(content, "Goodbye! (Django templates)\n")
|
008a5fae962b89a82577f84cf67f83544c5f6f05bfe5b693a2f45d130ed1843c | import os
from django.core.management import call_command
from django.test import TestCase, TransactionTestCase
from django.test.utils import extend_sys_path
from .models import (
ConcreteModel,
ConcreteModelSubclass,
ConcreteModelSubclassProxy,
ProxyModel,
)
class ProxyModelInheritanceTests(TransactionTestCase):
"""
Proxy model inheritance across apps can result in migrate not creating the table
for the proxied model (as described in #12286). This test creates two dummy
apps and calls migrate, then verifies that the table has been created.
"""
available_apps = []
def test_table_exists(self):
with extend_sys_path(os.path.dirname(os.path.abspath(__file__))):
with self.modify_settings(INSTALLED_APPS={"append": ["app1", "app2"]}):
call_command("migrate", verbosity=0, run_syncdb=True)
from app1.models import ProxyModel
from app2.models import NiceModel
self.assertEqual(NiceModel.objects.count(), 0)
self.assertEqual(ProxyModel.objects.count(), 0)
class MultiTableInheritanceProxyTest(TestCase):
def test_model_subclass_proxy(self):
"""
Deleting an instance of a model proxying a multi-table inherited
subclass should cascade delete down the whole inheritance chain (see
#18083).
"""
instance = ConcreteModelSubclassProxy.objects.create()
instance.delete()
self.assertEqual(0, ConcreteModelSubclassProxy.objects.count())
self.assertEqual(0, ConcreteModelSubclass.objects.count())
self.assertEqual(0, ConcreteModel.objects.count())
def test_deletion_through_intermediate_proxy(self):
child = ConcreteModelSubclass.objects.create()
proxy = ProxyModel.objects.get(pk=child.pk)
proxy.delete()
self.assertFalse(ConcreteModel.objects.exists())
self.assertFalse(ConcreteModelSubclass.objects.exists())
|
3688f2147919a77af40842b00f5d05804c894f4ebdcad107721e907a53b8a660 | from django.db import models
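# Inheritance chain: ConcreteModel -> ProxyModel (proxy) ->
# ConcreteModelSubclass (multi-table) -> ConcreteModelSubclassProxy (proxy).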
class ConcreteModel(models.Model):
pass
class ProxyModel(ConcreteModel):
class Meta:
proxy = True
class ConcreteModelSubclass(ProxyModel):
pass
class ConcreteModelSubclassProxy(ConcreteModelSubclass):
class Meta:
proxy = True
|
614a7fa97153f4289bccade9874bb0b1ed78b0504048e292943000029b67b5f5 | import asyncio
import sys
import threading
from pathlib import Path
from unittest import skipIf
from asgiref.testing import ApplicationCommunicator
from django.contrib.staticfiles.handlers import ASGIStaticFilesHandler
from django.core.asgi import get_asgi_application
from django.core.signals import request_finished, request_started
from django.db import close_old_connections
from django.test import (
AsyncRequestFactory,
SimpleTestCase,
modify_settings,
override_settings,
)
from django.utils.http import http_date
from .urls import sync_waiter, test_filename
TEST_STATIC_ROOT = Path(__file__).parent / "project" / "static"
@skipIf(
sys.platform == "win32" and (3, 8, 0) < sys.version_info < (3, 8, 1),
"https://bugs.python.org/issue38563",
)
@override_settings(ROOT_URLCONF="asgi.urls")
class ASGITest(SimpleTestCase):
async_request_factory = AsyncRequestFactory()
def setUp(self):
request_started.disconnect(close_old_connections)
def tearDown(self):
request_started.connect(close_old_connections)
async def test_get_asgi_application(self):
"""
get_asgi_application() returns a functioning ASGI callable.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Read the response.
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", b"12"),
(b"Content-Type", b"text/html; charset=utf-8"),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
async def test_file_response(self):
"""
Makes sure that FileResponse works over ASGI.
"""
application = get_asgi_application()
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/file/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Get the file content.
with open(test_filename, "rb") as test_file:
test_file_contents = test_file.read()
# Read the response.
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
headers = response_start["headers"]
self.assertEqual(len(headers), 3)
expected_headers = {
b"Content-Length": str(len(test_file_contents)).encode("ascii"),
b"Content-Type": b"text/x-python",
b"Content-Disposition": b'inline; filename="urls.py"',
}
for key, value in headers:
try:
self.assertEqual(value, expected_headers[key])
except AssertionError:
# Windows registry may not be configured with correct
# mimetypes.
if sys.platform == "win32" and key == b"Content-Type":
self.assertEqual(value, b"text/plain")
else:
raise
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
@modify_settings(INSTALLED_APPS={"append": "django.contrib.staticfiles"})
@override_settings(
STATIC_URL="static/",
STATIC_ROOT=TEST_STATIC_ROOT,
STATICFILES_DIRS=[TEST_STATIC_ROOT],
STATICFILES_FINDERS=[
"django.contrib.staticfiles.finders.FileSystemFinder",
],
)
async def test_static_file_response(self):
application = ASGIStaticFilesHandler(get_asgi_application())
# Construct HTTP request.
scope = self.async_request_factory._base_scope(path="/static/file.txt")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
# Get the file content.
file_path = TEST_STATIC_ROOT / "file.txt"
with open(file_path, "rb") as test_file:
test_file_contents = test_file.read()
# Read the response.
stat = file_path.stat()
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", str(len(test_file_contents)).encode("ascii")),
(b"Content-Type", b"text/plain"),
(b"Content-Disposition", b'inline; filename="file.txt"'),
(b"Last-Modified", http_date(stat.st_mtime).encode("ascii")),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], test_file_contents)
# Allow response.close() to finish.
await communicator.wait()
async def test_headers(self):
application = get_asgi_application()
communicator = ApplicationCommunicator(
application,
self.async_request_factory._base_scope(
path="/meta/",
headers=[
[b"content-type", b"text/plain; charset=utf-8"],
[b"content-length", b"77"],
[b"referer", b"Scotland"],
[b"referer", b"Wales"],
],
),
)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
self.assertEqual(
set(response_start["headers"]),
{
(b"Content-Length", b"19"),
(b"Content-Type", b"text/plain; charset=utf-8"),
},
)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"From Scotland,Wales")
async def test_get_query_string(self):
application = get_asgi_application()
for query_string in (b"name=Andrew", "name=Andrew"):
with self.subTest(query_string=query_string):
scope = self.async_request_factory._base_scope(
path="/",
query_string=query_string,
)
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello Andrew!")
async def test_disconnect(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.disconnect"})
with self.assertRaises(asyncio.TimeoutError):
await communicator.receive_output()
async def test_wrong_connection_type(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/", type="other")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
msg = "Django can only handle ASGI/HTTP connections, not other."
with self.assertRaisesMessage(ValueError, msg):
await communicator.receive_output()
async def test_non_unicode_query_string(self):
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/", query_string=b"\xff")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 400)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"")
async def test_request_lifecycle_signals_dispatched_with_thread_sensitive(self):
class SignalHandler:
"""Track threads handler is dispatched on."""
threads = []
def __call__(self, **kwargs):
self.threads.append(threading.current_thread())
signal_handler = SignalHandler()
request_started.connect(signal_handler)
request_finished.connect(signal_handler)
# Perform a basic request.
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/")
communicator = ApplicationCommunicator(application, scope)
await communicator.send_input({"type": "http.request"})
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
# Give response.close() time to finish.
await communicator.wait()
# AsyncToSync should have executed the signals in the same thread.
request_started_thread, request_finished_thread = signal_handler.threads
self.assertEqual(request_started_thread, request_finished_thread)
request_started.disconnect(signal_handler)
request_finished.disconnect(signal_handler)
async def test_concurrent_async_uses_multiple_thread_pools(self):
sync_waiter.active_threads.clear()
# Send 2 requests concurrently
application = get_asgi_application()
scope = self.async_request_factory._base_scope(path="/wait/")
communicators = []
for _ in range(2):
communicators.append(ApplicationCommunicator(application, scope))
await communicators[-1].send_input({"type": "http.request"})
# Each request must complete with a status code of 200
# If requests aren't scheduled concurrently, the barrier in the
# sync_wait view will time out, resulting in a 500 status code.
for communicator in communicators:
response_start = await communicator.receive_output()
self.assertEqual(response_start["type"], "http.response.start")
self.assertEqual(response_start["status"], 200)
response_body = await communicator.receive_output()
self.assertEqual(response_body["type"], "http.response.body")
self.assertEqual(response_body["body"], b"Hello World!")
# Give response.close() time to finish.
await communicator.wait()
# The requests should have scheduled on different threads. Note
# active_threads is a set (a thread can only appear once), therefore
# length is a sufficient check.
self.assertEqual(len(sync_waiter.active_threads), 2)
sync_waiter.active_threads.clear()
|
9e9994f4b8efa1c23d83977a5c2e1d5f9c60532f866e60aeea4c8af8ea06d75a | import threading
from django.http import FileResponse, HttpResponse
from django.urls import path
def hello(request):
name = request.GET.get("name") or "World"
return HttpResponse("Hello %s!" % name)
def hello_meta(request):
return HttpResponse(
"From %s" % request.META.get("HTTP_REFERER") or "",
content_type=request.META.get("CONTENT_TYPE"),
)
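# View used by test_concurrent_async_uses_multiple_thread_pools: both
# concurrent requests must reach the barrier, each on its own thread, before
# the timeout expires; otherwise the broken barrier surfaces as a 500 response.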
def sync_waiter(request):
with sync_waiter.lock:
sync_waiter.active_threads.add(threading.current_thread())
sync_waiter.barrier.wait(timeout=0.5)
return hello(request)
sync_waiter.active_threads = set()
sync_waiter.lock = threading.Lock()
sync_waiter.barrier = threading.Barrier(2)
test_filename = __file__
urlpatterns = [
path("", hello),
path("file/", lambda x: FileResponse(open(test_filename, "rb"))),
path("meta/", hello_meta),
path("wait/", sync_waiter),
]
|
c3d098231e8902f6bff43ed8c1a4d216ce957c15c77d83c6b7a1f392f6689f3b | from django.contrib import admin
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from django.contrib.contenttypes.admin import GenericTabularInline
from django.contrib.contenttypes.models import ContentType
from django.forms.formsets import DEFAULT_MAX_NUM
from django.forms.models import ModelForm
from django.test import RequestFactory, SimpleTestCase, TestCase, override_settings
from django.urls import reverse
from .admin import MediaInline, MediaPermanentInline
from .admin import site as admin_site
from .models import Category, Episode, EpisodePermanent, Media, PhoneNumber
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class GenericAdminViewTest(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
e = Episode.objects.create(name="This Week in Django")
self.episode_pk = e.pk
m = Media(content_object=e, url="http://example.com/podcast.mp3")
m.save()
self.mp3_media_pk = m.pk
m = Media(content_object=e, url="http://example.com/logo.png")
m.save()
self.png_media_pk = m.pk
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse("admin:generic_inline_admin_episode_add"))
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(
reverse(
"admin:generic_inline_admin_episode_change", args=(self.episode_pk,)
)
)
self.assertEqual(response.status_code, 200)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "This Week in Django",
# inline data
"generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "1",
"generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "0",
"generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0",
}
response = self.client.post(
reverse("admin:generic_inline_admin_episode_add"), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
prefix = "generic_inline_admin-media-content_type-object_id"
post_data = {
"name": "This Week in Django",
# inline data
f"{prefix}-TOTAL_FORMS": "3",
f"{prefix}-INITIAL_FORMS": "2",
f"{prefix}-MAX_NUM_FORMS": "0",
f"{prefix}-0-id": str(self.mp3_media_pk),
f"{prefix}-0-url": "http://example.com/podcast.mp3",
f"{prefix}-1-id": str(self.png_media_pk),
f"{prefix}-1-url": "http://example.com/logo.png",
f"{prefix}-2-id": "",
f"{prefix}-2-url": "",
}
url = reverse(
"admin:generic_inline_admin_episode_change", args=(self.episode_pk,)
)
response = self.client.post(url, post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class GenericInlineAdminParametersTest(TestDataMixin, TestCase):
factory = RequestFactory()
def setUp(self):
self.client.force_login(self.superuser)
def _create_object(self, model):
"""
Create a model with an attached Media object via GFK. We can't
load content via a fixture (since the GenericForeignKey relies on
content type IDs, which will vary depending on what other tests
have been run), thus we do it here.
"""
e = model.objects.create(name="This Week in Django")
Media.objects.create(content_object=e, url="http://example.com/podcast.mp3")
return e
def test_no_param(self):
"""
With one initial form, extra (default) at 3, there should be 4 forms.
"""
e = self._create_object(Episode)
response = self.client.get(
reverse("admin:generic_inline_admin_episode_change", args=(e.pk,))
)
formset = response.context["inline_admin_formsets"][0].formset
self.assertEqual(formset.total_form_count(), 4)
self.assertEqual(formset.initial_form_count(), 1)
def test_extra_param(self):
"""
With extra=0, there should be one form.
"""
class ExtraInline(GenericTabularInline):
model = Media
extra = 0
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [ExtraInline]
e = self._create_object(Episode)
request = self.factory.get(
reverse("admin:generic_inline_admin_episode_change", args=(e.pk,))
)
request.user = User(username="super", is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data["inline_admin_formsets"][0].formset
self.assertEqual(formset.total_form_count(), 1)
self.assertEqual(formset.initial_form_count(), 1)
def test_max_num_param(self):
"""
With extra=5 and max_num=2, there should be only 2 forms.
"""
class MaxNumInline(GenericTabularInline):
model = Media
extra = 5
max_num = 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [MaxNumInline]
e = self._create_object(Episode)
request = self.factory.get(
reverse("admin:generic_inline_admin_episode_change", args=(e.pk,))
)
request.user = User(username="super", is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data["inline_admin_formsets"][0].formset
self.assertEqual(formset.total_form_count(), 2)
self.assertEqual(formset.initial_form_count(), 1)
def test_min_num_param(self):
"""
With extra=3 and min_num=2, there should be five forms.
"""
class MinNumInline(GenericTabularInline):
model = Media
extra = 3
min_num = 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [MinNumInline]
e = self._create_object(Episode)
request = self.factory.get(
reverse("admin:generic_inline_admin_episode_change", args=(e.pk,))
)
request.user = User(username="super", is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data["inline_admin_formsets"][0].formset
self.assertEqual(formset.total_form_count(), 5)
self.assertEqual(formset.initial_form_count(), 1)
def test_get_extra(self):
class GetExtraInline(GenericTabularInline):
model = Media
extra = 4
def get_extra(self, request, obj):
return 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [GetExtraInline]
e = self._create_object(Episode)
request = self.factory.get(
reverse("admin:generic_inline_admin_episode_change", args=(e.pk,))
)
request.user = User(username="super", is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data["inline_admin_formsets"][0].formset
self.assertEqual(formset.extra, 2)
def test_get_min_num(self):
class GetMinNumInline(GenericTabularInline):
model = Media
min_num = 5
def get_min_num(self, request, obj):
return 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [GetMinNumInline]
e = self._create_object(Episode)
request = self.factory.get(
reverse("admin:generic_inline_admin_episode_change", args=(e.pk,))
)
request.user = User(username="super", is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data["inline_admin_formsets"][0].formset
self.assertEqual(formset.min_num, 2)
def test_get_max_num(self):
class GetMaxNumInline(GenericTabularInline):
model = Media
extra = 5
def get_max_num(self, request, obj):
return 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [GetMaxNumInline]
e = self._create_object(Episode)
request = self.factory.get(
reverse("admin:generic_inline_admin_episode_change", args=(e.pk,))
)
request.user = User(username="super", is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data["inline_admin_formsets"][0].formset
self.assertEqual(formset.max_num, 2)
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class GenericInlineAdminWithUniqueTogetherTest(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_add(self):
category_id = Category.objects.create(name="male").pk
prefix = "generic_inline_admin-phonenumber-content_type-object_id"
post_data = {
"name": "John Doe",
# inline data
f"{prefix}-TOTAL_FORMS": "1",
f"{prefix}-INITIAL_FORMS": "0",
f"{prefix}-MAX_NUM_FORMS": "0",
f"{prefix}-0-id": "",
f"{prefix}-0-phone_number": "555-555-5555",
f"{prefix}-0-category": str(category_id),
}
response = self.client.get(reverse("admin:generic_inline_admin_contact_add"))
self.assertEqual(response.status_code, 200)
response = self.client.post(
reverse("admin:generic_inline_admin_contact_add"), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_delete(self):
from .models import Contact
c = Contact.objects.create(name="foo")
PhoneNumber.objects.create(
object_id=c.id,
content_type=ContentType.objects.get_for_model(Contact),
phone_number="555-555-5555",
)
response = self.client.post(
reverse("admin:generic_inline_admin_contact_delete", args=[c.pk])
)
self.assertContains(response, "Are you sure you want to delete")
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class NoInlineDeletionTest(SimpleTestCase):
def test_no_deletion(self):
inline = MediaPermanentInline(EpisodePermanent, admin_site)
fake_request = object()
formset = inline.get_formset(fake_request)
self.assertFalse(formset.can_delete)
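# Lightweight request/user doubles for the tests below: MockSuperUser.has_perm()
# always returns True, so every admin permission check passes.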
class MockRequest:
pass
class MockSuperUser:
def has_perm(self, perm, obj=None):
return True
request = MockRequest()
request.user = MockSuperUser()
@override_settings(ROOT_URLCONF="generic_inline_admin.urls")
class GenericInlineModelAdminTest(SimpleTestCase):
def setUp(self):
self.site = AdminSite()
def test_get_formset_kwargs(self):
media_inline = MediaInline(Media, AdminSite())
# Create a formset with default arguments
formset = media_inline.get_formset(request)
self.assertEqual(formset.max_num, DEFAULT_MAX_NUM)
self.assertIs(formset.can_order, False)
# Create a formset with custom keyword arguments
formset = media_inline.get_formset(request, max_num=100, can_order=True)
self.assertEqual(formset.max_num, 100)
self.assertIs(formset.can_order, True)
def test_custom_form_meta_exclude_with_readonly(self):
"""
The custom ModelForm's `Meta.exclude` is respected when
used in conjunction with `GenericInlineModelAdmin.readonly_fields`
and when no `ModelAdmin.exclude` is defined.
"""
class MediaForm(ModelForm):
class Meta:
model = Media
exclude = ["url"]
class MediaInline(GenericTabularInline):
readonly_fields = ["description"]
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [MediaInline]
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
["keywords", "id", "DELETE"],
)
def test_custom_form_meta_exclude(self):
"""
The custom ModelForm's `Meta.exclude` is respected by
`GenericInlineModelAdmin.get_formset`, and overridden if
`ModelAdmin.exclude` or `GenericInlineModelAdmin.exclude` are defined.
Refs #15907.
"""
# First with `GenericInlineModelAdmin` -----------------
class MediaForm(ModelForm):
class Meta:
model = Media
exclude = ["url"]
class MediaInline(GenericTabularInline):
exclude = ["description"]
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [MediaInline]
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
["url", "keywords", "id", "DELETE"],
)
# Then, only with `ModelForm` -----------------
class MediaInline(GenericTabularInline):
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [MediaInline]
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
["description", "keywords", "id", "DELETE"],
)
def test_get_fieldsets(self):
# get_fieldsets is called when figuring out form fields.
# Refs #18681.
class MediaForm(ModelForm):
class Meta:
model = Media
fields = "__all__"
class MediaInline(GenericTabularInline):
form = MediaForm
model = Media
can_delete = False
def get_fieldsets(self, request, obj=None):
return [(None, {"fields": ["url", "description"]})]
ma = MediaInline(Media, self.site)
form = ma.get_formset(None).form
self.assertEqual(form._meta.fields, ["url", "description"])
def test_get_formsets_with_inlines_returns_tuples(self):
"""
get_formsets_with_inlines() returns the correct tuples.
"""
class MediaForm(ModelForm):
class Meta:
model = Media
exclude = ["url"]
class MediaInline(GenericTabularInline):
form = MediaForm
model = Media
class AlternateInline(GenericTabularInline):
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [AlternateInline, MediaInline]
ma = EpisodeAdmin(Episode, self.site)
inlines = ma.get_inline_instances(request)
for (formset, inline), other_inline in zip(
ma.get_formsets_with_inlines(request), inlines
):
self.assertIsInstance(formset, other_inline.get_formset(request).__class__)
def test_get_inline_instances_override_get_inlines(self):
class MediaInline(GenericTabularInline):
model = Media
class AlternateInline(GenericTabularInline):
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = (AlternateInline, MediaInline)
def get_inlines(self, request, obj):
if hasattr(request, "name"):
if request.name == "alternate":
return self.inlines[:1]
elif request.name == "media":
return self.inlines[1:2]
return []
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(ma.get_inlines(request, None), [])
self.assertEqual(ma.get_inline_instances(request), [])
for name, inline_class in (
("alternate", AlternateInline),
("media", MediaInline),
):
request.name = name
            self.assertEqual(ma.get_inlines(request, None), (inline_class,))
self.assertEqual(type(ma.get_inline_instances(request)[0]), inline_class)
|
8893a983b3f6426953495f03a005fd3b398890ca2642fa6be8ebb69115f6ae5b | from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Episode(models.Model):
name = models.CharField(max_length=100)
length = models.CharField(max_length=100, blank=True)
author = models.CharField(max_length=100, blank=True)
class Media(models.Model):
"""
    Media that can be associated with any object.
"""
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
url = models.URLField()
description = models.CharField(max_length=100, blank=True)
keywords = models.CharField(max_length=100, blank=True)
def __str__(self):
return self.url
#
# Generic inline with unique_together
#
class Category(models.Model):
name = models.CharField(max_length=50)
class PhoneNumber(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
phone_number = models.CharField(max_length=30)
category = models.ForeignKey(Category, models.SET_NULL, null=True, blank=True)
class Meta:
unique_together = (
(
"content_type",
"object_id",
"phone_number",
),
)
class Contact(models.Model):
name = models.CharField(max_length=50)
phone_numbers = GenericRelation(PhoneNumber, related_query_name="phone_numbers")
#
# Generic inline with can_delete=False
#
class EpisodePermanent(Episode):
pass
|
de88c7fccd294e581292fffbd80bd50f6bb1af262027b15fec843e12f3758bc4 | from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from .models import Category, Contact, Episode, EpisodePermanent, Media, PhoneNumber
site = admin.AdminSite(name="admin")
class MediaInline(GenericTabularInline):
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
MediaInline,
]
class PhoneNumberInline(GenericTabularInline):
model = PhoneNumber
class MediaPermanentInline(GenericTabularInline):
model = Media
can_delete = False
site.register(Episode, EpisodeAdmin)
site.register(Contact, inlines=[PhoneNumberInline])
site.register(Category)
site.register(EpisodePermanent, inlines=[MediaPermanentInline])
|
e9c566f6883aff51834ef470d0ad4e2552e4ba50569981a589c33e8c9427ce45 | from django.urls import path
from . import admin
urlpatterns = [
path("generic_inline_admin/admin/", admin.site.urls),
]
|
eb1c2b1e1f0dab5df632a574aa8c696dcbd448a1981320b4020ed5a77b884549 | import json
import random
from django.conf import settings
from django.contrib.messages import constants
from django.contrib.messages.storage.base import Message
from django.contrib.messages.storage.cookie import (
CookieStorage,
MessageDecoder,
MessageEncoder,
)
from django.test import SimpleTestCase, override_settings
from django.utils.crypto import get_random_string
from django.utils.safestring import SafeData, mark_safe
from .base import BaseTests
def set_cookie_data(storage, messages, invalid=False, encode_empty=False):
"""
Set ``request.COOKIES`` with the encoded data and remove the storage
backend's loaded data cache.
"""
encoded_data = storage._encode(messages, encode_empty=encode_empty)
if invalid:
# Truncate the first character so that the hash is invalid.
encoded_data = encoded_data[1:]
storage.request.COOKIES = {CookieStorage.cookie_name: encoded_data}
if hasattr(storage, "_loaded_data"):
del storage._loaded_data
def stored_cookie_messages_count(storage, response):
"""
    Return the number of messages stored.
"""
    # Get the messages cookie, ignoring it if its max-age is 0 (because it has
    # been marked for deletion).
cookie = response.cookies.get(storage.cookie_name)
if not cookie or cookie["max-age"] == 0:
return 0
data = storage._decode(cookie.value)
if not data:
return 0
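    # CookieStorage appends its ``not_finished`` sentinel when further
    # messages spill over into another backend; it isn't a real message, so
    # exclude it from the count.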
if data[-1] == CookieStorage.not_finished:
data.pop()
return len(data)
@override_settings(
SESSION_COOKIE_DOMAIN=".example.com",
SESSION_COOKIE_SECURE=True,
SESSION_COOKIE_HTTPONLY=True,
)
class CookieTests(BaseTests, SimpleTestCase):
storage_class = CookieStorage
def stored_messages_count(self, storage, response):
return stored_cookie_messages_count(storage, response)
def encode_decode(self, *args, **kwargs):
storage = self.get_storage()
message = Message(constants.DEBUG, *args, **kwargs)
encoded = storage._encode(message)
return storage._decode(encoded)
def test_get(self):
storage = self.storage_class(self.get_request())
# Set initial data.
example_messages = ["test", "me"]
set_cookie_data(storage, example_messages)
# The message contains what's expected.
self.assertEqual(list(storage), example_messages)
@override_settings(SESSION_COOKIE_SAMESITE="Strict")
    def test_cookie_settings(self):
"""
CookieStorage honors SESSION_COOKIE_DOMAIN, SESSION_COOKIE_SECURE, and
SESSION_COOKIE_HTTPONLY (#15618, #20972).
"""
# Test before the messages have been consumed
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, "test")
storage.update(response)
messages = storage._decode(response.cookies["messages"].value)
self.assertEqual(len(messages), 1)
self.assertEqual(messages[0].message, "test")
self.assertEqual(response.cookies["messages"]["domain"], ".example.com")
self.assertEqual(response.cookies["messages"]["expires"], "")
self.assertIs(response.cookies["messages"]["secure"], True)
self.assertIs(response.cookies["messages"]["httponly"], True)
self.assertEqual(response.cookies["messages"]["samesite"], "Strict")
# Deletion of the cookie (storing with an empty value) after the
# messages have been consumed.
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, "test")
for m in storage:
pass # Iterate through the storage to simulate consumption of messages.
storage.update(response)
self.assertEqual(response.cookies["messages"].value, "")
self.assertEqual(response.cookies["messages"]["domain"], ".example.com")
self.assertEqual(
response.cookies["messages"]["expires"], "Thu, 01 Jan 1970 00:00:00 GMT"
)
self.assertEqual(
response.cookies["messages"]["samesite"],
settings.SESSION_COOKIE_SAMESITE,
)
def test_get_bad_cookie(self):
request = self.get_request()
storage = self.storage_class(request)
# Set initial (invalid) data.
example_messages = ["test", "me"]
set_cookie_data(storage, example_messages, invalid=True)
        # The invalid data is discarded, so no messages are retrieved.
self.assertEqual(list(storage), [])
def test_max_cookie_length(self):
"""
If the data exceeds what is allowed in a cookie, older messages are
removed before saving (and returned by the ``update`` method).
"""
storage = self.get_storage()
response = self.get_response()
# When storing as a cookie, the cookie has constant overhead of approx
# 54 chars, and each message has a constant overhead of about 37 chars
# and a variable overhead of zero in the best case. We aim for a message
# size which will fit 4 messages into the cookie, but not 5.
        # See also FallbackTests.test_session_fallback().
msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37)
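        # A rough worked example, assuming Django's default
        # CookieStorage.max_cookie_size of 2048:
        #   msg_size = int((2048 - 54) / 4.5 - 37) = 406
        # so four messages cost about 4 * (406 + 37) + 54 = 1826 characters
        # (fits), while five cost about 2269 (doesn't).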
first_msg = None
        # Seed the RNG so the same (tested) content is generated every time;
        # random strings don't benefit from zlib compression, keeping sizes
        # predictable.
random.seed(42)
for i in range(5):
msg = get_random_string(msg_size)
storage.add(constants.INFO, msg)
if i == 0:
first_msg = msg
unstored_messages = storage.update(response)
cookie_storing = self.stored_messages_count(storage, response)
self.assertEqual(cookie_storing, 4)
self.assertEqual(len(unstored_messages), 1)
self.assertEqual(unstored_messages[0].message, first_msg)
def test_message_rfc6265(self):
non_compliant_chars = ["\\", ",", ";", '"']
messages = ["\\te,st", ';m"e', "\u2019", '123"NOTRECEIVED"']
storage = self.get_storage()
encoded = storage._encode(messages)
for illegal in non_compliant_chars:
self.assertEqual(encoded.find(illegal), -1)
def test_json_encoder_decoder(self):
"""
A complex nested data structure containing Message
instances is properly encoded/decoded by the custom JSON
encoder/decoder classes.
"""
messages = [
{
"message": Message(constants.INFO, "Test message"),
"message_list": [
Message(constants.INFO, "message %s") for x in range(5)
]
+ [{"another-message": Message(constants.ERROR, "error")}],
},
Message(constants.INFO, "message %s"),
]
encoder = MessageEncoder()
value = encoder.encode(messages)
decoded_messages = json.loads(value, cls=MessageDecoder)
self.assertEqual(messages, decoded_messages)
def test_safedata(self):
"""
        A message containing SafeData keeps its safe status when retrieved
        from the message storage.
"""
self.assertIsInstance(
self.encode_decode(mark_safe("<b>Hello Django!</b>")).message,
SafeData,
)
self.assertNotIsInstance(
self.encode_decode("<b>Hello Django!</b>").message,
SafeData,
)
def test_extra_tags(self):
"""
A message's extra_tags attribute is correctly preserved when retrieved
from the message storage.
"""
for extra_tags in ["", None, "some tags"]:
with self.subTest(extra_tags=extra_tags):
self.assertEqual(
self.encode_decode("message", extra_tags=extra_tags).extra_tags,
extra_tags,
)
|
4cca5e8fd40def471162dbf1dc06c5c2a7239f11f200dacc322ac864341c91f9 | import random
from django.contrib.messages import constants
from django.contrib.messages.storage.fallback import CookieStorage, FallbackStorage
from django.test import SimpleTestCase
from django.utils.crypto import get_random_string
from .base import BaseTests
from .test_cookie import set_cookie_data, stored_cookie_messages_count
from .test_session import set_session_data, stored_session_messages_count
class FallbackTests(BaseTests, SimpleTestCase):
storage_class = FallbackStorage
def get_request(self):
self.session = {}
request = super().get_request()
request.session = self.session
return request
def get_cookie_storage(self, storage):
return storage.storages[-2]
def get_session_storage(self, storage):
return storage.storages[-1]
def stored_cookie_messages_count(self, storage, response):
return stored_cookie_messages_count(self.get_cookie_storage(storage), response)
def stored_session_messages_count(self, storage, response):
return stored_session_messages_count(self.get_session_storage(storage))
def stored_messages_count(self, storage, response):
"""
Return the storage totals from both cookie and session backends.
"""
return self.stored_cookie_messages_count(
storage, response
) + self.stored_session_messages_count(storage, response)
def test_get(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
# Set initial cookie data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, example_messages)
# Overwrite the _get method of the fallback storage to prove it is not
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._get = None
self.assertEqual(list(storage), example_messages)
def test_get_empty(self):
request = self.get_request()
storage = self.storage_class(request)
# Overwrite the _get method of the fallback storage to prove it is not
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._get = None
self.assertEqual(list(storage), [])
def test_get_fallback(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(
cookie_storage, example_messages[:4] + [CookieStorage.not_finished]
)
set_session_data(session_storage, example_messages[4:])
self.assertEqual(list(storage), example_messages)
def test_get_fallback_only(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
example_messages = [str(i) for i in range(5)]
set_cookie_data(cookie_storage, [CookieStorage.not_finished], encode_empty=True)
set_session_data(session_storage, example_messages)
self.assertEqual(list(storage), example_messages)
def test_flush_used_backends(self):
request = self.get_request()
storage = self.storage_class(request)
cookie_storage = self.get_cookie_storage(storage)
session_storage = self.get_session_storage(storage)
# Set initial cookie and session data.
set_cookie_data(cookie_storage, ["cookie", CookieStorage.not_finished])
set_session_data(session_storage, ["session"])
# When updating, previously used but no longer needed backends are
# flushed.
response = self.get_response()
list(storage)
storage.update(response)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 0)
def test_no_fallback(self):
"""
        (1) A small number of messages whose data size doesn't exceed what is
allowed in a cookie will all be stored in the CookieBackend.
(2) If the CookieBackend can store all messages, the SessionBackend
won't be written to at all.
"""
storage = self.get_storage()
response = self.get_response()
# Overwrite the _store method of the fallback storage to prove it isn't
# used (it would cause a TypeError: 'NoneType' object is not callable).
self.get_session_storage(storage)._store = None
for i in range(5):
storage.add(constants.INFO, str(i) * 100)
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 5)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 0)
def test_session_fallback(self):
"""
If the data exceeds what is allowed in a cookie, messages which did
not fit are stored in the SessionBackend.
"""
storage = self.get_storage()
response = self.get_response()
        # See the comment in CookieTests.test_max_cookie_length().
msg_size = int((CookieStorage.max_cookie_size - 54) / 4.5 - 37)
        # Seed the RNG so the same (tested) content is generated every time;
        # random strings don't benefit from zlib compression, keeping sizes
        # predictable.
random.seed(42)
for i in range(5):
storage.add(constants.INFO, get_random_string(msg_size))
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 4)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 1)
def test_session_fallback_only(self):
"""
Large messages, none of which fit in a cookie, are stored in the
SessionBackend (and nothing is stored in the CookieBackend).
"""
storage = self.get_storage()
response = self.get_response()
        # Seed the RNG so the same (tested) content is generated every time;
        # random strings don't benefit from zlib compression, keeping sizes
        # predictable.
random.seed(42)
storage.add(constants.INFO, get_random_string(5000))
storage.update(response)
cookie_storing = self.stored_cookie_messages_count(storage, response)
self.assertEqual(cookie_storing, 0)
session_storing = self.stored_session_messages_count(storage, response)
self.assertEqual(session_storing, 1)
|
41cf6a8a084d12f7fdab20ed34332a1ba43c6238724862404df1bc3bfca8c82d | from unittest import mock
from django.contrib.messages import constants
from django.contrib.messages.storage import base
from django.contrib.messages.storage.base import Message
from django.test import SimpleTestCase, override_settings
class MessageTests(SimpleTestCase):
def test_eq(self):
msg_1 = Message(constants.INFO, "Test message 1")
msg_2 = Message(constants.INFO, "Test message 2")
msg_3 = Message(constants.WARNING, "Test message 1")
self.assertEqual(msg_1, msg_1)
self.assertEqual(msg_1, mock.ANY)
self.assertNotEqual(msg_1, msg_2)
self.assertNotEqual(msg_1, msg_3)
self.assertNotEqual(msg_2, msg_3)
class TestLevelTags(SimpleTestCase):
message_tags = {
constants.INFO: "info",
constants.DEBUG: "",
constants.WARNING: "",
constants.ERROR: "bad",
constants.SUCCESS: "",
12: "custom",
}
@override_settings(MESSAGE_TAGS=message_tags)
def test_override_settings_level_tags(self):
self.assertEqual(base.LEVEL_TAGS, self.message_tags)
|
0f7ebe76bac28602137d6d4bfca241e0cd6ac3b902b503b2c6ece59d819fe6e8 | from django.core.signing import b64_decode
from django.test import TestCase, override_settings
from django.urls import reverse
from .models import SomeObject
from .urls import ContactFormViewWithMsg, DeleteFormViewWithMsg
@override_settings(ROOT_URLCONF="messages_tests.urls")
class SuccessMessageMixinTests(TestCase):
def test_set_messages_success(self):
author = {"name": "John Doe", "slug": "success-msg"}
add_url = reverse("add_success_msg")
req = self.client.post(add_url, author)
# Uncompressed message is stored in the cookie.
value = b64_decode(
req.cookies["messages"].value.split(":")[0].encode(),
).decode()
self.assertIn(ContactFormViewWithMsg.success_message % author, value)
def test_set_messages_success_on_delete(self):
object_to_delete = SomeObject.objects.create(name="MyObject")
delete_url = reverse("success_msg_on_delete", args=[object_to_delete.pk])
response = self.client.post(delete_url, follow=True)
self.assertContains(response, DeleteFormViewWithMsg.success_message)
|
7c71a389ae67954cd7c27fd0a6fc046bcc78ccddb02a297f1234efc7adf96660 | from django.contrib.messages import constants, get_level, set_level
from django.contrib.messages.api import MessageFailure
from django.contrib.messages.constants import DEFAULT_LEVELS
from django.contrib.messages.storage import default_storage
from django.contrib.messages.storage.base import Message
from django.http import HttpRequest, HttpResponse
from django.test import modify_settings, override_settings
from django.urls import reverse
from django.utils.translation import gettext_lazy
def add_level_messages(storage):
"""
Add 6 messages from different levels (including a custom one) to a storage
instance.
"""
storage.add(constants.INFO, "A generic info message")
storage.add(29, "Some custom level")
storage.add(constants.DEBUG, "A debugging message", extra_tags="extra-tag")
storage.add(constants.WARNING, "A warning")
storage.add(constants.ERROR, "An error")
storage.add(constants.SUCCESS, "This was a triumph.")
class BaseTests:
storage_class = default_storage
levels = {
"debug": constants.DEBUG,
"info": constants.INFO,
"success": constants.SUCCESS,
"warning": constants.WARNING,
"error": constants.ERROR,
}
def setUp(self):
self.settings_override = override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": (
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
),
},
}
],
ROOT_URLCONF="messages_tests.urls",
MESSAGE_TAGS={},
MESSAGE_STORAGE="%s.%s"
% (self.storage_class.__module__, self.storage_class.__name__),
SESSION_SERIALIZER="django.contrib.sessions.serializers.JSONSerializer",
)
self.settings_override.enable()
def tearDown(self):
self.settings_override.disable()
def get_request(self):
return HttpRequest()
def get_response(self):
return HttpResponse()
def get_storage(self, data=None):
"""
Return the storage backend, setting its loaded data to the ``data``
argument.
        This method prevents the storage ``_get`` method from being called so
        that other parts of the storage backend can be tested independently of
        the message retrieval logic.
"""
storage = self.storage_class(self.get_request())
storage._loaded_data = data or []
return storage
def test_repr(self):
request = self.get_request()
storage = self.storage_class(request)
self.assertEqual(
repr(storage),
f"<{self.storage_class.__qualname__}: request=<HttpRequest>>",
)
def test_add(self):
storage = self.get_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, "Test message 1")
self.assertTrue(storage.added_new)
storage.add(constants.INFO, "Test message 2", extra_tags="tag")
self.assertEqual(len(storage), 2)
def test_add_lazy_translation(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, gettext_lazy("lazy message"))
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 1)
def test_no_update(self):
storage = self.get_storage()
response = self.get_response()
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 0)
def test_add_update(self):
storage = self.get_storage()
response = self.get_response()
storage.add(constants.INFO, "Test message 1")
storage.add(constants.INFO, "Test message 1", extra_tags="tag")
storage.update(response)
storing = self.stored_messages_count(storage, response)
self.assertEqual(storing, 2)
def test_existing_add_read_update(self):
storage = self.get_existing_storage()
response = self.get_response()
storage.add(constants.INFO, "Test message 3")
list(storage) # Simulates a read
storage.update(response)
storing = self.stored_messages_count(storage, response)
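        # Reading after the add consumed every message, so nothing is left to
        # store.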
self.assertEqual(storing, 0)
def test_existing_read_add_update(self):
storage = self.get_existing_storage()
response = self.get_response()
list(storage) # Simulates a read
storage.add(constants.INFO, "Test message 3")
storage.update(response)
storing = self.stored_messages_count(storage, response)
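        # The message added after the read wasn't consumed, so it is stored.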
self.assertEqual(storing, 1)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_full_request_response_cycle(self):
"""
With the message middleware enabled, messages are properly stored and
retrieved across the full request/redirect/response cycle.
"""
data = {
"messages": ["Test message %d" % x for x in range(5)],
}
show_url = reverse("show_message")
for level in ("debug", "info", "success", "warning", "error"):
add_url = reverse("add_message", args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertIn("messages", response.context)
messages = [Message(self.levels[level], msg) for msg in data["messages"]]
self.assertEqual(list(response.context["messages"]), messages)
for msg in data["messages"]:
self.assertContains(response, msg)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_with_template_response(self):
data = {
"messages": ["Test message %d" % x for x in range(5)],
}
show_url = reverse("show_template_response")
for level in self.levels:
add_url = reverse("add_template_response", args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertIn("messages", response.context)
for msg in data["messages"]:
self.assertContains(response, msg)
        # There shouldn't be any messages on a second GET request.
response = self.client.get(show_url)
for msg in data["messages"]:
self.assertNotContains(response, msg)
def test_context_processor_message_levels(self):
show_url = reverse("show_template_response")
response = self.client.get(show_url)
self.assertIn("DEFAULT_MESSAGE_LEVELS", response.context)
self.assertEqual(response.context["DEFAULT_MESSAGE_LEVELS"], DEFAULT_LEVELS)
@override_settings(MESSAGE_LEVEL=constants.DEBUG)
def test_multiple_posts(self):
"""
Messages persist properly when multiple POSTs are made before a GET.
"""
data = {
"messages": ["Test message %d" % x for x in range(5)],
}
show_url = reverse("show_message")
messages = []
for level in ("debug", "info", "success", "warning", "error"):
messages.extend(
Message(self.levels[level], msg) for msg in data["messages"]
)
add_url = reverse("add_message", args=(level,))
self.client.post(add_url, data)
response = self.client.get(show_url)
self.assertIn("messages", response.context)
self.assertEqual(list(response.context["messages"]), messages)
for msg in data["messages"]:
self.assertContains(response, msg)
@modify_settings(
INSTALLED_APPS={"remove": "django.contrib.messages"},
MIDDLEWARE={"remove": "django.contrib.messages.middleware.MessageMiddleware"},
)
@override_settings(
MESSAGE_LEVEL=constants.DEBUG,
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
}
],
)
def test_middleware_disabled(self):
"""
When the middleware is disabled, an exception is raised when one
attempts to store a message.
"""
data = {
"messages": ["Test message %d" % x for x in range(5)],
}
reverse("show_message")
for level in ("debug", "info", "success", "warning", "error"):
add_url = reverse("add_message", args=(level,))
with self.assertRaises(MessageFailure):
self.client.post(add_url, data, follow=True)
@modify_settings(
INSTALLED_APPS={"remove": "django.contrib.messages"},
MIDDLEWARE={"remove": "django.contrib.messages.middleware.MessageMiddleware"},
)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
}
],
)
def test_middleware_disabled_fail_silently(self):
"""
When the middleware is disabled, an exception is not raised
if 'fail_silently' is True.
"""
data = {
"messages": ["Test message %d" % x for x in range(5)],
"fail_silently": True,
}
show_url = reverse("show_message")
for level in ("debug", "info", "success", "warning", "error"):
add_url = reverse("add_message", args=(level,))
response = self.client.post(add_url, data, follow=True)
self.assertRedirects(response, show_url)
self.assertNotIn("messages", response.context)
def stored_messages_count(self, storage, response):
"""
Return the number of messages being stored after a
``storage.update()`` call.
"""
raise NotImplementedError("This method must be set by a subclass.")
def test_get(self):
raise NotImplementedError("This method must be set by a subclass.")
def get_existing_storage(self):
return self.get_storage(
[
Message(constants.INFO, "Test message 1"),
Message(constants.INFO, "Test message 2", extra_tags="tag"),
]
)
def test_existing_read(self):
"""
Reading the existing storage doesn't cause the data to be lost.
"""
storage = self.get_existing_storage()
self.assertFalse(storage.used)
# After iterating the storage engine directly, the used flag is set.
data = list(storage)
self.assertTrue(storage.used)
        # The data is not lost even though it has been iterated.
self.assertEqual(data, list(storage))
def test_existing_add(self):
storage = self.get_existing_storage()
self.assertFalse(storage.added_new)
storage.add(constants.INFO, "Test message 3")
self.assertTrue(storage.added_new)
def test_default_level(self):
# get_level works even with no storage on the request.
request = self.get_request()
self.assertEqual(get_level(request), constants.INFO)
# get_level returns the default level if it hasn't been set.
storage = self.get_storage()
request._messages = storage
self.assertEqual(get_level(request), constants.INFO)
# Only messages of sufficient level get recorded.
add_level_messages(storage)
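        # add_level_messages() adds six messages; the DEBUG one (10) falls
        # below the default INFO level (20), so only five are recorded.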
self.assertEqual(len(storage), 5)
def test_low_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 5))
self.assertEqual(get_level(request), 5)
add_level_messages(storage)
self.assertEqual(len(storage), 6)
def test_high_level(self):
request = self.get_request()
storage = self.storage_class(request)
request._messages = storage
self.assertTrue(set_level(request, 30))
self.assertEqual(get_level(request), 30)
add_level_messages(storage)
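        # Only WARNING (30) and ERROR (40) meet the raised threshold of 30.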
self.assertEqual(len(storage), 2)
@override_settings(MESSAGE_LEVEL=29)
def test_settings_level(self):
request = self.get_request()
storage = self.storage_class(request)
self.assertEqual(get_level(request), 29)
add_level_messages(storage)
self.assertEqual(len(storage), 3)
def test_tags(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
storage.add(constants.INFO, "A generic info message", extra_tags=None)
tags = [msg.tags for msg in storage]
self.assertEqual(
tags, ["info", "", "extra-tag debug", "warning", "error", "success", "info"]
)
def test_level_tag(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.level_tag for msg in storage]
self.assertEqual(tags, ["info", "", "debug", "warning", "error", "success"])
@override_settings(
MESSAGE_TAGS={
constants.INFO: "info",
constants.DEBUG: "",
constants.WARNING: "",
constants.ERROR: "bad",
29: "custom",
}
)
def test_custom_tags(self):
storage = self.get_storage()
storage.level = 0
add_level_messages(storage)
tags = [msg.tags for msg in storage]
self.assertEqual(tags, ["info", "custom", "extra-tag", "", "bad", "success"])
|
a0f2a7701b75337eb446efe01c11c701c8e4a6c7333d6a15c9545e50d9d497a3 | from django.contrib.messages import constants
from django.contrib.messages.storage.base import Message
from django.contrib.messages.storage.session import SessionStorage
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest
from django.test import TestCase
from django.utils.safestring import SafeData, mark_safe
from .base import BaseTests
def set_session_data(storage, messages):
"""
    Set the messages in the backend request's session and remove the
    backend's loaded data cache.
"""
storage.request.session[storage.session_key] = storage.serialize_messages(messages)
if hasattr(storage, "_loaded_data"):
del storage._loaded_data
def stored_session_messages_count(storage):
data = storage.deserialize_messages(
storage.request.session.get(storage.session_key, [])
)
return len(data)
class SessionTests(BaseTests, TestCase):
storage_class = SessionStorage
def get_request(self):
self.session = {}
request = super().get_request()
request.session = self.session
return request
def stored_messages_count(self, storage, response):
return stored_session_messages_count(storage)
def test_no_session(self):
msg = (
"The session-based temporary message storage requires session "
"middleware to be installed, and come before the message "
"middleware in the MIDDLEWARE list."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.storage_class(HttpRequest())
def test_get(self):
storage = self.storage_class(self.get_request())
example_messages = ["test", "me"]
set_session_data(storage, example_messages)
self.assertEqual(list(storage), example_messages)
def test_safedata(self):
"""
A message containing SafeData keeps its safe status when retrieved from
the message storage.
"""
storage = self.get_storage()
message = Message(constants.DEBUG, mark_safe("<b>Hello Django!</b>"))
set_session_data(storage, [message])
self.assertIsInstance(list(storage)[0].message, SafeData)
|
ba64fdf096f66346ccacc111fb8415bfd6ed9ad432826219cb188df9713d9d4b | from django import forms
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
from django.http import HttpResponse, HttpResponseRedirect
from django.template import engines
from django.template.response import TemplateResponse
from django.urls import path, re_path, reverse
from django.views.decorators.cache import never_cache
from django.views.generic.edit import DeleteView, FormView
from .models import SomeObject
TEMPLATE = """{% if messages %}
<ul class="messages">
{% for message in messages %}
<li{% if message.tags %} class="{{ message.tags }}"{% endif %}>
{{ message }}
</li>
{% endfor %}
</ul>
{% endif %}
"""
@never_cache
def add(request, message_type):
    # Don't supply a default of False here, to verify that fail_silently
    # defaults to False when unspecified.
fail_silently = request.POST.get("fail_silently", None)
for msg in request.POST.getlist("messages"):
if fail_silently is not None:
getattr(messages, message_type)(request, msg, fail_silently=fail_silently)
else:
getattr(messages, message_type)(request, msg)
return HttpResponseRedirect(reverse("show_message"))
@never_cache
def add_template_response(request, message_type):
for msg in request.POST.getlist("messages"):
getattr(messages, message_type)(request, msg)
return HttpResponseRedirect(reverse("show_template_response"))
@never_cache
def show(request):
template = engines["django"].from_string(TEMPLATE)
return HttpResponse(template.render(request=request))
@never_cache
def show_template_response(request):
template = engines["django"].from_string(TEMPLATE)
return TemplateResponse(request, template)
class ContactForm(forms.Form):
name = forms.CharField(required=True)
slug = forms.SlugField(required=True)
class ContactFormViewWithMsg(SuccessMessageMixin, FormView):
form_class = ContactForm
success_url = show
success_message = "%(name)s was created successfully"
class DeleteFormViewWithMsg(SuccessMessageMixin, DeleteView):
model = SomeObject
success_url = "/show/"
success_message = "Object was deleted successfully"
urlpatterns = [
re_path("^add/(debug|info|success|warning|error)/$", add, name="add_message"),
path("add/msg/", ContactFormViewWithMsg.as_view(), name="add_success_msg"),
path(
"delete/msg/<int:pk>",
DeleteFormViewWithMsg.as_view(),
name="success_msg_on_delete",
),
path("show/", show, name="show_message"),
re_path(
"^template_response/add/(debug|info|success|warning|error)/$",
add_template_response,
name="add_template_response",
),
path(
"template_response/show/", show_template_response, name="show_template_response"
),
]
|
f5b93eefa160cbe3f8d0b4bfcb9b75cf6a523464e03e98393e485febd46af581 | from django.contrib import messages
from django.test import RequestFactory, SimpleTestCase
class DummyStorage:
"""
    Dummy message store for testing the API methods.
"""
def __init__(self):
self.store = []
def add(self, level, message, extra_tags=""):
self.store.append(message)
class ApiTests(SimpleTestCase):
rf = RequestFactory()
def setUp(self):
self.request = self.rf.request()
self.storage = DummyStorage()
def test_ok(self):
msg = "some message"
self.request._messages = self.storage
messages.add_message(self.request, messages.DEBUG, msg)
self.assertIn(msg, self.storage.store)
def test_request_is_none(self):
msg = "add_message() argument must be an HttpRequest object, not 'NoneType'."
self.request._messages = self.storage
with self.assertRaisesMessage(TypeError, msg):
messages.add_message(None, messages.DEBUG, "some message")
self.assertEqual(self.storage.store, [])
def test_middleware_missing(self):
msg = (
"You cannot add messages without installing "
"django.contrib.messages.middleware.MessageMiddleware"
)
with self.assertRaisesMessage(messages.MessageFailure, msg):
messages.add_message(self.request, messages.DEBUG, "some message")
self.assertEqual(self.storage.store, [])
def test_middleware_missing_silently(self):
messages.add_message(
self.request, messages.DEBUG, "some message", fail_silently=True
)
self.assertEqual(self.storage.store, [])
class CustomRequest:
def __init__(self, request):
self._request = request
def __getattribute__(self, attr):
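        # Look the attribute up on the wrapper first, then fall back to the
        # wrapped HttpRequest, mimicking request proxies such as DRF's Request.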
try:
return super().__getattribute__(attr)
except AttributeError:
return getattr(self._request, attr)
class CustomRequestApiTests(ApiTests):
"""
    add_message() should use duck typing to allow request wrappers such as the
one in Django REST framework.
"""
def setUp(self):
super().setUp()
self.request = CustomRequest(self.request)
|
855307a552e4402220c3b26102b786ef083cac1725535454be4dc65a594c8dad | import unittest
from django.contrib.messages.middleware import MessageMiddleware
from django.http import HttpRequest, HttpResponse
class MiddlewareTests(unittest.TestCase):
def test_response_without_messages(self):
"""
        MessageMiddleware is tolerant of messages not existing on the request.
"""
request = HttpRequest()
response = HttpResponse()
MessageMiddleware(lambda req: HttpResponse()).process_response(
request, response
)
|
0274f3289d71f0a6bd9b15b24e7b14401bcfd0d39e236efbeb199701d7696b58 | import os
from pathlib import Path
from unittest import mock
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase, override_settings
from django.urls.resolvers import LocaleRegexDescriptor, RegexPattern
from django.utils import translation
here = os.path.dirname(os.path.abspath(__file__))
@override_settings(LOCALE_PATHS=[os.path.join(here, "translations", "locale")])
class LocaleRegexDescriptorTests(SimpleTestCase):
def setUp(self):
translation.trans_real._translations = {}
def tearDown(self):
translation.trans_real._translations = {}
def test_translated_regex_compiled_per_language(self):
provider = RegexPattern(translation.gettext_lazy("^foo/$"))
with translation.override("de"):
de_compiled = provider.regex
# compiled only once per language
error = AssertionError(
"tried to compile url regex twice for the same language"
)
with mock.patch("django.urls.resolvers.re.compile", side_effect=error):
de_compiled_2 = provider.regex
with translation.override("fr"):
fr_compiled = provider.regex
self.assertEqual(fr_compiled.pattern, "^foo-fr/$")
self.assertEqual(de_compiled.pattern, "^foo-de/$")
self.assertEqual(de_compiled, de_compiled_2)
def test_nontranslated_regex_compiled_once(self):
provider = RegexPattern("^foo/$")
with translation.override("de"):
de_compiled = provider.regex
with translation.override("fr"):
# compiled only once, regardless of language
error = AssertionError("tried to compile non-translated url regex twice")
with mock.patch("django.urls.resolvers.re.compile", side_effect=error):
fr_compiled = provider.regex
self.assertEqual(de_compiled.pattern, "^foo/$")
self.assertEqual(fr_compiled.pattern, "^foo/$")
def test_regex_compile_error(self):
"""Regex errors are re-raised as ImproperlyConfigured."""
provider = RegexPattern("*")
msg = '"*" is not a valid regular expression: nothing to repeat'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
provider.regex
def test_access_locale_regex_descriptor(self):
self.assertIsInstance(RegexPattern.regex, LocaleRegexDescriptor)
@override_settings(LOCALE_PATHS=[Path(here) / "translations" / "locale"])
class LocaleRegexDescriptorPathLibTests(LocaleRegexDescriptorTests):
pass
|
8240c05cf9c23f97743bcc353a1213f6683455de9182cd3da49e5968ad5c3fc9 | # I just raise an AttributeError to confuse the view loading mechanism
raise AttributeError("I am here to confuse django.urls.get_callable")
|
d632385ff71be70395bd39216c40cd49839646f726a32273e319c06180e70fca | # Used by the ErrorHandlerResolutionTests test case.
urlpatterns = []
handler400 = "urlpatterns_reverse.views.empty_view"
handler404 = "urlpatterns_reverse.views.empty_view"
handler500 = "urlpatterns_reverse.views.empty_view"
|
1928dc1c7777d4fe2991f3be4e1904dcbfc4ae9b8c787cc0b2d3f267aac9af2d | from django.urls import include, path, re_path
from .views import empty_view
urlpatterns = [
path("", empty_view, name="named-url3"),
re_path(r"^extra/(?P<extra>\w+)/$", empty_view, name="named-url4"),
re_path(r"^(?P<one>[0-9]+)|(?P<two>[0-9]+)/$", empty_view),
path("included/", include("urlpatterns_reverse.included_named_urls2")),
]
|
13a1535bb90819e35a2ed69b754c8362bd7880f6f2d30987413a42780e98072f | """
Unit tests for reverse URL lookups.
"""
import pickle
import sys
import threading
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.http import HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect
from django.shortcuts import redirect
from django.test import RequestFactory, SimpleTestCase, TestCase, override_settings
from django.test.utils import override_script_prefix
from django.urls import (
NoReverseMatch,
Resolver404,
ResolverMatch,
URLPattern,
URLResolver,
get_callable,
get_resolver,
get_urlconf,
include,
path,
re_path,
resolve,
reverse,
reverse_lazy,
)
from django.urls.resolvers import RegexPattern
from . import middleware, urlconf_outer, views
from .utils import URLObject
from .views import empty_view
resolve_test_data = (
# These entries are in the format:
# (path, url_name, app_name, namespace, view_name, func, args, kwargs)
# Simple case
(
"/normal/42/37/",
"normal-view",
"",
"",
"normal-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/view_class/42/37/",
"view-class",
"",
"",
"view-class",
views.view_class_instance,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/included/normal/42/37/",
"inc-normal-view",
"included_namespace_urls",
"included_namespace_urls",
"included_namespace_urls:inc-normal-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/included/view_class/42/37/",
"inc-view-class",
"included_namespace_urls",
"included_namespace_urls",
"included_namespace_urls:inc-view-class",
views.view_class_instance,
(),
{"arg1": "42", "arg2": "37"},
),
# Unnamed args are dropped if you have *any* kwargs in a pattern
(
"/mixed_args/42/37/",
"mixed-args",
"",
"",
"mixed-args",
views.empty_view,
(),
{"arg2": "37"},
),
(
"/included/mixed_args/42/37/",
"inc-mixed-args",
"included_namespace_urls",
"included_namespace_urls",
"included_namespace_urls:inc-mixed-args",
views.empty_view,
(),
{"arg2": "37"},
),
(
"/included/12/mixed_args/42/37/",
"inc-mixed-args",
"included_namespace_urls",
"included_namespace_urls",
"included_namespace_urls:inc-mixed-args",
views.empty_view,
(),
{"arg2": "37"},
),
# Unnamed views should have None as the url_name. Regression data for #21157.
(
"/unnamed/normal/42/37/",
None,
"",
"",
"urlpatterns_reverse.views.empty_view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/unnamed/view_class/42/37/",
None,
"",
"",
"urlpatterns_reverse.views.ViewClass",
views.view_class_instance,
(),
{"arg1": "42", "arg2": "37"},
),
# If you have no kwargs, you get an args list.
(
"/no_kwargs/42/37/",
"no-kwargs",
"",
"",
"no-kwargs",
views.empty_view,
("42", "37"),
{},
),
(
"/included/no_kwargs/42/37/",
"inc-no-kwargs",
"included_namespace_urls",
"included_namespace_urls",
"included_namespace_urls:inc-no-kwargs",
views.empty_view,
("42", "37"),
{},
),
(
"/included/12/no_kwargs/42/37/",
"inc-no-kwargs",
"included_namespace_urls",
"included_namespace_urls",
"included_namespace_urls:inc-no-kwargs",
views.empty_view,
("12", "42", "37"),
{},
),
# Namespaces
(
"/test1/inner/42/37/",
"urlobject-view",
"testapp",
"test-ns1",
"test-ns1:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/included/test3/inner/42/37/",
"urlobject-view",
"included_namespace_urls:testapp",
"included_namespace_urls:test-ns3",
"included_namespace_urls:test-ns3:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/ns-included1/normal/42/37/",
"inc-normal-view",
"included_namespace_urls",
"inc-ns1",
"inc-ns1:inc-normal-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/included/test3/inner/42/37/",
"urlobject-view",
"included_namespace_urls:testapp",
"included_namespace_urls:test-ns3",
"included_namespace_urls:test-ns3:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/default/inner/42/37/",
"urlobject-view",
"testapp",
"testapp",
"testapp:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/other2/inner/42/37/",
"urlobject-view",
"nodefault",
"other-ns2",
"other-ns2:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/other1/inner/42/37/",
"urlobject-view",
"nodefault",
"other-ns1",
"other-ns1:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
# Nested namespaces
(
"/ns-included1/test3/inner/42/37/",
"urlobject-view",
"included_namespace_urls:testapp",
"inc-ns1:test-ns3",
"inc-ns1:test-ns3:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/ns-included1/ns-included4/ns-included2/test3/inner/42/37/",
"urlobject-view",
"included_namespace_urls:namespace_urls:included_namespace_urls:testapp",
"inc-ns1:inc-ns4:inc-ns2:test-ns3",
"inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/app-included/test3/inner/42/37/",
"urlobject-view",
"included_namespace_urls:testapp",
"inc-app:test-ns3",
"inc-app:test-ns3:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
(
"/app-included/ns-included4/ns-included2/test3/inner/42/37/",
"urlobject-view",
"included_namespace_urls:namespace_urls:included_namespace_urls:testapp",
"inc-app:inc-ns4:inc-ns2:test-ns3",
"inc-app:inc-ns4:inc-ns2:test-ns3:urlobject-view",
views.empty_view,
(),
{"arg1": "42", "arg2": "37"},
),
# Namespaces capturing variables
(
"/inc70/",
"inner-nothing",
"included_urls",
"inc-ns5",
"inc-ns5:inner-nothing",
views.empty_view,
(),
{"outer": "70"},
),
(
"/inc78/extra/foobar/",
"inner-extra",
"included_urls",
"inc-ns5",
"inc-ns5:inner-extra",
views.empty_view,
(),
{"outer": "78", "extra": "foobar"},
),
)
test_data = (
("places", "/places/3/", [3], {}),
("places", "/places/3/", ["3"], {}),
("places", NoReverseMatch, ["a"], {}),
("places", NoReverseMatch, [], {}),
("places?", "/place/", [], {}),
("places+", "/places/", [], {}),
("places*", "/place/", [], {}),
("places2?", "/", [], {}),
("places2+", "/places/", [], {}),
("places2*", "/", [], {}),
("places3", "/places/4/", [4], {}),
("places3", "/places/harlem/", ["harlem"], {}),
("places3", NoReverseMatch, ["harlem64"], {}),
("places4", "/places/3/", [], {"id": 3}),
("people", NoReverseMatch, [], {}),
("people", "/people/adrian/", ["adrian"], {}),
("people", "/people/adrian/", [], {"name": "adrian"}),
("people", NoReverseMatch, ["name with spaces"], {}),
("people", NoReverseMatch, [], {"name": "name with spaces"}),
("people2", "/people/name/", [], {}),
("people2a", "/people/name/fred/", ["fred"], {}),
("people_backref", "/people/nate-nate/", ["nate"], {}),
("people_backref", "/people/nate-nate/", [], {"name": "nate"}),
("optional", "/optional/fred/", [], {"name": "fred"}),
("optional", "/optional/fred/", ["fred"], {}),
("named_optional", "/optional/1/", [1], {}),
("named_optional", "/optional/1/", [], {"arg1": 1}),
("named_optional", "/optional/1/2/", [1, 2], {}),
("named_optional", "/optional/1/2/", [], {"arg1": 1, "arg2": 2}),
("named_optional_terminated", "/optional/1/", [1], {}),
("named_optional_terminated", "/optional/1/", [], {"arg1": 1}),
("named_optional_terminated", "/optional/1/2/", [1, 2], {}),
("named_optional_terminated", "/optional/1/2/", [], {"arg1": 1, "arg2": 2}),
("hardcoded", "/hardcoded/", [], {}),
("hardcoded2", "/hardcoded/doc.pdf", [], {}),
("people3", "/people/il/adrian/", [], {"state": "il", "name": "adrian"}),
("people3", NoReverseMatch, [], {"state": "il"}),
("people3", NoReverseMatch, [], {"name": "adrian"}),
("people4", NoReverseMatch, [], {"state": "il", "name": "adrian"}),
("people6", "/people/il/test/adrian/", ["il/test", "adrian"], {}),
("people6", "/people//adrian/", ["adrian"], {}),
("range", "/character_set/a/", [], {}),
("range2", "/character_set/x/", [], {}),
("price", "/price/$10/", ["10"], {}),
("price2", "/price/$10/", ["10"], {}),
("price3", "/price/$10/", ["10"], {}),
(
"product",
"/product/chocolate+($2.00)/",
[],
{"price": "2.00", "product": "chocolate"},
),
("headlines", "/headlines/2007.5.21/", [], {"year": 2007, "month": 5, "day": 21}),
(
"windows",
r"/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/",
[],
{"drive_name": "C", "path": r"Documents and Settings\spam"},
),
("special", r"/special_chars/~@+%5C$*%7C/", [r"~@+\$*|"], {}),
("special", r"/special_chars/some%20resource/", [r"some resource"], {}),
("special", r"/special_chars/10%25%20complete/", [r"10% complete"], {}),
("special", r"/special_chars/some%20resource/", [], {"chars": r"some resource"}),
("special", r"/special_chars/10%25%20complete/", [], {"chars": r"10% complete"}),
("special", NoReverseMatch, [""], {}),
("mixed", "/john/0/", [], {"name": "john"}),
("repeats", "/repeats/a/", [], {}),
("repeats2", "/repeats/aa/", [], {}),
("repeats3", "/repeats/aa/", [], {}),
("test", "/test/1", [], {}),
("inner-nothing", "/outer/42/", [], {"outer": "42"}),
("inner-nothing", "/outer/42/", ["42"], {}),
("inner-nothing", NoReverseMatch, ["foo"], {}),
("inner-extra", "/outer/42/extra/inner/", [], {"extra": "inner", "outer": "42"}),
("inner-extra", "/outer/42/extra/inner/", ["42", "inner"], {}),
("inner-extra", NoReverseMatch, ["fred", "inner"], {}),
("inner-no-kwargs", "/outer-no-kwargs/42/inner-no-kwargs/1/", ["42", "1"], {}),
("disjunction", NoReverseMatch, ["foo"], {}),
("inner-disjunction", NoReverseMatch, ["10", "11"], {}),
("extra-places", "/e-places/10/", ["10"], {}),
("extra-people", "/e-people/fred/", ["fred"], {}),
("extra-people", "/e-people/fred/", [], {"name": "fred"}),
("part", "/part/one/", [], {"value": "one"}),
("part", "/prefix/xx/part/one/", [], {"value": "one", "prefix": "xx"}),
("part2", "/part2/one/", [], {"value": "one"}),
("part2", "/part2/", [], {}),
("part2", "/prefix/xx/part2/one/", [], {"value": "one", "prefix": "xx"}),
("part2", "/prefix/xx/part2/", [], {"prefix": "xx"}),
# Tests for nested groups. Nested capturing groups will only work if you
# *only* supply the correct outer group.
("nested-noncapture", "/nested/noncapture/opt", [], {"p": "opt"}),
("nested-capture", "/nested/capture/opt/", ["opt/"], {}),
("nested-capture", NoReverseMatch, [], {"p": "opt"}),
("nested-mixedcapture", "/nested/capture/mixed/opt", ["opt"], {}),
("nested-mixedcapture", NoReverseMatch, [], {"p": "opt"}),
("nested-namedcapture", "/nested/capture/named/opt/", [], {"outer": "opt/"}),
("nested-namedcapture", NoReverseMatch, [], {"outer": "opt/", "inner": "opt"}),
("nested-namedcapture", NoReverseMatch, [], {"inner": "opt"}),
("non_path_include", "/includes/non_path_include/", [], {}),
# Tests for #13154
("defaults", "/defaults_view1/3/", [], {"arg1": 3, "arg2": 1}),
("defaults", "/defaults_view2/3/", [], {"arg1": 3, "arg2": 2}),
("defaults", NoReverseMatch, [], {"arg1": 3, "arg2": 3}),
("defaults", NoReverseMatch, [], {"arg2": 1}),
# Security tests
("security", "/%2Fexample.com/security/", ["/example.com"], {}),
)
@override_settings(ROOT_URLCONF="urlpatterns_reverse.no_urls")
class NoURLPatternsTests(SimpleTestCase):
def test_no_urls_exception(self):
"""
URLResolver should raise an exception when no urlpatterns exist.
"""
resolver = URLResolver(RegexPattern(r"^$"), settings.ROOT_URLCONF)
with self.assertRaisesMessage(
ImproperlyConfigured,
"The included URLconf 'urlpatterns_reverse.no_urls' does not "
"appear to have any patterns in it. If you see the 'urlpatterns' "
"variable with valid patterns in the file then the issue is "
"probably caused by a circular import.",
):
getattr(resolver, "url_patterns")
@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls")
class URLPatternReverse(SimpleTestCase):
def test_urlpattern_reverse(self):
for name, expected, args, kwargs in test_data:
with self.subTest(name=name, args=args, kwargs=kwargs):
try:
got = reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.assertEqual(NoReverseMatch, expected)
else:
self.assertEqual(got, expected)
def test_reverse_none(self):
        # Reversing None should raise an error, not return the last unnamed view.
with self.assertRaises(NoReverseMatch):
reverse(None)
def test_mixing_args_and_kwargs(self):
msg = "Don't mix *args and **kwargs in call to reverse()!"
with self.assertRaisesMessage(ValueError, msg):
reverse("name", args=["a"], kwargs={"b": "c"})
@override_script_prefix("/{{invalid}}/")
def test_prefix_braces(self):
self.assertEqual(
"/%7B%7Binvalid%7D%7D/includes/non_path_include/",
reverse("non_path_include"),
)
def test_prefix_parenthesis(self):
# Parentheses are allowed and should not cause errors or be escaped
with override_script_prefix("/bogus)/"):
self.assertEqual(
"/bogus)/includes/non_path_include/", reverse("non_path_include")
)
with override_script_prefix("/(bogus)/"):
self.assertEqual(
"/(bogus)/includes/non_path_include/", reverse("non_path_include")
)
@override_script_prefix("/bump%20map/")
def test_prefix_format_char(self):
self.assertEqual(
"/bump%2520map/includes/non_path_include/", reverse("non_path_include")
)
@override_script_prefix("/%7Eme/")
def test_non_urlsafe_prefix_with_args(self):
# Regression for #20022, adjusted for #24013 because ~ is an unreserved
# character. Tests whether % is escaped.
self.assertEqual("/%257Eme/places/1/", reverse("places", args=[1]))
def test_patterns_reported(self):
# Regression for #17076
with self.assertRaisesMessage(
NoReverseMatch, r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"
):
            # This URL exists, but requires an argument.
reverse("people", args=[])
@override_script_prefix("/script:name/")
def test_script_name_escaping(self):
self.assertEqual(
reverse("optional", args=["foo:bar"]), "/script:name/optional/foo:bar/"
)
def test_view_not_found_message(self):
msg = (
"Reverse for 'nonexistent-view' not found. 'nonexistent-view' "
"is not a valid view function or pattern name."
)
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse("nonexistent-view")
def test_no_args_message(self):
msg = "Reverse for 'places' with no arguments not found. 1 pattern(s) tried:"
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse("places")
def test_illegal_args_message(self):
msg = (
"Reverse for 'places' with arguments '(1, 2)' not found. 1 pattern(s) "
"tried:"
)
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse("places", args=(1, 2))
def test_illegal_kwargs_message(self):
msg = (
"Reverse for 'places' with keyword arguments '{'arg1': 2}' not found. 1 "
"pattern(s) tried:"
)
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse("places", kwargs={"arg1": 2})
class ResolverTests(SimpleTestCase):
def test_resolver_repr(self):
"""
Test repr of URLResolver, especially when urlconf_name is a list
(#17892).
"""
# Pick a resolver from a namespaced URLconf
resolver = get_resolver("urlpatterns_reverse.namespace_urls")
sub_resolver = resolver.namespace_dict["test-ns1"][1]
self.assertIn("<URLPattern list>", repr(sub_resolver))
def test_reverse_lazy_object_coercion_by_resolve(self):
"""
        The lazy object returned by reverse_lazy() is coerced to text by
        resolve(). Prior to #21043, this would raise a TypeError.
"""
urls = "urlpatterns_reverse.named_urls"
proxy_url = reverse_lazy("named-url1", urlconf=urls)
resolver = get_resolver(urls)
resolver.resolve(proxy_url)
def test_resolver_reverse(self):
resolver = get_resolver("urlpatterns_reverse.named_urls")
test_urls = [
# (name, args, kwargs, expected)
("named-url1", (), {}, ""),
("named-url2", ("arg",), {}, "extra/arg/"),
("named-url2", (), {"extra": "arg"}, "extra/arg/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(resolver.reverse(name, *args, **kwargs), expected)
def test_resolver_reverse_conflict(self):
"""
URL pattern name arguments don't need to be unique. The last registered
pattern takes precedence for conflicting names.
"""
resolver = get_resolver("urlpatterns_reverse.named_urls_conflict")
test_urls = [
# (name, args, kwargs, expected)
# Without arguments, the last URL in urlpatterns has precedence.
("name-conflict", (), {}, "conflict/"),
# With an arg, the last URL in urlpatterns has precedence.
("name-conflict", ("arg",), {}, "conflict-last/arg/"),
# With a kwarg, other URL patterns can be reversed.
("name-conflict", (), {"first": "arg"}, "conflict-first/arg/"),
("name-conflict", (), {"middle": "arg"}, "conflict-middle/arg/"),
("name-conflict", (), {"last": "arg"}, "conflict-last/arg/"),
# The number and order of the arguments don't interfere with reversing.
("name-conflict", ("arg", "arg"), {}, "conflict/arg/arg/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(resolver.reverse(name, *args, **kwargs), expected)
def test_non_regex(self):
"""
A Resolver404 is raised if resolving doesn't meet the basic
requirements of a path to match - i.e., at the very least, it matches
the root pattern '^/'. Never return None from resolve() to prevent a
TypeError from occurring later (#10834).
"""
test_urls = ["", "a", "\\", "."]
for path_ in test_urls:
with self.subTest(path=path_):
with self.assertRaises(Resolver404):
resolve(path_)
def test_404_tried_urls_have_names(self):
"""
        The list of URLs that comes back from a Resolver404 exception is in
        the right format for printing out in the DEBUG 404 page, with both
        the patterns and URL names, if available.
"""
urls = "urlpatterns_reverse.named_urls"
# this list matches the expected URL types and names returned when
# you try to resolve a nonexistent URL in the first level of included
# URLs in named_urls.py (e.g., '/included/nonexistent-url')
url_types_names = [
[{"type": URLPattern, "name": "named-url1"}],
[{"type": URLPattern, "name": "named-url2"}],
[{"type": URLPattern, "name": None}],
[{"type": URLResolver}, {"type": URLPattern, "name": "named-url3"}],
[{"type": URLResolver}, {"type": URLPattern, "name": "named-url4"}],
[{"type": URLResolver}, {"type": URLPattern, "name": None}],
[{"type": URLResolver}, {"type": URLResolver}],
]
with self.assertRaisesMessage(Resolver404, "tried") as cm:
resolve("/included/nonexistent-url", urlconf=urls)
e = cm.exception
# make sure we at least matched the root ('/') url resolver:
self.assertIn("tried", e.args[0])
self.assertEqual(
len(e.args[0]["tried"]),
len(url_types_names),
"Wrong number of tried URLs returned. Expected %s, got %s."
% (len(url_types_names), len(e.args[0]["tried"])),
)
for tried, expected in zip(e.args[0]["tried"], url_types_names):
for t, e in zip(tried, expected):
with self.subTest(t):
                    self.assertIsInstance(
                        t,
                        e["type"],
                        "%s is not an instance of %s" % (t, e["type"]),
                    )
if "name" in e:
if not e["name"]:
self.assertIsNone(
t.name, "Expected no URL name but found %s." % t.name
)
else:
self.assertEqual(
t.name,
e["name"],
'Wrong URL name. Expected "%s", got "%s".'
% (e["name"], t.name),
)
def test_namespaced_view_detail(self):
resolver = get_resolver("urlpatterns_reverse.nested_urls")
self.assertTrue(resolver._is_callback("urlpatterns_reverse.nested_urls.view1"))
self.assertTrue(resolver._is_callback("urlpatterns_reverse.nested_urls.view2"))
self.assertTrue(resolver._is_callback("urlpatterns_reverse.nested_urls.View3"))
self.assertFalse(resolver._is_callback("urlpatterns_reverse.nested_urls.blub"))
def test_view_detail_as_method(self):
# Views which have a class name as part of their path.
resolver = get_resolver("urlpatterns_reverse.method_view_urls")
self.assertTrue(
resolver._is_callback(
"urlpatterns_reverse.method_view_urls.ViewContainer.method_view"
)
)
self.assertTrue(
resolver._is_callback(
"urlpatterns_reverse.method_view_urls.ViewContainer.classmethod_view"
)
)
def test_populate_concurrency(self):
"""
URLResolver._populate() can be called concurrently, but not more
than once per thread (#26888).
"""
resolver = URLResolver(RegexPattern(r"^/"), "urlpatterns_reverse.urls")
resolver._local.populating = True
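        # ``populating`` is stored on a thread-local, so setting it here must
        # not stop the worker thread below from populating the reverse dict.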
thread = threading.Thread(target=resolver._populate)
thread.start()
thread.join()
self.assertNotEqual(resolver._reverse_dict, {})
@override_settings(ROOT_URLCONF="urlpatterns_reverse.reverse_lazy_urls")
class ReverseLazyTest(TestCase):
def test_redirect_with_lazy_reverse(self):
response = self.client.get("/redirect/")
self.assertRedirects(response, "/redirected_to/", status_code=302)
def test_user_permission_with_lazy_reverse(self):
alfred = User.objects.create_user(
"alfred", "[email protected]", password="testpw"
)
response = self.client.get("/login_required_view/")
self.assertRedirects(
response, "/login/?next=/login_required_view/", status_code=302
)
self.client.force_login(alfred)
response = self.client.get("/login_required_view/")
self.assertEqual(response.status_code, 200)
def test_inserting_reverse_lazy_into_string(self):
self.assertEqual(
"Some URL: %s" % reverse_lazy("some-login-page"), "Some URL: /login/"
)
def test_build_absolute_uri(self):
factory = RequestFactory()
request = factory.get("/")
self.assertEqual(
request.build_absolute_uri(reverse_lazy("some-login-page")),
"http://testserver/login/",
)
class ReverseLazySettingsTest(AdminScriptTestCase):
"""
reverse_lazy can be used in settings without causing a circular
import error.
"""
def setUp(self):
super().setUp()
self.write_settings(
"settings.py",
extra=(
"from django.urls import reverse_lazy\n"
"LOGIN_URL = reverse_lazy('login')"
),
)
def test_lazy_in_settings(self):
out, err = self.run_manage(["check"])
self.assertNoOutput(err)
@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls")
class ReverseShortcutTests(SimpleTestCase):
def test_redirect_to_object(self):
# We don't really need a model; just something with a get_absolute_url
class FakeObj:
def get_absolute_url(self):
return "/hi-there/"
res = redirect(FakeObj())
self.assertIsInstance(res, HttpResponseRedirect)
self.assertEqual(res.url, "/hi-there/")
res = redirect(FakeObj(), permanent=True)
self.assertIsInstance(res, HttpResponsePermanentRedirect)
self.assertEqual(res.url, "/hi-there/")
def test_redirect_to_view_name(self):
res = redirect("hardcoded2")
self.assertEqual(res.url, "/hardcoded/doc.pdf")
res = redirect("places", 1)
self.assertEqual(res.url, "/places/1/")
res = redirect("headlines", year="2008", month="02", day="17")
self.assertEqual(res.url, "/headlines/2008.02.17/")
with self.assertRaises(NoReverseMatch):
redirect("not-a-view")
def test_redirect_to_url(self):
res = redirect("/foo/")
self.assertEqual(res.url, "/foo/")
res = redirect("http://example.com/")
self.assertEqual(res.url, "http://example.com/")
# Assert that we can redirect using UTF-8 strings
res = redirect("/æøå/abc/")
self.assertEqual(res.url, "/%C3%A6%C3%B8%C3%A5/abc/")
# Assert that no imports are attempted when dealing with a relative path
        # (previously, the below would result in a UnicodeEncodeError from __import__)
res = redirect("/æøå.abc/")
self.assertEqual(res.url, "/%C3%A6%C3%B8%C3%A5.abc/")
res = redirect("os.path")
self.assertEqual(res.url, "os.path")
def test_no_illegal_imports(self):
# modules that are not listed in urlpatterns should not be importable
redirect("urlpatterns_reverse.nonimported_module.view")
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
def test_reverse_by_path_nested(self):
# Views added to urlpatterns using include() should be reversible.
from .views import nested_view
self.assertEqual(reverse(nested_view), "/includes/nested_path/")
def test_redirect_view_object(self):
from .views import absolute_kwargs_view
res = redirect(absolute_kwargs_view)
self.assertEqual(res.url, "/absolute_arg_view/")
with self.assertRaises(NoReverseMatch):
redirect(absolute_kwargs_view, wrong_argument=None)
@override_settings(ROOT_URLCONF="urlpatterns_reverse.namespace_urls")
class NamespaceTests(SimpleTestCase):
def test_ambiguous_object(self):
"""
Names deployed via dynamic URL objects that require namespaces can't
be resolved.
"""
test_urls = [
("urlobject-view", [], {}),
("urlobject-view", [37, 42], {}),
("urlobject-view", [], {"arg1": 42, "arg2": 37}),
]
for name, args, kwargs in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
with self.assertRaises(NoReverseMatch):
reverse(name, args=args, kwargs=kwargs)
def test_ambiguous_urlpattern(self):
"""
Names deployed via dynamic URL objects that require namespaces can't
be resolved.
"""
test_urls = [
("inner-nothing", [], {}),
("inner-nothing", [37, 42], {}),
("inner-nothing", [], {"arg1": 42, "arg2": 37}),
]
for name, args, kwargs in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
with self.assertRaises(NoReverseMatch):
reverse(name, args=args, kwargs=kwargs)
def test_non_existent_namespace(self):
"""Nonexistent namespaces raise errors."""
test_urls = [
"blahblah:urlobject-view",
"test-ns1:blahblah:urlobject-view",
]
for name in test_urls:
with self.subTest(name=name):
with self.assertRaises(NoReverseMatch):
reverse(name)
def test_normal_name(self):
"""Normal lookups work as expected."""
test_urls = [
("normal-view", [], {}, "/normal/"),
("normal-view", [37, 42], {}, "/normal/37/42/"),
("normal-view", [], {"arg1": 42, "arg2": 37}, "/normal/42/37/"),
("special-view", [], {}, "/+%5C$*/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_simple_included_name(self):
"""Normal lookups work on names included from other patterns."""
test_urls = [
("included_namespace_urls:inc-normal-view", [], {}, "/included/normal/"),
(
"included_namespace_urls:inc-normal-view",
[37, 42],
{},
"/included/normal/37/42/",
),
(
"included_namespace_urls:inc-normal-view",
[],
{"arg1": 42, "arg2": 37},
"/included/normal/42/37/",
),
("included_namespace_urls:inc-special-view", [], {}, "/included/+%5C$*/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespace_object(self):
"""Dynamic URL objects can be found using a namespace."""
test_urls = [
("test-ns1:urlobject-view", [], {}, "/test1/inner/"),
("test-ns1:urlobject-view", [37, 42], {}, "/test1/inner/37/42/"),
(
"test-ns1:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"/test1/inner/42/37/",
),
("test-ns1:urlobject-special-view", [], {}, "/test1/inner/+%5C$*/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_object(self):
"""
Dynamic URL objects can return a (pattern, app_name) 2-tuple, and
include() can set the namespace.
"""
test_urls = [
("new-ns1:urlobject-view", [], {}, "/newapp1/inner/"),
("new-ns1:urlobject-view", [37, 42], {}, "/newapp1/inner/37/42/"),
(
"new-ns1:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"/newapp1/inner/42/37/",
),
("new-ns1:urlobject-special-view", [], {}, "/newapp1/inner/+%5C$*/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_object_default_namespace(self):
"""
Namespace defaults to app_name when including a (pattern, app_name)
2-tuple.
"""
test_urls = [
("newapp:urlobject-view", [], {}, "/new-default/inner/"),
("newapp:urlobject-view", [37, 42], {}, "/new-default/inner/37/42/"),
(
"newapp:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"/new-default/inner/42/37/",
),
("newapp:urlobject-special-view", [], {}, "/new-default/inner/+%5C$*/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_embedded_namespace_object(self):
"""Namespaces can be installed anywhere in the URL pattern tree."""
test_urls = [
(
"included_namespace_urls:test-ns3:urlobject-view",
[],
{},
"/included/test3/inner/",
),
(
"included_namespace_urls:test-ns3:urlobject-view",
[37, 42],
{},
"/included/test3/inner/37/42/",
),
(
"included_namespace_urls:test-ns3:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"/included/test3/inner/42/37/",
),
(
"included_namespace_urls:test-ns3:urlobject-special-view",
[],
{},
"/included/test3/inner/+%5C$*/",
),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespace_pattern(self):
"""Namespaces can be applied to include()'d urlpatterns."""
test_urls = [
("inc-ns1:inc-normal-view", [], {}, "/ns-included1/normal/"),
("inc-ns1:inc-normal-view", [37, 42], {}, "/ns-included1/normal/37/42/"),
(
"inc-ns1:inc-normal-view",
[],
{"arg1": 42, "arg2": 37},
"/ns-included1/normal/42/37/",
),
("inc-ns1:inc-special-view", [], {}, "/ns-included1/+%5C$*/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_name_pattern(self):
"""
Namespaces can be applied to include()'d urlpatterns that set an
app_name attribute.
"""
test_urls = [
("app-ns1:inc-normal-view", [], {}, "/app-included1/normal/"),
("app-ns1:inc-normal-view", [37, 42], {}, "/app-included1/normal/37/42/"),
(
"app-ns1:inc-normal-view",
[],
{"arg1": 42, "arg2": 37},
"/app-included1/normal/42/37/",
),
("app-ns1:inc-special-view", [], {}, "/app-included1/+%5C$*/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespace_pattern_with_variable_prefix(self):
"""
Using include() with namespaces when there is a regex variable in front
of it.
"""
test_urls = [
("inc-outer:inc-normal-view", [], {"outer": 42}, "/ns-outer/42/normal/"),
("inc-outer:inc-normal-view", [42], {}, "/ns-outer/42/normal/"),
(
"inc-outer:inc-normal-view",
[],
{"arg1": 37, "arg2": 4, "outer": 42},
"/ns-outer/42/normal/37/4/",
),
("inc-outer:inc-normal-view", [42, 37, 4], {}, "/ns-outer/42/normal/37/4/"),
("inc-outer:inc-special-view", [], {"outer": 42}, "/ns-outer/42/+%5C$*/"),
("inc-outer:inc-special-view", [42], {}, "/ns-outer/42/+%5C$*/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_multiple_namespace_pattern(self):
"""Namespaces can be embedded."""
test_urls = [
("inc-ns1:test-ns3:urlobject-view", [], {}, "/ns-included1/test3/inner/"),
(
"inc-ns1:test-ns3:urlobject-view",
[37, 42],
{},
"/ns-included1/test3/inner/37/42/",
),
(
"inc-ns1:test-ns3:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"/ns-included1/test3/inner/42/37/",
),
(
"inc-ns1:test-ns3:urlobject-special-view",
[],
{},
"/ns-included1/test3/inner/+%5C$*/",
),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_nested_namespace_pattern(self):
"""Namespaces can be nested."""
test_urls = [
(
"inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view",
[],
{},
"/ns-included1/ns-included4/ns-included1/test3/inner/",
),
(
"inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view",
[37, 42],
{},
"/ns-included1/ns-included4/ns-included1/test3/inner/37/42/",
),
(
"inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"/ns-included1/ns-included4/ns-included1/test3/inner/42/37/",
),
(
"inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view",
[],
{},
"/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/",
),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_lookup_object(self):
"""A default application namespace can be used for lookup."""
test_urls = [
("testapp:urlobject-view", [], {}, "/default/inner/"),
("testapp:urlobject-view", [37, 42], {}, "/default/inner/37/42/"),
(
"testapp:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"/default/inner/42/37/",
),
("testapp:urlobject-special-view", [], {}, "/default/inner/+%5C$*/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_lookup_object_with_default(self):
"""A default application namespace is sensitive to the current app."""
test_urls = [
("testapp:urlobject-view", [], {}, "test-ns3", "/default/inner/"),
(
"testapp:urlobject-view",
[37, 42],
{},
"test-ns3",
"/default/inner/37/42/",
),
(
"testapp:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"test-ns3",
"/default/inner/42/37/",
),
(
"testapp:urlobject-special-view",
[],
{},
"test-ns3",
"/default/inner/+%5C$*/",
),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(
name=name, args=args, kwargs=kwargs, current_app=current_app
):
self.assertEqual(
reverse(name, args=args, kwargs=kwargs, current_app=current_app),
expected,
)
def test_app_lookup_object_without_default(self):
"""
An application namespace without a default is sensitive to the current
app.
"""
test_urls = [
("nodefault:urlobject-view", [], {}, None, "/other2/inner/"),
("nodefault:urlobject-view", [37, 42], {}, None, "/other2/inner/37/42/"),
(
"nodefault:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
None,
"/other2/inner/42/37/",
),
("nodefault:urlobject-special-view", [], {}, None, "/other2/inner/+%5C$*/"),
("nodefault:urlobject-view", [], {}, "other-ns1", "/other1/inner/"),
(
"nodefault:urlobject-view",
[37, 42],
{},
"other-ns1",
"/other1/inner/37/42/",
),
(
"nodefault:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"other-ns1",
"/other1/inner/42/37/",
),
(
"nodefault:urlobject-special-view",
[],
{},
"other-ns1",
"/other1/inner/+%5C$*/",
),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(
name=name, args=args, kwargs=kwargs, current_app=current_app
):
self.assertEqual(
reverse(name, args=args, kwargs=kwargs, current_app=current_app),
expected,
)
def test_special_chars_namespace(self):
test_urls = [
(
"special:included_namespace_urls:inc-normal-view",
[],
{},
"/+%5C$*/included/normal/",
),
(
"special:included_namespace_urls:inc-normal-view",
[37, 42],
{},
"/+%5C$*/included/normal/37/42/",
),
(
"special:included_namespace_urls:inc-normal-view",
[],
{"arg1": 42, "arg2": 37},
"/+%5C$*/included/normal/42/37/",
),
(
"special:included_namespace_urls:inc-special-view",
[],
{},
"/+%5C$*/included/+%5C$*/",
),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespaces_with_variables(self):
"""Namespace prefixes can capture variables."""
test_urls = [
("inc-ns5:inner-nothing", [], {"outer": "70"}, "/inc70/"),
(
"inc-ns5:inner-extra",
[],
{"extra": "foobar", "outer": "78"},
"/inc78/extra/foobar/",
),
("inc-ns5:inner-nothing", ["70"], {}, "/inc70/"),
("inc-ns5:inner-extra", ["78", "foobar"], {}, "/inc78/extra/foobar/"),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_nested_app_lookup(self):
"""
A nested current_app should be split in individual namespaces (#24904).
"""
test_urls = [
(
"inc-ns1:testapp:urlobject-view",
[],
{},
None,
"/ns-included1/test4/inner/",
),
(
"inc-ns1:testapp:urlobject-view",
[37, 42],
{},
None,
"/ns-included1/test4/inner/37/42/",
),
(
"inc-ns1:testapp:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
None,
"/ns-included1/test4/inner/42/37/",
),
(
"inc-ns1:testapp:urlobject-special-view",
[],
{},
None,
"/ns-included1/test4/inner/+%5C$*/",
),
(
"inc-ns1:testapp:urlobject-view",
[],
{},
"inc-ns1:test-ns3",
"/ns-included1/test3/inner/",
),
(
"inc-ns1:testapp:urlobject-view",
[37, 42],
{},
"inc-ns1:test-ns3",
"/ns-included1/test3/inner/37/42/",
),
(
"inc-ns1:testapp:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"inc-ns1:test-ns3",
"/ns-included1/test3/inner/42/37/",
),
(
"inc-ns1:testapp:urlobject-special-view",
[],
{},
"inc-ns1:test-ns3",
"/ns-included1/test3/inner/+%5C$*/",
),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(
name=name, args=args, kwargs=kwargs, current_app=current_app
):
self.assertEqual(
reverse(name, args=args, kwargs=kwargs, current_app=current_app),
expected,
)
def test_current_app_no_partial_match(self):
"""current_app shouldn't be used unless it matches the whole path."""
test_urls = [
(
"inc-ns1:testapp:urlobject-view",
[],
{},
"nonexistent:test-ns3",
"/ns-included1/test4/inner/",
),
(
"inc-ns1:testapp:urlobject-view",
[37, 42],
{},
"nonexistent:test-ns3",
"/ns-included1/test4/inner/37/42/",
),
(
"inc-ns1:testapp:urlobject-view",
[],
{"arg1": 42, "arg2": 37},
"nonexistent:test-ns3",
"/ns-included1/test4/inner/42/37/",
),
(
"inc-ns1:testapp:urlobject-special-view",
[],
{},
"nonexistent:test-ns3",
"/ns-included1/test4/inner/+%5C$*/",
),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(
name=name, args=args, kwargs=kwargs, current_app=current_app
):
self.assertEqual(
reverse(name, args=args, kwargs=kwargs, current_app=current_app),
expected,
)
@override_settings(ROOT_URLCONF=urlconf_outer.__name__)
class RequestURLconfTests(SimpleTestCase):
def test_urlconf(self):
response = self.client.get("/test/me/")
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.content, b"outer:/test/me/,inner:/inner_urlconf/second_test/"
)
response = self.client.get("/inner_urlconf/second_test/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/second_test/")
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE=[
"%s.ChangeURLconfMiddleware" % middleware.__name__,
]
)
def test_urlconf_overridden(self):
response = self.client.get("/test/me/")
self.assertEqual(response.status_code, 404)
response = self.client.get("/inner_urlconf/second_test/")
self.assertEqual(response.status_code, 404)
response = self.client.get("/second_test/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"outer:,inner:/second_test/")
@override_settings(
MIDDLEWARE=[
"%s.NullChangeURLconfMiddleware" % middleware.__name__,
]
)
def test_urlconf_overridden_with_null(self):
"""
Overriding request.urlconf with None will fall back to the default
URLconf.
"""
response = self.client.get("/test/me/")
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.content, b"outer:/test/me/,inner:/inner_urlconf/second_test/"
)
response = self.client.get("/inner_urlconf/second_test/")
self.assertEqual(response.status_code, 200)
response = self.client.get("/second_test/")
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE=[
"%s.ChangeURLconfMiddleware" % middleware.__name__,
"%s.ReverseInnerInResponseMiddleware" % middleware.__name__,
]
)
def test_reverse_inner_in_response_middleware(self):
"""
        Test reversing a URL from the *overridden* URLconf from inside
a response middleware.
"""
response = self.client.get("/second_test/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"/second_test/")
@override_settings(
MIDDLEWARE=[
"%s.ChangeURLconfMiddleware" % middleware.__name__,
"%s.ReverseOuterInResponseMiddleware" % middleware.__name__,
]
)
def test_reverse_outer_in_response_middleware(self):
"""
        Test reversing a URL from the *default* URLconf from inside
a response middleware.
"""
msg = (
"Reverse for 'outer' not found. 'outer' is not a valid view "
"function or pattern name."
)
with self.assertRaisesMessage(NoReverseMatch, msg):
self.client.get("/second_test/")
@override_settings(
MIDDLEWARE=[
"%s.ChangeURLconfMiddleware" % middleware.__name__,
"%s.ReverseInnerInStreaming" % middleware.__name__,
]
)
def test_reverse_inner_in_streaming(self):
"""
        Test reversing a URL from the *overridden* URLconf from inside
a streaming response.
"""
response = self.client.get("/second_test/")
self.assertEqual(response.status_code, 200)
self.assertEqual(b"".join(response), b"/second_test/")
@override_settings(
MIDDLEWARE=[
"%s.ChangeURLconfMiddleware" % middleware.__name__,
"%s.ReverseOuterInStreaming" % middleware.__name__,
]
)
def test_reverse_outer_in_streaming(self):
"""
        Test reversing a URL from the *default* URLconf from inside
a streaming response.
"""
message = "Reverse for 'outer' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get("/second_test/")
b"".join(self.client.get("/second_test/"))
def test_urlconf_is_reset_after_request(self):
"""The URLconf is reset after each request."""
self.assertIsNone(get_urlconf())
with override_settings(
MIDDLEWARE=["%s.ChangeURLconfMiddleware" % middleware.__name__]
):
self.client.get(reverse("inner"))
self.assertIsNone(get_urlconf())
class ErrorHandlerResolutionTests(SimpleTestCase):
"""Tests for handler400, handler404 and handler500"""
def setUp(self):
urlconf = "urlpatterns_reverse.urls_error_handlers"
urlconf_callables = "urlpatterns_reverse.urls_error_handlers_callables"
self.resolver = URLResolver(RegexPattern(r"^$"), urlconf)
self.callable_resolver = URLResolver(RegexPattern(r"^$"), urlconf_callables)
def test_named_handlers(self):
for code in [400, 404, 500]:
with self.subTest(code=code):
self.assertEqual(self.resolver.resolve_error_handler(code), empty_view)
def test_callable_handlers(self):
for code in [400, 404, 500]:
with self.subTest(code=code):
self.assertEqual(
self.callable_resolver.resolve_error_handler(code), empty_view
)
@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls_without_handlers")
class DefaultErrorHandlerTests(SimpleTestCase):
def test_default_handler(self):
"If the urls.py doesn't specify handlers, the defaults are used"
response = self.client.get("/test/")
self.assertEqual(response.status_code, 404)
msg = "I don't think I'm getting good value for this view"
with self.assertRaisesMessage(ValueError, msg):
self.client.get("/bad_view/")
@override_settings(ROOT_URLCONF=None)
class NoRootUrlConfTests(SimpleTestCase):
"""Tests for handler404 and handler500 if ROOT_URLCONF is None"""
def test_no_handler_exception(self):
msg = (
"The included URLconf 'None' does not appear to have any patterns "
"in it. If you see the 'urlpatterns' variable with valid patterns "
"in the file then the issue is probably caused by a circular "
"import."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get("/test/me/")
@override_settings(ROOT_URLCONF="urlpatterns_reverse.namespace_urls")
class ResolverMatchTests(SimpleTestCase):
def test_urlpattern_resolve(self):
for (
path_,
url_name,
app_name,
namespace,
view_name,
func,
args,
kwargs,
) in resolve_test_data:
with self.subTest(path=path_):
# Legacy support for extracting "function, args, kwargs".
match_func, match_args, match_kwargs = resolve(path_)
self.assertEqual(match_func, func)
self.assertEqual(match_args, args)
self.assertEqual(match_kwargs, kwargs)
# ResolverMatch capabilities.
match = resolve(path_)
self.assertEqual(match.__class__, ResolverMatch)
self.assertEqual(match.url_name, url_name)
self.assertEqual(match.app_name, app_name)
self.assertEqual(match.namespace, namespace)
self.assertEqual(match.view_name, view_name)
self.assertEqual(match.func, func)
self.assertEqual(match.args, args)
self.assertEqual(match.kwargs, kwargs)
# and for legacy purposes:
self.assertEqual(match[0], func)
self.assertEqual(match[1], args)
self.assertEqual(match[2], kwargs)
def test_resolver_match_on_request(self):
response = self.client.get("/resolver_match/")
resolver_match = response.resolver_match
self.assertEqual(resolver_match.url_name, "test-resolver-match")
def test_resolver_match_on_request_before_resolution(self):
request = HttpRequest()
self.assertIsNone(request.resolver_match)
def test_repr(self):
self.assertEqual(
repr(resolve("/no_kwargs/42/37/")),
"ResolverMatch(func=urlpatterns_reverse.views.empty_view, "
"args=('42', '37'), kwargs={}, url_name='no-kwargs', app_names=[], "
"namespaces=[], route='^no_kwargs/([0-9]+)/([0-9]+)/$')",
)
@override_settings(ROOT_URLCONF="urlpatterns_reverse.reverse_lazy_urls")
def test_classbased_repr(self):
self.assertEqual(
repr(resolve("/redirect/")),
"ResolverMatch(func=urlpatterns_reverse.views.LazyRedirectView, "
"args=(), kwargs={}, url_name=None, app_names=[], "
"namespaces=[], route='redirect/')",
)
@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls")
def test_repr_functools_partial(self):
tests = [
("partial", "template.html"),
("partial_nested", "nested_partial.html"),
("partial_wrapped", "template.html"),
]
for name, template_name in tests:
with self.subTest(name=name):
func = (
f"functools.partial({views.empty_view!r}, "
f"template_name='{template_name}')"
)
self.assertEqual(
repr(resolve(f"/{name}/")),
f"ResolverMatch(func={func}, args=(), kwargs={{}}, "
f"url_name='{name}', app_names=[], namespaces=[], "
f"route='{name}/')",
)
@override_settings(ROOT_URLCONF="urlpatterns.path_urls")
def test_pickling(self):
msg = "Cannot pickle ResolverMatch."
with self.assertRaisesMessage(pickle.PicklingError, msg):
pickle.dumps(resolve("/users/"))
@override_settings(ROOT_URLCONF="urlpatterns_reverse.erroneous_urls")
class ErroneousViewTests(SimpleTestCase):
def test_noncallable_view(self):
# View is not a callable (explicit import; arbitrary Python object)
with self.assertRaisesMessage(TypeError, "view must be a callable"):
path("uncallable-object/", views.uncallable)
def test_invalid_regex(self):
# Regex contains an error (refs #6170)
msg = '(regex_error/$" is not a valid regular expression'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
reverse(views.empty_view)
class ViewLoadingTests(SimpleTestCase):
def test_view_loading(self):
self.assertEqual(
get_callable("urlpatterns_reverse.views.empty_view"), empty_view
)
self.assertEqual(get_callable(empty_view), empty_view)
def test_view_does_not_exist(self):
msg = "View does not exist in module urlpatterns_reverse.views."
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable("urlpatterns_reverse.views.i_should_not_exist")
def test_attributeerror_not_hidden(self):
msg = "I am here to confuse django.urls.get_callable"
with self.assertRaisesMessage(AttributeError, msg):
get_callable("urlpatterns_reverse.views_broken.i_am_broken")
def test_non_string_value(self):
msg = "'1' is not a callable or a dot-notation path"
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable(1)
def test_string_without_dot(self):
msg = "Could not import 'test'. The path must be fully qualified."
with self.assertRaisesMessage(ImportError, msg):
get_callable("test")
def test_module_does_not_exist(self):
with self.assertRaisesMessage(ImportError, "No module named 'foo'"):
get_callable("foo.bar")
def test_parent_module_does_not_exist(self):
msg = "Parent module urlpatterns_reverse.foo does not exist."
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable("urlpatterns_reverse.foo.bar")
def test_not_callable(self):
msg = (
"Could not import 'urlpatterns_reverse.tests.resolve_test_data'. "
"View is not callable."
)
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable("urlpatterns_reverse.tests.resolve_test_data")
class IncludeTests(SimpleTestCase):
url_patterns = [
path("inner/", views.empty_view, name="urlobject-view"),
re_path(
r"^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$",
views.empty_view,
name="urlobject-view",
),
re_path(r"^inner/\+\\\$\*/$", views.empty_view, name="urlobject-special-view"),
]
app_urls = URLObject("inc-app")
def test_include_urls(self):
self.assertEqual(include(self.url_patterns), (self.url_patterns, None, None))
def test_include_namespace(self):
msg = (
"Specifying a namespace in include() without providing an "
"app_name is not supported."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include(self.url_patterns, "namespace")
def test_include_4_tuple(self):
msg = "Passing a 4-tuple to include() is not supported."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include((self.url_patterns, "app_name", "namespace", "blah"))
def test_include_3_tuple(self):
msg = "Passing a 3-tuple to include() is not supported."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include((self.url_patterns, "app_name", "namespace"))
def test_include_3_tuple_namespace(self):
msg = (
"Cannot override the namespace for a dynamic module that provides a "
"namespace."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include((self.url_patterns, "app_name", "namespace"), "namespace")
def test_include_2_tuple(self):
self.assertEqual(
include((self.url_patterns, "app_name")),
(self.url_patterns, "app_name", "app_name"),
)
def test_include_2_tuple_namespace(self):
self.assertEqual(
include((self.url_patterns, "app_name"), namespace="namespace"),
(self.url_patterns, "app_name", "namespace"),
)
def test_include_app_name(self):
self.assertEqual(include(self.app_urls), (self.app_urls, "inc-app", "inc-app"))
def test_include_app_name_namespace(self):
self.assertEqual(
include(self.app_urls, "namespace"), (self.app_urls, "inc-app", "namespace")
)
@override_settings(ROOT_URLCONF="urlpatterns_reverse.urls")
class LookaheadTests(SimpleTestCase):
def test_valid_resolve(self):
test_urls = [
"/lookahead-/a-city/",
"/lookbehind-/a-city/",
"/lookahead+/a-city/",
"/lookbehind+/a-city/",
]
for test_url in test_urls:
with self.subTest(url=test_url):
self.assertEqual(resolve(test_url).kwargs, {"city": "a-city"})
def test_invalid_resolve(self):
test_urls = [
"/lookahead-/not-a-city/",
"/lookbehind-/not-a-city/",
"/lookahead+/other-city/",
"/lookbehind+/other-city/",
]
for test_url in test_urls:
with self.subTest(url=test_url):
with self.assertRaises(Resolver404):
resolve(test_url)
def test_valid_reverse(self):
test_urls = [
("lookahead-positive", {"city": "a-city"}, "/lookahead+/a-city/"),
("lookahead-negative", {"city": "a-city"}, "/lookahead-/a-city/"),
("lookbehind-positive", {"city": "a-city"}, "/lookbehind+/a-city/"),
("lookbehind-negative", {"city": "a-city"}, "/lookbehind-/a-city/"),
]
for name, kwargs, expected in test_urls:
with self.subTest(name=name, kwargs=kwargs):
self.assertEqual(reverse(name, kwargs=kwargs), expected)
def test_invalid_reverse(self):
test_urls = [
("lookahead-positive", {"city": "other-city"}),
("lookahead-negative", {"city": "not-a-city"}),
("lookbehind-positive", {"city": "other-city"}),
("lookbehind-negative", {"city": "not-a-city"}),
]
for name, kwargs in test_urls:
with self.subTest(name=name, kwargs=kwargs):
with self.assertRaises(NoReverseMatch):
reverse(name, kwargs=kwargs)
|
049d59f0ae7d23e85c40e93d0450500eff178cf58a98d8e624d5cefb130ce3fc | from django.urls import re_path
from .views import empty_view
urlpatterns = [
re_path("^inner-no-kwargs/([0-9]+)/$", empty_view, name="inner-no-kwargs")
]
|
491def10e9e657bef008483f7f61c79759f6f3efce13fc090735d2528d4f6d81 | # A URLconf that doesn't define any handlerXXX.
from django.urls import path
from .views import bad_view, empty_view
urlpatterns = [
path("test_view/", empty_view, name="test_view"),
path("bad_view/", bad_view, name="bad_view"),
]
|
c71c823e57afd7ce8830954bb3d08fcc25a4eb72b170455652e986adb71581de | from django.urls import include, path, re_path
from . import views
from .utils import URLObject
testobj1 = URLObject("testapp", "test-ns1")
testobj2 = URLObject("testapp", "test-ns2")
default_testobj = URLObject("testapp", "testapp")
otherobj1 = URLObject("nodefault", "other-ns1")
otherobj2 = URLObject("nodefault", "other-ns2")
newappobj1 = URLObject("newapp")
app_name = "namespace_urls"
urlpatterns = [
path("normal/", views.empty_view, name="normal-view"),
re_path(
r"^normal/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$",
views.empty_view,
name="normal-view",
),
path("resolver_match/", views.pass_resolver_match_view, name="test-resolver-match"),
re_path(r"^\+\\\$\*/$", views.empty_view, name="special-view"),
re_path(
r"^mixed_args/([0-9]+)/(?P<arg2>[0-9]+)/$", views.empty_view, name="mixed-args"
),
re_path(r"^no_kwargs/([0-9]+)/([0-9]+)/$", views.empty_view, name="no-kwargs"),
re_path(
r"^view_class/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$",
views.view_class_instance,
name="view-class",
),
re_path(r"^unnamed/normal/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$", views.empty_view),
re_path(
r"^unnamed/view_class/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$",
views.view_class_instance,
),
path("test1/", include(*testobj1.urls)),
path("test2/", include(*testobj2.urls)),
path("default/", include(*default_testobj.urls)),
path("other1/", include(*otherobj1.urls)),
re_path(r"^other[246]/", include(*otherobj2.urls)),
path("newapp1/", include(newappobj1.app_urls, "new-ns1")),
path("new-default/", include(newappobj1.app_urls)),
re_path(
r"^app-included[135]/",
include("urlpatterns_reverse.included_app_urls", namespace="app-ns1"),
),
path(
"app-included2/",
include("urlpatterns_reverse.included_app_urls", namespace="app-ns2"),
),
re_path(
r"^ns-included[135]/",
include("urlpatterns_reverse.included_namespace_urls", namespace="inc-ns1"),
),
path(
"ns-included2/",
include("urlpatterns_reverse.included_namespace_urls", namespace="inc-ns2"),
),
path(
"app-included/",
include("urlpatterns_reverse.included_namespace_urls", "inc-app"),
),
path("included/", include("urlpatterns_reverse.included_namespace_urls")),
re_path(
r"^inc(?P<outer>[0-9]+)/",
include(
("urlpatterns_reverse.included_urls", "included_urls"), namespace="inc-ns5"
),
),
re_path(
r"^included/([0-9]+)/", include("urlpatterns_reverse.included_namespace_urls")
),
re_path(
r"^ns-outer/(?P<outer>[0-9]+)/",
include("urlpatterns_reverse.included_namespace_urls", namespace="inc-outer"),
),
re_path(
r"^\+\\\$\*/",
include("urlpatterns_reverse.namespace_urls", namespace="special"),
),
]
|
6c441ab8f73ea68c2beae2f3165a2700f690c624f66a195b864f6d0a486de320 | from django.urls import re_path
from . import views
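# The unbalanced "(" below is deliberate: ErroneousViewTests.test_invalid_regex
# expects ImproperlyConfigured when this pattern is compiled (refs #6170).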
urlpatterns = [
re_path(r"(regex_error/$", views.empty_view),
]
|
9d849759965a11104f0cd9dcd58c12785c364425bdc46b83935325ac3a118624 | from django.urls import include, path, re_path
from .utils import URLObject
from .views import empty_view, view_class_instance
testobj3 = URLObject("testapp", "test-ns3")
testobj4 = URLObject("testapp", "test-ns4")
app_name = "included_namespace_urls"
urlpatterns = [
path("normal/", empty_view, name="inc-normal-view"),
re_path(
"^normal/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$",
empty_view,
name="inc-normal-view",
),
re_path(r"^\+\\\$\*/$", empty_view, name="inc-special-view"),
re_path(
"^mixed_args/([0-9]+)/(?P<arg2>[0-9]+)/$", empty_view, name="inc-mixed-args"
),
re_path("^no_kwargs/([0-9]+)/([0-9]+)/$", empty_view, name="inc-no-kwargs"),
re_path(
"^view_class/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$",
view_class_instance,
name="inc-view-class",
),
path("test3/", include(*testobj3.urls)),
path("test4/", include(*testobj4.urls)),
path(
"ns-included3/",
include(
("urlpatterns_reverse.included_urls", "included_urls"), namespace="inc-ns3"
),
),
path(
"ns-included4/",
include("urlpatterns_reverse.namespace_urls", namespace="inc-ns4"),
),
]
|
842d994af8f393e9d20844b6904c11d82146d4496205b9a76f89519fd2b97119 | from django.urls import path
class ViewContainer:
def method_view(self, request):
pass
@classmethod
def classmethod_view(cls, request):
pass
view_container = ViewContainer()
urlpatterns = [
path("", view_container.method_view, name="instance-method-url"),
path("", ViewContainer.classmethod_view, name="instance-method-url"),
]
|
aa3241b4f770b2de3f34d007c993f73df473956913ffde45fa8e691ff48f127a | from django.http import HttpResponse
from django.template import Context, Template
from django.urls import path
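# inner_view renders the "outer" and "inner" URL names into an
# "outer:...,inner:..." body; RequestURLconfTests asserts on this content
# while swapping request.urlconf via middleware.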
def inner_view(request):
content = Template(
'{% url "outer" as outer_url %}outer:{{ outer_url }},'
'{% url "inner" as inner_url %}inner:{{ inner_url }}'
).render(Context())
return HttpResponse(content)
urlpatterns = [
path("second_test/", inner_view, name="inner"),
]
|
c95557ed3dd6240f54209931884f65d8961d61c2ed553df57a14f34e0e7acf5b | from django.urls import path, re_path
from .views import empty_view
urlpatterns = [
path("", empty_view, name="inner-nothing"),
re_path(r"extra/(?P<extra>\w+)/$", empty_view, name="inner-extra"),
re_path(r"(?P<one>[0-9]+)|(?P<two>[0-9]+)/$", empty_view, name="inner-disjunction"),
]
|
485770bd4695cae262503f80618ca0718f798c53cbed7bab785ba09c86ae33f4 | from django.urls import path, re_path
from . import views
class URLObject:
urlpatterns = [
path("inner/", views.empty_view, name="urlobject-view"),
re_path(
r"^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$",
views.empty_view,
name="urlobject-view",
),
re_path(r"^inner/\+\\\$\*/$", views.empty_view, name="urlobject-special-view"),
]
def __init__(self, app_name, namespace=None):
self.app_name = app_name
self.namespace = namespace
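    # urls returns ((urlpatterns, app_name), namespace), matching the
    # include(*obj.urls) calls in namespace_urls.py; app_urls returns
    # (urlpatterns, app_name) for include(obj.app_urls, namespace).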
@property
def urls(self):
return (self.urlpatterns, self.app_name), self.namespace
@property
def app_urls(self):
return self.urlpatterns, self.app_name
|
293a29cfdbebcc586370628c1977843ddbe383631a532cbc8656c3144635a3f3 | from django.urls import path, re_path
from .views import empty_view
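# Every pattern below shares the name "name-conflict"; reverse() has to pick
# the right one from the number and names of the supplied arguments.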
urlpatterns = [
# No kwargs
path("conflict/cannot-go-here/", empty_view, name="name-conflict"),
path("conflict/", empty_view, name="name-conflict"),
# One kwarg
re_path(r"^conflict-first/(?P<first>\w+)/$", empty_view, name="name-conflict"),
re_path(
r"^conflict-cannot-go-here/(?P<middle>\w+)/$", empty_view, name="name-conflict"
),
re_path(r"^conflict-middle/(?P<middle>\w+)/$", empty_view, name="name-conflict"),
re_path(r"^conflict-last/(?P<last>\w+)/$", empty_view, name="name-conflict"),
# Two kwargs
re_path(
r"^conflict/(?P<another>\w+)/(?P<extra>\w+)/cannot-go-here/$",
empty_view,
name="name-conflict",
),
re_path(
r"^conflict/(?P<extra>\w+)/(?P<another>\w+)/$", empty_view, name="name-conflict"
),
]
|
8138702adda5e35ecdf852a9258f6e9323c1b711110fc76505d21c9f93d88605 | from django.urls import include, path, re_path
from .views import (
absolute_kwargs_view,
defaults_view,
empty_view,
empty_view_nested_partial,
empty_view_partial,
empty_view_wrapped,
nested_view,
)
other_patterns = [
path("non_path_include/", empty_view, name="non_path_include"),
path("nested_path/", nested_view),
]
urlpatterns = [
re_path(r"^places/([0-9]+)/$", empty_view, name="places"),
re_path(r"^places?/$", empty_view, name="places?"),
re_path(r"^places+/$", empty_view, name="places+"),
re_path(r"^places*/$", empty_view, name="places*"),
re_path(r"^(?:places/)?$", empty_view, name="places2?"),
re_path(r"^(?:places/)+$", empty_view, name="places2+"),
re_path(r"^(?:places/)*$", empty_view, name="places2*"),
re_path(r"^places/([0-9]+|[a-z_]+)/", empty_view, name="places3"),
re_path(r"^places/(?P<id>[0-9]+)/$", empty_view, name="places4"),
re_path(r"^people/(?P<name>\w+)/$", empty_view, name="people"),
re_path(r"^people/(?:name/)$", empty_view, name="people2"),
re_path(r"^people/(?:name/(\w+)/)?$", empty_view, name="people2a"),
re_path(r"^people/(?P<name>\w+)-(?P=name)/$", empty_view, name="people_backref"),
re_path(r"^optional/(?P<name>.*)/(?:.+/)?", empty_view, name="optional"),
re_path(
r"^optional/(?P<arg1>\d+)/(?:(?P<arg2>\d+)/)?",
absolute_kwargs_view,
name="named_optional",
),
re_path(
r"^optional/(?P<arg1>\d+)/(?:(?P<arg2>\d+)/)?$",
absolute_kwargs_view,
name="named_optional_terminated",
),
re_path(
r"^nested/noncapture/(?:(?P<p>\w+))$", empty_view, name="nested-noncapture"
),
re_path(r"^nested/capture/((\w+)/)?$", empty_view, name="nested-capture"),
re_path(
r"^nested/capture/mixed/((?P<p>\w+))$", empty_view, name="nested-mixedcapture"
),
re_path(
r"^nested/capture/named/(?P<outer>(?P<inner>\w+)/)?$",
empty_view,
name="nested-namedcapture",
),
re_path(r"^hardcoded/$", empty_view, name="hardcoded"),
re_path(r"^hardcoded/doc\.pdf$", empty_view, name="hardcoded2"),
re_path(r"^people/(?P<state>\w\w)/(?P<name>\w+)/$", empty_view, name="people3"),
re_path(r"^people/(?P<state>\w\w)/(?P<name>[0-9])/$", empty_view, name="people4"),
re_path(r"^people/((?P<state>\w\w)/test)?/(\w+)/$", empty_view, name="people6"),
re_path(r"^character_set/[abcdef0-9]/$", empty_view, name="range"),
re_path(r"^character_set/[\w]/$", empty_view, name="range2"),
re_path(r"^price/\$([0-9]+)/$", empty_view, name="price"),
re_path(r"^price/[$]([0-9]+)/$", empty_view, name="price2"),
re_path(r"^price/[\$]([0-9]+)/$", empty_view, name="price3"),
re_path(
r"^product/(?P<product>\w+)\+\(\$(?P<price>[0-9]+(\.[0-9]+)?)\)/$",
empty_view,
name="product",
),
re_path(
r"^headlines/(?P<year>[0-9]+)\.(?P<month>[0-9]+)\.(?P<day>[0-9]+)/$",
empty_view,
name="headlines",
),
re_path(
r"^windows_path/(?P<drive_name>[A-Z]):\\(?P<path>.+)/$",
empty_view,
name="windows",
),
re_path(r"^special_chars/(?P<chars>.+)/$", empty_view, name="special"),
re_path(r"^(?P<name>.+)/[0-9]+/$", empty_view, name="mixed"),
re_path(r"^repeats/a{1,2}/$", empty_view, name="repeats"),
re_path(r"^repeats/a{2,4}/$", empty_view, name="repeats2"),
re_path(r"^repeats/a{2}/$", empty_view, name="repeats3"),
re_path(r"^test/1/?", empty_view, name="test"),
re_path(r"^outer/(?P<outer>[0-9]+)/", include("urlpatterns_reverse.included_urls")),
re_path(
r"^outer-no-kwargs/([0-9]+)/",
include("urlpatterns_reverse.included_no_kwargs_urls"),
),
re_path("", include("urlpatterns_reverse.extra_urls")),
re_path(
r"^lookahead-/(?!not-a-city)(?P<city>[^/]+)/$",
empty_view,
name="lookahead-negative",
),
re_path(
r"^lookahead\+/(?=a-city)(?P<city>[^/]+)/$",
empty_view,
name="lookahead-positive",
),
re_path(
r"^lookbehind-/(?P<city>[^/]+)(?<!not-a-city)/$",
empty_view,
name="lookbehind-negative",
),
re_path(
r"^lookbehind\+/(?P<city>[^/]+)(?<=a-city)/$",
empty_view,
name="lookbehind-positive",
),
# Partials should be fine.
path("partial/", empty_view_partial, name="partial"),
path("partial_nested/", empty_view_nested_partial, name="partial_nested"),
path("partial_wrapped/", empty_view_wrapped, name="partial_wrapped"),
# This is non-reversible, but we shouldn't blow up when parsing it.
re_path(r"^(?:foo|bar)(\w+)/$", empty_view, name="disjunction"),
path("absolute_arg_view/", absolute_kwargs_view),
# Tests for #13154. Mixed syntax to test both ways of defining URLs.
re_path(
r"^defaults_view1/(?P<arg1>[0-9]+)/$",
defaults_view,
{"arg2": 1},
name="defaults",
),
re_path(
r"^defaults_view2/(?P<arg1>[0-9]+)/$", defaults_view, {"arg2": 2}, "defaults"
),
path("includes/", include(other_patterns)),
# Security tests
re_path("(.+)/security/$", empty_view, name="security"),
]
|
b2252a84700327260ca9109e545f474389ad57f03eddc0c9dfc48f5b5aa01068 | from django.urls import path, re_path
from . import views
app_name = "inc-app"
urlpatterns = [
path("normal/", views.empty_view, name="inc-normal-view"),
re_path(
"^normal/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$",
views.empty_view,
name="inc-normal-view",
),
re_path(r"^\+\\\$\*/$", views.empty_view, name="inc-special-view"),
re_path(
"^mixed_args/([0-9]+)/(?P<arg2>[0-9]+)/$",
views.empty_view,
name="inc-mixed-args",
),
re_path("^no_kwargs/([0-9]+)/([0-9]+)/$", views.empty_view, name="inc-no-kwargs"),
re_path(
"^view_class/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$",
views.view_class_instance,
name="inc-view-class",
),
]
|
145859fd6dc8a14efb45dc31b7ae1dbc867941470abbf3b941ac1a28054dd770 | from django.urls import path, re_path
from .views import empty_view
urlpatterns = [
path("", empty_view, name="named-url5"),
re_path(r"^extra/(?P<extra>\w+)/$", empty_view, name="named-url6"),
re_path(r"^(?P<one>[0-9]+)|(?P<two>[0-9]+)/$", empty_view),
]
|
1fa5777480c246e95ad18934c2d3bae3c0b8b9d288321be7fec419ec2ca346dc | from django.urls import include, path, re_path
from .views import empty_view
urlpatterns = [
path("", empty_view, name="named-url1"),
re_path(r"^extra/(?P<extra>\w+)/$", empty_view, name="named-url2"),
re_path(r"^(?P<one>[0-9]+)|(?P<two>[0-9]+)/$", empty_view),
path("included/", include("urlpatterns_reverse.included_named_urls")),
]
|
e4b09f747b64e6cab98fe6d819ae6372b0a4fbd0800a2ab77313815d1bdae718 | """
These URL patterns are included in two different ways in the main urls.py, with
an extra argument present in one case. Thus, there are two different ways for
each name to resolve and Django must distinguish the possibilities based on the
argument list.
"""
from django.urls import re_path
from .views import empty_view
urlpatterns = [
re_path(r"^part/(?P<value>\w+)/$", empty_view, name="part"),
re_path(r"^part2/(?:(?P<value>\w+)/)?$", empty_view, name="part2"),
]
|
5d72a45d05146f5b9d67170e12726b6e023b8af7ac3541b90b4eb220e895274f | from django.http import HttpResponse, StreamingHttpResponse
from django.urls import reverse
from django.utils.deprecation import MiddlewareMixin
from . import urlconf_inner
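# Assigning request.urlconf switches the URLconf for the current request only;
# assigning None falls back to ROOT_URLCONF (exercised by RequestURLconfTests).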
class ChangeURLconfMiddleware(MiddlewareMixin):
def process_request(self, request):
request.urlconf = urlconf_inner.__name__
class NullChangeURLconfMiddleware(MiddlewareMixin):
def process_request(self, request):
request.urlconf = None
class ReverseInnerInResponseMiddleware(MiddlewareMixin):
def process_response(self, *args, **kwargs):
return HttpResponse(reverse("inner"))
class ReverseOuterInResponseMiddleware(MiddlewareMixin):
def process_response(self, *args, **kwargs):
return HttpResponse(reverse("outer"))
class ReverseInnerInStreaming(MiddlewareMixin):
def process_view(self, *args, **kwargs):
def stream():
yield reverse("inner")
return StreamingHttpResponse(stream())
class ReverseOuterInStreaming(MiddlewareMixin):
def process_view(self, *args, **kwargs):
def stream():
yield reverse("outer")
return StreamingHttpResponse(stream())
|
22c59bfb002d1fd5ac76f38887029e39784905e3e4a5b4db8d0999b45628a404 | """
Some extra URL patterns that are included at the top level.
"""
from django.urls import include, path, re_path
from .views import empty_view
urlpatterns = [
re_path("^e-places/([0-9]+)/$", empty_view, name="extra-places"),
re_path(r"^e-people/(?P<name>\w+)/$", empty_view, name="extra-people"),
path("", include("urlpatterns_reverse.included_urls2")),
re_path(r"^prefix/(?P<prefix>\w+)/", include("urlpatterns_reverse.included_urls2")),
]
|
51864d4b8459390a4fa7887d0286bf5fb6918404a9071fa6fa40b60b5a767259 | from django.urls import path
from .views import LazyRedirectView, empty_view, login_required_view
urlpatterns = [
path("redirected_to/", empty_view, name="named-lazy-url-redirected-to"),
path("login/", empty_view, name="some-login-page"),
path("login_required_view/", login_required_view),
path("redirect/", LazyRedirectView.as_view()),
]
|
27e42821507b0fa303e0f5a2e3770b067340e6165a8ecf4d8c450712e611f588 | from functools import partial, update_wrapper
from django.contrib.auth.decorators import user_passes_test
from django.http import HttpResponse
from django.urls import reverse_lazy
from django.views.generic import RedirectView
def empty_view(request, *args, **kwargs):
return HttpResponse()
def absolute_kwargs_view(request, arg1=1, arg2=2):
return HttpResponse()
def defaults_view(request, arg1, arg2):
pass
def nested_view(request):
pass
def erroneous_view(request):
import non_existent # NOQA
def pass_resolver_match_view(request, *args, **kwargs):
response = HttpResponse()
response.resolver_match = request.resolver_match
return response
uncallable = None # neither a callable nor a string
class ViewClass:
def __call__(self, request, *args, **kwargs):
return HttpResponse()
view_class_instance = ViewClass()
class LazyRedirectView(RedirectView):
url = reverse_lazy("named-lazy-url-redirected-to")
@user_passes_test(
lambda u: u.is_authenticated, login_url=reverse_lazy("some-login-page")
)
def login_required_view(request):
return HttpResponse("Hello you")
def bad_view(request, *args, **kwargs):
raise ValueError("I don't think I'm getting good value for this view")
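# Partial-wrapped views; ResolverMatchTests.test_repr_functools_partial checks
# that their repr() renders as functools.partial(<view>, template_name=...).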
empty_view_partial = partial(empty_view, template_name="template.html")
empty_view_nested_partial = partial(
empty_view_partial, template_name="nested_partial.html"
)
empty_view_wrapped = update_wrapper(
partial(empty_view, template_name="template.html"),
empty_view,
)
|