hash: string (length 64)
content: string (length 0 to 1.51M)
237254f8e4dad7bc34e66d65b33ccd804f1e2ca04430d105537d313cd0508601
import datetime

from django.test import TestCase, override_settings
from django.utils import timezone

from .models import Article, Category, Comment


class DateTimesTests(TestCase):
    def test_related_model_traverse(self):
        a1 = Article.objects.create(
            title="First one",
            pub_date=datetime.datetime(2005, 7, 28, 9, 0, 0),
        )
        a2 = Article.objects.create(
            title="Another one",
            pub_date=datetime.datetime(2010, 7, 28, 10, 0, 0),
        )
        a3 = Article.objects.create(
            title="Third one, in the first day",
            pub_date=datetime.datetime(2005, 7, 28, 17, 0, 0),
        )
        a1.comments.create(
            text="Im the HULK!",
            pub_date=datetime.datetime(2005, 7, 28, 9, 30, 0),
        )
        a1.comments.create(
            text="HULK SMASH!",
            pub_date=datetime.datetime(2005, 7, 29, 1, 30, 0),
        )
        a2.comments.create(
            text="LMAO",
            pub_date=datetime.datetime(2010, 7, 28, 10, 10, 10),
        )
        a3.comments.create(
            text="+1",
            pub_date=datetime.datetime(2005, 8, 29, 10, 10, 10),
        )
        c = Category.objects.create(name="serious-news")
        c.articles.add(a1, a3)
        self.assertSequenceEqual(
            Comment.objects.datetimes("article__pub_date", "year"),
            [
                datetime.datetime(2005, 1, 1),
                datetime.datetime(2010, 1, 1),
            ],
        )
        self.assertSequenceEqual(
            Comment.objects.datetimes("article__pub_date", "month"),
            [
                datetime.datetime(2005, 7, 1),
                datetime.datetime(2010, 7, 1),
            ],
        )
        self.assertSequenceEqual(
            Comment.objects.datetimes("article__pub_date", "week"),
            [
                datetime.datetime(2005, 7, 25),
                datetime.datetime(2010, 7, 26),
            ],
        )
        self.assertSequenceEqual(
            Comment.objects.datetimes("article__pub_date", "day"),
            [
                datetime.datetime(2005, 7, 28),
                datetime.datetime(2010, 7, 28),
            ],
        )
        self.assertSequenceEqual(
            Article.objects.datetimes("comments__pub_date", "day"),
            [
                datetime.datetime(2005, 7, 28),
                datetime.datetime(2005, 7, 29),
                datetime.datetime(2005, 8, 29),
                datetime.datetime(2010, 7, 28),
            ],
        )
        self.assertSequenceEqual(
            Article.objects.datetimes("comments__approval_date", "day"), []
        )
        self.assertSequenceEqual(
            Category.objects.datetimes("articles__pub_date", "day"),
            [
                datetime.datetime(2005, 7, 28),
            ],
        )

    @override_settings(USE_TZ=True)
    def test_21432(self):
        now = timezone.localtime(timezone.now().replace(microsecond=0))
        Article.objects.create(title="First one", pub_date=now)
        qs = Article.objects.datetimes("pub_date", "second")
        self.assertEqual(qs[0], now)

    def test_datetimes_returns_available_dates_for_given_scope_and_given_field(self):
        pub_dates = [
            datetime.datetime(2005, 7, 28, 12, 15),
            datetime.datetime(2005, 7, 29, 2, 15),
            datetime.datetime(2005, 7, 30, 5, 15),
            datetime.datetime(2005, 7, 31, 19, 15),
        ]
        for i, pub_date in enumerate(pub_dates):
            Article(pub_date=pub_date, title="title #{}".format(i)).save()
        self.assertSequenceEqual(
            Article.objects.datetimes("pub_date", "year"),
            [datetime.datetime(2005, 1, 1, 0, 0)],
        )
        self.assertSequenceEqual(
            Article.objects.datetimes("pub_date", "month"),
            [datetime.datetime(2005, 7, 1, 0, 0)],
        )
        self.assertSequenceEqual(
            Article.objects.datetimes("pub_date", "week"),
            [datetime.datetime(2005, 7, 25, 0, 0)],
        )
        self.assertSequenceEqual(
            Article.objects.datetimes("pub_date", "day"),
            [
                datetime.datetime(2005, 7, 28, 0, 0),
                datetime.datetime(2005, 7, 29, 0, 0),
                datetime.datetime(2005, 7, 30, 0, 0),
                datetime.datetime(2005, 7, 31, 0, 0),
            ],
        )
        self.assertSequenceEqual(
            Article.objects.datetimes("pub_date", "day", order="ASC"),
            [
                datetime.datetime(2005, 7, 28, 0, 0),
                datetime.datetime(2005, 7, 29, 0, 0),
                datetime.datetime(2005, 7, 30, 0, 0),
                datetime.datetime(2005, 7, 31, 0, 0),
            ],
        )
        self.assertSequenceEqual(
            Article.objects.datetimes("pub_date", "day", order="DESC"),
            [
                datetime.datetime(2005, 7, 31, 0, 0),
                datetime.datetime(2005, 7, 30, 0, 0),
                datetime.datetime(2005, 7, 29, 0, 0),
                datetime.datetime(2005, 7, 28, 0, 0),
            ],
        )

    def test_datetimes_has_lazy_iterator(self):
        pub_dates = [
            datetime.datetime(2005, 7, 28, 12, 15),
            datetime.datetime(2005, 7, 29, 2, 15),
            datetime.datetime(2005, 7, 30, 5, 15),
            datetime.datetime(2005, 7, 31, 19, 15),
        ]
        for i, pub_date in enumerate(pub_dates):
            Article(pub_date=pub_date, title="title #{}".format(i)).save()
        # Use iterator() with datetimes() to return a generator that lazily
        # requests each result one at a time, to save memory.
        dates = []
        with self.assertNumQueries(0):
            article_datetimes_iterator = Article.objects.datetimes(
                "pub_date", "day", order="DESC"
            ).iterator()
        with self.assertNumQueries(1):
            for article in article_datetimes_iterator:
                dates.append(article)
        self.assertEqual(
            dates,
            [
                datetime.datetime(2005, 7, 31, 0, 0),
                datetime.datetime(2005, 7, 30, 0, 0),
                datetime.datetime(2005, 7, 29, 0, 0),
                datetime.datetime(2005, 7, 28, 0, 0),
            ],
        )

    def test_datetimes_disallows_date_fields(self):
        dt = datetime.datetime(2005, 7, 28, 12, 15)
        Article.objects.create(
            pub_date=dt,
            published_on=dt.date(),
            title="Don't put dates into datetime functions!",
        )
        with self.assertRaisesMessage(
            ValueError, "Cannot truncate DateField 'published_on' to DateTimeField"
        ):
            list(Article.objects.datetimes("published_on", "second"))

    def test_datetimes_fails_when_given_invalid_kind_argument(self):
        msg = (
            "'kind' must be one of 'year', 'month', 'week', 'day', 'hour', "
            "'minute', or 'second'."
        )
        with self.assertRaisesMessage(ValueError, msg):
            Article.objects.datetimes("pub_date", "bad_kind")

    def test_datetimes_fails_when_given_invalid_order_argument(self):
        msg = "'order' must be either 'ASC' or 'DESC'."
        with self.assertRaisesMessage(ValueError, msg):
            Article.objects.datetimes("pub_date", "year", order="bad order")
c481510a18cb77a70eeb56f9f4a9015c0182fc296254358e236a3e867e11bcb2
from unittest import mock from django.core.checks import Error from django.core.checks import Warning as DjangoWarning from django.db import connection, models from django.test.testcases import SimpleTestCase from django.test.utils import isolate_apps, modify_settings, override_settings @isolate_apps("invalid_models_tests") class RelativeFieldTests(SimpleTestCase): def test_valid_foreign_key_without_accessor(self): class Target(models.Model): # There would be a clash if Model.field installed an accessor. model = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, models.CASCADE, related_name="+") field = Model._meta.get_field("field") self.assertEqual(field.check(), []) def test_foreign_key_to_missing_model(self): # Model names are resolved when a model is being created, so we cannot # test relative fields in isolation and we need to attach them to a # model. class Model(models.Model): foreign_key = models.ForeignKey("Rel1", models.CASCADE) field = Model._meta.get_field("foreign_key") self.assertEqual( field.check(), [ Error( "Field defines a relation with model 'Rel1', " "which is either not installed, or is abstract.", obj=field, id="fields.E300", ), ], ) @isolate_apps("invalid_models_tests") def test_foreign_key_to_isolate_apps_model(self): """ #25723 - Referenced model registration lookup should be run against the field's model registry. """ class OtherModel(models.Model): pass class Model(models.Model): foreign_key = models.ForeignKey("OtherModel", models.CASCADE) field = Model._meta.get_field("foreign_key") self.assertEqual(field.check(from_model=Model), []) def test_many_to_many_to_missing_model(self): class Model(models.Model): m2m = models.ManyToManyField("Rel2") field = Model._meta.get_field("m2m") self.assertEqual( field.check(from_model=Model), [ Error( "Field defines a relation with model 'Rel2', " "which is either not installed, or is abstract.", obj=field, id="fields.E300", ), ], ) @isolate_apps("invalid_models_tests") def test_many_to_many_to_isolate_apps_model(self): """ #25723 - Referenced model registration lookup should be run against the field's model registry. """ class OtherModel(models.Model): pass class Model(models.Model): m2m = models.ManyToManyField("OtherModel") field = Model._meta.get_field("m2m") self.assertEqual(field.check(from_model=Model), []) def test_many_to_many_with_useless_options(self): class Model(models.Model): name = models.CharField(max_length=20) class ModelM2M(models.Model): m2m = models.ManyToManyField( Model, null=True, validators=[lambda x: x], db_comment="Column comment" ) field = ModelM2M._meta.get_field("m2m") self.assertEqual( ModelM2M.check(), [ DjangoWarning( "null has no effect on ManyToManyField.", obj=field, id="fields.W340", ), DjangoWarning( "ManyToManyField does not support validators.", obj=field, id="fields.W341", ), DjangoWarning( "db_comment has no effect on ManyToManyField.", obj=field, id="fields.W346", ), ], ) def test_many_to_many_with_useless_related_name(self): class ModelM2M(models.Model): m2m = models.ManyToManyField("self", related_name="children") field = ModelM2M._meta.get_field("m2m") self.assertEqual( ModelM2M.check(), [ DjangoWarning( "related_name has no effect on ManyToManyField with " 'a symmetrical relationship, e.g. 
to "self".', obj=field, id="fields.W345", ), ], ) def test_ambiguous_relationship_model_from(self): class Person(models.Model): pass class Group(models.Model): field = models.ManyToManyField("Person", through="AmbiguousRelationship") class AmbiguousRelationship(models.Model): person = models.ForeignKey(Person, models.CASCADE) first_group = models.ForeignKey(Group, models.CASCADE, related_name="first") second_group = models.ForeignKey( Group, models.CASCADE, related_name="second" ) field = Group._meta.get_field("field") self.assertEqual( field.check(from_model=Group), [ Error( "The model is used as an intermediate model by " "'invalid_models_tests.Group.field', but it has more than one " "foreign key from 'Group', which is ambiguous. You must " "specify which foreign key Django should use via the " "through_fields keyword argument.", hint=( "If you want to create a recursive relationship, use " 'ManyToManyField("self", through="AmbiguousRelationship").' ), obj=field, id="fields.E334", ), ], ) def test_ambiguous_relationship_model_to(self): class Person(models.Model): pass class Group(models.Model): field = models.ManyToManyField( "Person", through="AmbiguousRelationship", related_name="tertiary" ) class AmbiguousRelationship(models.Model): # Too much foreign keys to Person. first_person = models.ForeignKey( Person, models.CASCADE, related_name="first" ) second_person = models.ForeignKey( Person, models.CASCADE, related_name="second" ) second_model = models.ForeignKey(Group, models.CASCADE) field = Group._meta.get_field("field") self.assertEqual( field.check(from_model=Group), [ Error( "The model is used as an intermediate model by " "'invalid_models_tests.Group.field', but it has more than one " "foreign key to 'Person', which is ambiguous. You must specify " "which foreign key Django should use via the through_fields " "keyword argument.", hint=( "If you want to create a recursive relationship, use " 'ManyToManyField("self", through="AmbiguousRelationship").' ), obj=field, id="fields.E335", ), ], ) def test_relationship_model_with_foreign_key_to_wrong_model(self): class WrongModel(models.Model): pass class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField("Person", through="InvalidRelationship") class InvalidRelationship(models.Model): person = models.ForeignKey(Person, models.CASCADE) wrong_foreign_key = models.ForeignKey(WrongModel, models.CASCADE) # The last foreign key should point to Group model. 
field = Group._meta.get_field("members") self.assertEqual( field.check(from_model=Group), [ Error( "The model is used as an intermediate model by " "'invalid_models_tests.Group.members', but it does not " "have a foreign key to 'Group' or 'Person'.", obj=InvalidRelationship, id="fields.E336", ), ], ) def test_relationship_model_missing_foreign_key(self): class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField("Person", through="InvalidRelationship") class InvalidRelationship(models.Model): group = models.ForeignKey(Group, models.CASCADE) # No foreign key to Person field = Group._meta.get_field("members") self.assertEqual( field.check(from_model=Group), [ Error( "The model is used as an intermediate model by " "'invalid_models_tests.Group.members', but it does not have " "a foreign key to 'Group' or 'Person'.", obj=InvalidRelationship, id="fields.E336", ), ], ) def test_missing_relationship_model(self): class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField("Person", through="MissingM2MModel") field = Group._meta.get_field("members") self.assertEqual( field.check(from_model=Group), [ Error( "Field specifies a many-to-many relation through model " "'MissingM2MModel', which has not been installed.", obj=field, id="fields.E331", ), ], ) def test_missing_relationship_model_on_model_check(self): class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField("Person", through="MissingM2MModel") self.assertEqual( Group.check(), [ Error( "Field specifies a many-to-many relation through model " "'MissingM2MModel', which has not been installed.", obj=Group._meta.get_field("members"), id="fields.E331", ), ], ) @isolate_apps("invalid_models_tests") def test_many_to_many_through_isolate_apps_model(self): """ #25723 - Through model registration lookup should be run against the field's model registry. """ class GroupMember(models.Model): person = models.ForeignKey("Person", models.CASCADE) group = models.ForeignKey("Group", models.CASCADE) class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField("Person", through="GroupMember") field = Group._meta.get_field("members") self.assertEqual(field.check(from_model=Group), []) def test_too_many_foreign_keys_in_self_referential_model(self): class Person(models.Model): friends = models.ManyToManyField( "self", through="InvalidRelationship", symmetrical=False ) class InvalidRelationship(models.Model): first = models.ForeignKey( Person, models.CASCADE, related_name="rel_from_set_2" ) second = models.ForeignKey( Person, models.CASCADE, related_name="rel_to_set_2" ) third = models.ForeignKey( Person, models.CASCADE, related_name="too_many_by_far" ) field = Person._meta.get_field("friends") self.assertEqual( field.check(from_model=Person), [ Error( "The model is used as an intermediate model by " "'invalid_models_tests.Person.friends', but it has more than two " "foreign keys to 'Person', which is ambiguous. You must specify " "which two foreign keys Django should use via the through_fields " "keyword argument.", hint=( "Use through_fields to specify which two foreign keys Django " "should use." 
), obj=InvalidRelationship, id="fields.E333", ), ], ) def test_foreign_key_to_abstract_model(self): class AbstractModel(models.Model): class Meta: abstract = True class Model(models.Model): rel_string_foreign_key = models.ForeignKey("AbstractModel", models.CASCADE) rel_class_foreign_key = models.ForeignKey(AbstractModel, models.CASCADE) fields = [ Model._meta.get_field("rel_string_foreign_key"), Model._meta.get_field("rel_class_foreign_key"), ] expected_error = Error( "Field defines a relation with model 'AbstractModel', " "which is either not installed, or is abstract.", id="fields.E300", ) for field in fields: expected_error.obj = field self.assertEqual(field.check(), [expected_error]) def test_m2m_to_abstract_model(self): class AbstractModel(models.Model): class Meta: abstract = True class Model(models.Model): rel_string_m2m = models.ManyToManyField("AbstractModel") rel_class_m2m = models.ManyToManyField(AbstractModel) fields = [ Model._meta.get_field("rel_string_m2m"), Model._meta.get_field("rel_class_m2m"), ] expected_error = Error( "Field defines a relation with model 'AbstractModel', " "which is either not installed, or is abstract.", id="fields.E300", ) for field in fields: expected_error.obj = field self.assertEqual(field.check(from_model=Model), [expected_error]) def test_unique_m2m(self): class Person(models.Model): name = models.CharField(max_length=5) class Group(models.Model): members = models.ManyToManyField("Person", unique=True) field = Group._meta.get_field("members") self.assertEqual( field.check(from_model=Group), [ Error( "ManyToManyFields cannot be unique.", obj=field, id="fields.E330", ), ], ) def test_foreign_key_to_non_unique_field(self): class Target(models.Model): bad = models.IntegerField() # No unique=True class Model(models.Model): foreign_key = models.ForeignKey("Target", models.CASCADE, to_field="bad") field = Model._meta.get_field("foreign_key") self.assertEqual( field.check(), [ Error( "'Target.bad' must be unique because it is referenced by a foreign " "key.", hint=( "Add unique=True to this field or add a UniqueConstraint " "(without condition) in the model Meta.constraints." ), obj=field, id="fields.E311", ), ], ) def test_foreign_key_to_non_unique_field_under_explicit_model(self): class Target(models.Model): bad = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, models.CASCADE, to_field="bad") field = Model._meta.get_field("field") self.assertEqual( field.check(), [ Error( "'Target.bad' must be unique because it is referenced by a foreign " "key.", hint=( "Add unique=True to this field or add a UniqueConstraint " "(without condition) in the model Meta.constraints." ), obj=field, id="fields.E311", ), ], ) def test_foreign_key_to_partially_unique_field(self): class Target(models.Model): source = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=["source"], name="tfktpuf_partial_unique", condition=models.Q(pk__gt=2), ), ] class Model(models.Model): field = models.ForeignKey(Target, models.CASCADE, to_field="source") field = Model._meta.get_field("field") self.assertEqual( field.check(), [ Error( "'Target.source' must be unique because it is referenced by a " "foreign key.", hint=( "Add unique=True to this field or add a UniqueConstraint " "(without condition) in the model Meta.constraints." 
), obj=field, id="fields.E311", ), ], ) def test_foreign_key_to_unique_field_with_meta_constraint(self): class Target(models.Model): source = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=["source"], name="tfktufwmc_unique", ), ] class Model(models.Model): field = models.ForeignKey(Target, models.CASCADE, to_field="source") field = Model._meta.get_field("field") self.assertEqual(field.check(), []) def test_foreign_object_to_non_unique_fields(self): class Person(models.Model): # Note that both fields are not unique. country_id = models.IntegerField() city_id = models.IntegerField() class MMembership(models.Model): person_country_id = models.IntegerField() person_city_id = models.IntegerField() person = models.ForeignObject( Person, on_delete=models.CASCADE, from_fields=["person_country_id", "person_city_id"], to_fields=["country_id", "city_id"], ) field = MMembership._meta.get_field("person") self.assertEqual( field.check(), [ Error( "No subset of the fields 'country_id', 'city_id' on model 'Person' " "is unique.", hint=( "Mark a single field as unique=True or add a set of " "fields to a unique constraint (via unique_together or a " "UniqueConstraint (without condition) in the model " "Meta.constraints)." ), obj=field, id="fields.E310", ) ], ) def test_foreign_object_to_partially_unique_field(self): class Person(models.Model): country_id = models.IntegerField() city_id = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=["country_id", "city_id"], name="tfotpuf_partial_unique", condition=models.Q(pk__gt=2), ), ] class MMembership(models.Model): person_country_id = models.IntegerField() person_city_id = models.IntegerField() person = models.ForeignObject( Person, on_delete=models.CASCADE, from_fields=["person_country_id", "person_city_id"], to_fields=["country_id", "city_id"], ) field = MMembership._meta.get_field("person") self.assertEqual( field.check(), [ Error( "No subset of the fields 'country_id', 'city_id' on model " "'Person' is unique.", hint=( "Mark a single field as unique=True or add a set of " "fields to a unique constraint (via unique_together or a " "UniqueConstraint (without condition) in the model " "Meta.constraints)." ), obj=field, id="fields.E310", ), ], ) def test_foreign_object_to_unique_field_with_meta_constraint(self): class Person(models.Model): country_id = models.IntegerField() city_id = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=["country_id", "city_id"], name="tfotpuf_unique", ), ] class MMembership(models.Model): person_country_id = models.IntegerField() person_city_id = models.IntegerField() person = models.ForeignObject( Person, on_delete=models.CASCADE, from_fields=["person_country_id", "person_city_id"], to_fields=["country_id", "city_id"], ) field = MMembership._meta.get_field("person") self.assertEqual(field.check(), []) def test_on_delete_set_null_on_non_nullable_field(self): class Person(models.Model): pass class Model(models.Model): foreign_key = models.ForeignKey("Person", models.SET_NULL) field = Model._meta.get_field("foreign_key") self.assertEqual( field.check(), [ Error( "Field specifies on_delete=SET_NULL, but cannot be null.", hint=( "Set null=True argument on the field, or change the on_delete " "rule." 
), obj=field, id="fields.E320", ), ], ) def test_on_delete_set_default_without_default_value(self): class Person(models.Model): pass class Model(models.Model): foreign_key = models.ForeignKey("Person", models.SET_DEFAULT) field = Model._meta.get_field("foreign_key") self.assertEqual( field.check(), [ Error( "Field specifies on_delete=SET_DEFAULT, but has no default value.", hint="Set a default value, or change the on_delete rule.", obj=field, id="fields.E321", ), ], ) def test_nullable_primary_key(self): class Model(models.Model): field = models.IntegerField(primary_key=True, null=True) field = Model._meta.get_field("field") with mock.patch.object( connection.features, "interprets_empty_strings_as_nulls", False ): results = field.check() self.assertEqual( results, [ Error( "Primary keys must not have null=True.", hint=( "Set null=False on the field, or remove primary_key=True " "argument." ), obj=field, id="fields.E007", ), ], ) def test_not_swapped_model(self): class SwappableModel(models.Model): # A model that can be, but isn't swapped out. References to this # model should *not* raise any validation error. class Meta: swappable = "TEST_SWAPPABLE_MODEL" class Model(models.Model): explicit_fk = models.ForeignKey( SwappableModel, models.CASCADE, related_name="explicit_fk", ) implicit_fk = models.ForeignKey( "invalid_models_tests.SwappableModel", models.CASCADE, related_name="implicit_fk", ) explicit_m2m = models.ManyToManyField( SwappableModel, related_name="explicit_m2m" ) implicit_m2m = models.ManyToManyField( "invalid_models_tests.SwappableModel", related_name="implicit_m2m", ) explicit_fk = Model._meta.get_field("explicit_fk") self.assertEqual(explicit_fk.check(), []) implicit_fk = Model._meta.get_field("implicit_fk") self.assertEqual(implicit_fk.check(), []) explicit_m2m = Model._meta.get_field("explicit_m2m") self.assertEqual(explicit_m2m.check(from_model=Model), []) implicit_m2m = Model._meta.get_field("implicit_m2m") self.assertEqual(implicit_m2m.check(from_model=Model), []) @override_settings(TEST_SWAPPED_MODEL="invalid_models_tests.Replacement") def test_referencing_to_swapped_model(self): class Replacement(models.Model): pass class SwappedModel(models.Model): class Meta: swappable = "TEST_SWAPPED_MODEL" class Model(models.Model): explicit_fk = models.ForeignKey( SwappedModel, models.CASCADE, related_name="explicit_fk", ) implicit_fk = models.ForeignKey( "invalid_models_tests.SwappedModel", models.CASCADE, related_name="implicit_fk", ) explicit_m2m = models.ManyToManyField( SwappedModel, related_name="explicit_m2m" ) implicit_m2m = models.ManyToManyField( "invalid_models_tests.SwappedModel", related_name="implicit_m2m", ) fields = [ Model._meta.get_field("explicit_fk"), Model._meta.get_field("implicit_fk"), Model._meta.get_field("explicit_m2m"), Model._meta.get_field("implicit_m2m"), ] expected_error = Error( ( "Field defines a relation with the model " "'invalid_models_tests.SwappedModel', which has been swapped out." ), hint="Update the relation to point at 'settings.TEST_SWAPPED_MODEL'.", id="fields.E301", ) for field in fields: expected_error.obj = field self.assertEqual(field.check(from_model=Model), [expected_error]) def test_related_field_has_invalid_related_name(self): digit = 0 illegal_non_alphanumeric = "!" 
whitespace = "\t" invalid_related_names = [ "%s_begins_with_digit" % digit, "%s_begins_with_illegal_non_alphanumeric" % illegal_non_alphanumeric, "%s_begins_with_whitespace" % whitespace, "contains_%s_illegal_non_alphanumeric" % illegal_non_alphanumeric, "contains_%s_whitespace" % whitespace, "ends_with_with_illegal_non_alphanumeric_%s" % illegal_non_alphanumeric, "ends_with_whitespace_%s" % whitespace, "with", # a Python keyword "related_name\n", "", ",", # non-ASCII ] class Parent(models.Model): pass for invalid_related_name in invalid_related_names: Child = type( "Child%s" % invalid_related_name, (models.Model,), { "parent": models.ForeignKey( "Parent", models.CASCADE, related_name=invalid_related_name ), "__module__": Parent.__module__, }, ) field = Child._meta.get_field("parent") self.assertEqual( Child.check(), [ Error( "The name '%s' is invalid related_name for field Child%s.parent" % (invalid_related_name, invalid_related_name), hint=( "Related name must be a valid Python identifier or end " "with a '+'" ), obj=field, id="fields.E306", ), ], ) def test_related_field_has_valid_related_name(self): lowercase = "a" uppercase = "A" digit = 0 related_names = [ "%s_starts_with_lowercase" % lowercase, "%s_tarts_with_uppercase" % uppercase, "_starts_with_underscore", "contains_%s_digit" % digit, "ends_with_plus+", "_+", "+", "試", "試驗+", ] class Parent(models.Model): pass for related_name in related_names: Child = type( "Child%s" % related_name, (models.Model,), { "parent": models.ForeignKey( "Parent", models.CASCADE, related_name=related_name ), "__module__": Parent.__module__, }, ) self.assertEqual(Child.check(), []) def test_to_fields_exist(self): class Parent(models.Model): pass class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() parent = models.ForeignObject( Parent, on_delete=models.SET_NULL, from_fields=("a", "b"), to_fields=("a", "b"), ) field = Child._meta.get_field("parent") self.assertEqual( field.check(), [ Error( "The to_field 'a' doesn't exist on the related model " "'invalid_models_tests.Parent'.", obj=field, id="fields.E312", ), Error( "The to_field 'b' doesn't exist on the related model " "'invalid_models_tests.Parent'.", obj=field, id="fields.E312", ), ], ) def test_to_fields_not_checked_if_related_model_doesnt_exist(self): class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() parent = models.ForeignObject( "invalid_models_tests.Parent", on_delete=models.SET_NULL, from_fields=("a", "b"), to_fields=("a", "b"), ) field = Child._meta.get_field("parent") self.assertEqual( field.check(), [ Error( "Field defines a relation with model " "'invalid_models_tests.Parent', which is either not installed, or " "is abstract.", id="fields.E300", obj=field, ), ], ) def test_invalid_related_query_name(self): class Target(models.Model): pass class Model(models.Model): first = models.ForeignKey( Target, models.CASCADE, related_name="contains__double" ) second = models.ForeignKey( Target, models.CASCADE, related_query_name="ends_underscore_" ) self.assertEqual( Model.check(), [ Error( "Reverse query name 'contains__double' must not contain '__'.", hint=( "Add or change a related_name or related_query_name " "argument for this field." ), obj=Model._meta.get_field("first"), id="fields.E309", ), Error( "Reverse query name 'ends_underscore_' must not end with an " "underscore.", hint=( "Add or change a related_name or related_query_name " "argument for this field." 
), obj=Model._meta.get_field("second"), id="fields.E308", ), ], ) @isolate_apps("invalid_models_tests") class AccessorClashTests(SimpleTestCase): def test_fk_to_integer(self): self._test_accessor_clash( target=models.IntegerField(), relative=models.ForeignKey("Target", models.CASCADE), ) def test_fk_to_fk(self): self._test_accessor_clash( target=models.ForeignKey("Another", models.CASCADE), relative=models.ForeignKey("Target", models.CASCADE), ) def test_fk_to_m2m(self): self._test_accessor_clash( target=models.ManyToManyField("Another"), relative=models.ForeignKey("Target", models.CASCADE), ) def test_m2m_to_integer(self): self._test_accessor_clash( target=models.IntegerField(), relative=models.ManyToManyField("Target") ) def test_m2m_to_fk(self): self._test_accessor_clash( target=models.ForeignKey("Another", models.CASCADE), relative=models.ManyToManyField("Target"), ) def test_m2m_to_m2m(self): self._test_accessor_clash( target=models.ManyToManyField("Another"), relative=models.ManyToManyField("Target"), ) def _test_accessor_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): model_set = target class Model(models.Model): rel = relative self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Target.model_set' for " "'invalid_models_tests.Model.rel' clashes with field name " "'invalid_models_tests.Target.model_set'.", hint=( "Rename field 'invalid_models_tests.Target.model_set', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.rel'." ), obj=Model._meta.get_field("rel"), id="fields.E302", ), ], ) def test_clash_between_accessors(self): class Target(models.Model): pass class Model(models.Model): foreign = models.ForeignKey(Target, models.CASCADE) m2m = models.ManyToManyField(Target) self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Target.model_set' for " "'invalid_models_tests.Model.foreign' clashes with reverse " "accessor for 'invalid_models_tests.Model.m2m'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.foreign' or " "'invalid_models_tests.Model.m2m'." ), obj=Model._meta.get_field("foreign"), id="fields.E304", ), Error( "Reverse accessor 'Target.model_set' for " "'invalid_models_tests.Model.m2m' clashes with reverse " "accessor for 'invalid_models_tests.Model.foreign'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.m2m' or " "'invalid_models_tests.Model.foreign'." ), obj=Model._meta.get_field("m2m"), id="fields.E304", ), ], ) def test_m2m_to_m2m_with_inheritance(self): """Ref #22047.""" class Target(models.Model): pass class Model(models.Model): children = models.ManyToManyField( "Child", related_name="m2m_clash", related_query_name="no_clash" ) class Parent(models.Model): m2m_clash = models.ManyToManyField("Target") class Child(Parent): pass self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Child.m2m_clash' for " "'invalid_models_tests.Model.children' clashes with field " "name 'invalid_models_tests.Child.m2m_clash'.", hint=( "Rename field 'invalid_models_tests.Child.m2m_clash', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.children'." 
), obj=Model._meta.get_field("children"), id="fields.E302", ) ], ) def test_no_clash_for_hidden_related_name(self): class Stub(models.Model): pass class ManyToManyRel(models.Model): thing1 = models.ManyToManyField(Stub, related_name="+") thing2 = models.ManyToManyField(Stub, related_name="+") class FKRel(models.Model): thing1 = models.ForeignKey(Stub, models.CASCADE, related_name="+") thing2 = models.ForeignKey(Stub, models.CASCADE, related_name="+") self.assertEqual(ManyToManyRel.check(), []) self.assertEqual(FKRel.check(), []) @isolate_apps("invalid_models_tests") class ReverseQueryNameClashTests(SimpleTestCase): def test_fk_to_integer(self): self._test_reverse_query_name_clash( target=models.IntegerField(), relative=models.ForeignKey("Target", models.CASCADE), ) def test_fk_to_fk(self): self._test_reverse_query_name_clash( target=models.ForeignKey("Another", models.CASCADE), relative=models.ForeignKey("Target", models.CASCADE), ) def test_fk_to_m2m(self): self._test_reverse_query_name_clash( target=models.ManyToManyField("Another"), relative=models.ForeignKey("Target", models.CASCADE), ) def test_m2m_to_integer(self): self._test_reverse_query_name_clash( target=models.IntegerField(), relative=models.ManyToManyField("Target") ) def test_m2m_to_fk(self): self._test_reverse_query_name_clash( target=models.ForeignKey("Another", models.CASCADE), relative=models.ManyToManyField("Target"), ) def test_m2m_to_m2m(self): self._test_reverse_query_name_clash( target=models.ManyToManyField("Another"), relative=models.ManyToManyField("Target"), ) def _test_reverse_query_name_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): model = target class Model(models.Model): rel = relative self.assertEqual( Model.check(), [ Error( "Reverse query name for 'invalid_models_tests.Model.rel' " "clashes with field name 'invalid_models_tests.Target.model'.", hint=( "Rename field 'invalid_models_tests.Target.model', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.rel'." ), obj=Model._meta.get_field("rel"), id="fields.E303", ), ], ) @modify_settings(INSTALLED_APPS={"append": "basic"}) @isolate_apps("basic", "invalid_models_tests") def test_no_clash_across_apps_without_accessor(self): class Target(models.Model): class Meta: app_label = "invalid_models_tests" class Model(models.Model): m2m = models.ManyToManyField(Target, related_name="+") class Meta: app_label = "basic" def _test(): # Define model with the same name. 
class Model(models.Model): m2m = models.ManyToManyField(Target, related_name="+") class Meta: app_label = "invalid_models_tests" self.assertEqual(Model.check(), []) _test() self.assertEqual(Model.check(), []) @isolate_apps("invalid_models_tests") class ExplicitRelatedNameClashTests(SimpleTestCase): def test_fk_to_integer(self): self._test_explicit_related_name_clash( target=models.IntegerField(), relative=models.ForeignKey("Target", models.CASCADE, related_name="clash"), ) def test_fk_to_fk(self): self._test_explicit_related_name_clash( target=models.ForeignKey("Another", models.CASCADE), relative=models.ForeignKey("Target", models.CASCADE, related_name="clash"), ) def test_fk_to_m2m(self): self._test_explicit_related_name_clash( target=models.ManyToManyField("Another"), relative=models.ForeignKey("Target", models.CASCADE, related_name="clash"), ) def test_m2m_to_integer(self): self._test_explicit_related_name_clash( target=models.IntegerField(), relative=models.ManyToManyField("Target", related_name="clash"), ) def test_m2m_to_fk(self): self._test_explicit_related_name_clash( target=models.ForeignKey("Another", models.CASCADE), relative=models.ManyToManyField("Target", related_name="clash"), ) def test_m2m_to_m2m(self): self._test_explicit_related_name_clash( target=models.ManyToManyField("Another"), relative=models.ManyToManyField("Target", related_name="clash"), ) def _test_explicit_related_name_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): clash = target class Model(models.Model): rel = relative self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Target.clash' for " "'invalid_models_tests.Model.rel' clashes with field name " "'invalid_models_tests.Target.clash'.", hint=( "Rename field 'invalid_models_tests.Target.clash', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.rel'." ), obj=Model._meta.get_field("rel"), id="fields.E302", ), Error( "Reverse query name for 'invalid_models_tests.Model.rel' " "clashes with field name 'invalid_models_tests.Target.clash'.", hint=( "Rename field 'invalid_models_tests.Target.clash', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.rel'." 
), obj=Model._meta.get_field("rel"), id="fields.E303", ), ], ) @isolate_apps("invalid_models_tests") class ExplicitRelatedQueryNameClashTests(SimpleTestCase): def test_fk_to_integer(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.IntegerField(), relative=models.ForeignKey( "Target", models.CASCADE, related_name=related_name, related_query_name="clash", ), ) def test_hidden_fk_to_integer(self, related_name=None): self.test_fk_to_integer(related_name="+") def test_fk_to_fk(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.ForeignKey("Another", models.CASCADE), relative=models.ForeignKey( "Target", models.CASCADE, related_name=related_name, related_query_name="clash", ), ) def test_hidden_fk_to_fk(self): self.test_fk_to_fk(related_name="+") def test_fk_to_m2m(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.ManyToManyField("Another"), relative=models.ForeignKey( "Target", models.CASCADE, related_name=related_name, related_query_name="clash", ), ) def test_hidden_fk_to_m2m(self): self.test_fk_to_m2m(related_name="+") def test_m2m_to_integer(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.IntegerField(), relative=models.ManyToManyField( "Target", related_name=related_name, related_query_name="clash" ), ) def test_hidden_m2m_to_integer(self): self.test_m2m_to_integer(related_name="+") def test_m2m_to_fk(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.ForeignKey("Another", models.CASCADE), relative=models.ManyToManyField( "Target", related_name=related_name, related_query_name="clash" ), ) def test_hidden_m2m_to_fk(self): self.test_m2m_to_fk(related_name="+") def test_m2m_to_m2m(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.ManyToManyField("Another"), relative=models.ManyToManyField( "Target", related_name=related_name, related_query_name="clash", ), ) def test_hidden_m2m_to_m2m(self): self.test_m2m_to_m2m(related_name="+") def _test_explicit_related_query_name_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): clash = target class Model(models.Model): rel = relative self.assertEqual( Model.check(), [ Error( "Reverse query name for 'invalid_models_tests.Model.rel' " "clashes with field name 'invalid_models_tests.Target.clash'.", hint=( "Rename field 'invalid_models_tests.Target.clash', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.rel'." ), obj=Model._meta.get_field("rel"), id="fields.E303", ), ], ) @isolate_apps("invalid_models_tests") class SelfReferentialM2MClashTests(SimpleTestCase): def test_clash_between_accessors(self): class Model(models.Model): first_m2m = models.ManyToManyField("self", symmetrical=False) second_m2m = models.ManyToManyField("self", symmetrical=False) self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Model.model_set' for " "'invalid_models_tests.Model.first_m2m' clashes with reverse " "accessor for 'invalid_models_tests.Model.second_m2m'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.first_m2m' or " "'invalid_models_tests.Model.second_m2m'." 
), obj=Model._meta.get_field("first_m2m"), id="fields.E304", ), Error( "Reverse accessor 'Model.model_set' for " "'invalid_models_tests.Model.second_m2m' clashes with reverse " "accessor for 'invalid_models_tests.Model.first_m2m'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.second_m2m' or " "'invalid_models_tests.Model.first_m2m'." ), obj=Model._meta.get_field("second_m2m"), id="fields.E304", ), ], ) def test_accessor_clash(self): class Model(models.Model): model_set = models.ManyToManyField("self", symmetrical=False) self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Model.model_set' for " "'invalid_models_tests.Model.model_set' clashes with field " "name 'invalid_models_tests.Model.model_set'.", hint=( "Rename field 'invalid_models_tests.Model.model_set', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.model_set'." ), obj=Model._meta.get_field("model_set"), id="fields.E302", ), ], ) def test_reverse_query_name_clash(self): class Model(models.Model): model = models.ManyToManyField("self", symmetrical=False) self.assertEqual( Model.check(), [ Error( "Reverse query name for 'invalid_models_tests.Model.model' " "clashes with field name 'invalid_models_tests.Model.model'.", hint=( "Rename field 'invalid_models_tests.Model.model', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.model'." ), obj=Model._meta.get_field("model"), id="fields.E303", ), ], ) def test_clash_under_explicit_related_name(self): class Model(models.Model): clash = models.IntegerField() m2m = models.ManyToManyField( "self", symmetrical=False, related_name="clash" ) self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Model.clash' for " "'invalid_models_tests.Model.m2m' clashes with field name " "'invalid_models_tests.Model.clash'.", hint=( "Rename field 'invalid_models_tests.Model.clash', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.m2m'." ), obj=Model._meta.get_field("m2m"), id="fields.E302", ), Error( "Reverse query name for 'invalid_models_tests.Model.m2m' " "clashes with field name 'invalid_models_tests.Model.clash'.", hint=( "Rename field 'invalid_models_tests.Model.clash', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.m2m'." ), obj=Model._meta.get_field("m2m"), id="fields.E303", ), ], ) def test_valid_model(self): class Model(models.Model): first = models.ManyToManyField( "self", symmetrical=False, related_name="first_accessor" ) second = models.ManyToManyField( "self", symmetrical=False, related_name="second_accessor" ) self.assertEqual(Model.check(), []) @isolate_apps("invalid_models_tests") class SelfReferentialFKClashTests(SimpleTestCase): def test_accessor_clash(self): class Model(models.Model): model_set = models.ForeignKey("Model", models.CASCADE) self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Model.model_set' for " "'invalid_models_tests.Model.model_set' clashes with field " "name 'invalid_models_tests.Model.model_set'.", hint=( "Rename field 'invalid_models_tests.Model.model_set', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.model_set'." 
), obj=Model._meta.get_field("model_set"), id="fields.E302", ), ], ) def test_reverse_query_name_clash(self): class Model(models.Model): model = models.ForeignKey("Model", models.CASCADE) self.assertEqual( Model.check(), [ Error( "Reverse query name for 'invalid_models_tests.Model.model' " "clashes with field name 'invalid_models_tests.Model.model'.", hint=( "Rename field 'invalid_models_tests.Model.model', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.model'." ), obj=Model._meta.get_field("model"), id="fields.E303", ), ], ) def test_clash_under_explicit_related_name(self): class Model(models.Model): clash = models.CharField(max_length=10) foreign = models.ForeignKey("Model", models.CASCADE, related_name="clash") self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Model.clash' for " "'invalid_models_tests.Model.foreign' clashes with field name " "'invalid_models_tests.Model.clash'.", hint=( "Rename field 'invalid_models_tests.Model.clash', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.foreign'." ), obj=Model._meta.get_field("foreign"), id="fields.E302", ), Error( "Reverse query name for 'invalid_models_tests.Model.foreign' " "clashes with field name 'invalid_models_tests.Model.clash'.", hint=( "Rename field 'invalid_models_tests.Model.clash', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.foreign'." ), obj=Model._meta.get_field("foreign"), id="fields.E303", ), ], ) @isolate_apps("invalid_models_tests") class ComplexClashTests(SimpleTestCase): # New tests should not be included here, because this is a single, # self-contained sanity check, not a test of everything. def test_complex_clash(self): class Target(models.Model): tgt_safe = models.CharField(max_length=10) clash = models.CharField(max_length=10) model = models.CharField(max_length=10) clash1_set = models.CharField(max_length=10) class Model(models.Model): src_safe = models.CharField(max_length=10) foreign_1 = models.ForeignKey(Target, models.CASCADE, related_name="id") foreign_2 = models.ForeignKey( Target, models.CASCADE, related_name="src_safe" ) m2m_1 = models.ManyToManyField(Target, related_name="id") m2m_2 = models.ManyToManyField(Target, related_name="src_safe") self.assertEqual( Model.check(), [ Error( "Reverse accessor 'Target.id' for " "'invalid_models_tests.Model.foreign_1' clashes with field " "name 'invalid_models_tests.Target.id'.", hint=( "Rename field 'invalid_models_tests.Target.id', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.foreign_1'." ), obj=Model._meta.get_field("foreign_1"), id="fields.E302", ), Error( "Reverse query name for 'invalid_models_tests.Model.foreign_1' " "clashes with field name 'invalid_models_tests.Target.id'.", hint=( "Rename field 'invalid_models_tests.Target.id', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.foreign_1'." ), obj=Model._meta.get_field("foreign_1"), id="fields.E303", ), Error( "Reverse accessor 'Target.id' for " "'invalid_models_tests.Model.foreign_1' clashes with reverse " "accessor for 'invalid_models_tests.Model.m2m_1'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.foreign_1' or " "'invalid_models_tests.Model.m2m_1'." 
), obj=Model._meta.get_field("foreign_1"), id="fields.E304", ), Error( "Reverse query name for 'invalid_models_tests.Model.foreign_1' " "clashes with reverse query name for " "'invalid_models_tests.Model.m2m_1'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.foreign_1' or " "'invalid_models_tests.Model.m2m_1'." ), obj=Model._meta.get_field("foreign_1"), id="fields.E305", ), Error( "Reverse accessor 'Target.src_safe' for " "'invalid_models_tests.Model.foreign_2' clashes with reverse " "accessor for 'invalid_models_tests.Model.m2m_2'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.foreign_2' or " "'invalid_models_tests.Model.m2m_2'." ), obj=Model._meta.get_field("foreign_2"), id="fields.E304", ), Error( "Reverse query name for 'invalid_models_tests.Model.foreign_2' " "clashes with reverse query name for " "'invalid_models_tests.Model.m2m_2'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.foreign_2' or " "'invalid_models_tests.Model.m2m_2'." ), obj=Model._meta.get_field("foreign_2"), id="fields.E305", ), Error( "Reverse accessor 'Target.id' for " "'invalid_models_tests.Model.m2m_1' clashes with field name " "'invalid_models_tests.Target.id'.", hint=( "Rename field 'invalid_models_tests.Target.id', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.m2m_1'." ), obj=Model._meta.get_field("m2m_1"), id="fields.E302", ), Error( "Reverse query name for 'invalid_models_tests.Model.m2m_1' " "clashes with field name 'invalid_models_tests.Target.id'.", hint=( "Rename field 'invalid_models_tests.Target.id', or " "add/change a related_name argument to the definition for " "field 'invalid_models_tests.Model.m2m_1'." ), obj=Model._meta.get_field("m2m_1"), id="fields.E303", ), Error( "Reverse accessor 'Target.id' for " "'invalid_models_tests.Model.m2m_1' clashes with reverse " "accessor for 'invalid_models_tests.Model.foreign_1'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.m2m_1' or " "'invalid_models_tests.Model.foreign_1'." ), obj=Model._meta.get_field("m2m_1"), id="fields.E304", ), Error( "Reverse query name for 'invalid_models_tests.Model.m2m_1' " "clashes with reverse query name for " "'invalid_models_tests.Model.foreign_1'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.m2m_1' or " "'invalid_models_tests.Model.foreign_1'." ), obj=Model._meta.get_field("m2m_1"), id="fields.E305", ), Error( "Reverse accessor 'Target.src_safe' for " "'invalid_models_tests.Model.m2m_2' clashes with reverse " "accessor for 'invalid_models_tests.Model.foreign_2'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.m2m_2' or " "'invalid_models_tests.Model.foreign_2'." ), obj=Model._meta.get_field("m2m_2"), id="fields.E304", ), Error( "Reverse query name for 'invalid_models_tests.Model.m2m_2' " "clashes with reverse query name for " "'invalid_models_tests.Model.foreign_2'.", hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Model.m2m_2' or " "'invalid_models_tests.Model.foreign_2'." 
), obj=Model._meta.get_field("m2m_2"), id="fields.E305", ), ], ) def test_clash_parent_link(self): class Parent(models.Model): pass class Child(Parent): other_parent = models.OneToOneField(Parent, models.CASCADE) errors = [ ( "fields.E304", "accessor", " 'Parent.child'", "parent_ptr", "other_parent", ), ("fields.E305", "query name", "", "parent_ptr", "other_parent"), ( "fields.E304", "accessor", " 'Parent.child'", "other_parent", "parent_ptr", ), ("fields.E305", "query name", "", "other_parent", "parent_ptr"), ] self.assertEqual( Child.check(), [ Error( "Reverse %s%s for 'invalid_models_tests.Child.%s' clashes with " "reverse %s for 'invalid_models_tests.Child.%s'." % (attr, rel_name, field_name, attr, clash_name), hint=( "Add or change a related_name argument to the definition " "for 'invalid_models_tests.Child.%s' or " "'invalid_models_tests.Child.%s'." % (field_name, clash_name) ), obj=Child._meta.get_field(field_name), id=error_id, ) for error_id, attr, rel_name, field_name, clash_name in errors ], ) @isolate_apps("invalid_models_tests") class M2mThroughFieldsTests(SimpleTestCase): def test_m2m_field_argument_validation(self): """ ManyToManyField accepts the ``through_fields`` kwarg only if an intermediary table is specified. """ class Fan(models.Model): pass with self.assertRaisesMessage( ValueError, "Cannot specify through_fields without a through model" ): models.ManyToManyField(Fan, through_fields=("f1", "f2")) def test_invalid_order(self): """ Mixing up the order of link fields to ManyToManyField.through_fields triggers validation errors. """ class Fan(models.Model): pass class Event(models.Model): invitees = models.ManyToManyField( Fan, through="Invitation", through_fields=("invitee", "event") ) class Invitation(models.Model): event = models.ForeignKey(Event, models.CASCADE) invitee = models.ForeignKey(Fan, models.CASCADE) inviter = models.ForeignKey(Fan, models.CASCADE, related_name="+") field = Event._meta.get_field("invitees") self.assertEqual( field.check(from_model=Event), [ Error( "'Invitation.invitee' is not a foreign key to 'Event'.", hint=( "Did you mean one of the following foreign keys to 'Event': " "event?" ), obj=field, id="fields.E339", ), Error( "'Invitation.event' is not a foreign key to 'Fan'.", hint=( "Did you mean one of the following foreign keys to 'Fan': " "invitee, inviter?" ), obj=field, id="fields.E339", ), ], ) def test_invalid_field(self): """ Providing invalid field names to ManyToManyField.through_fields triggers validation errors. """ class Fan(models.Model): pass class Event(models.Model): invitees = models.ManyToManyField( Fan, through="Invitation", through_fields=("invalid_field_1", "invalid_field_2"), ) class Invitation(models.Model): event = models.ForeignKey(Event, models.CASCADE) invitee = models.ForeignKey(Fan, models.CASCADE) inviter = models.ForeignKey(Fan, models.CASCADE, related_name="+") field = Event._meta.get_field("invitees") self.assertEqual( field.check(from_model=Event), [ Error( "The intermediary model 'invalid_models_tests.Invitation' has no " "field 'invalid_field_1'.", hint=( "Did you mean one of the following foreign keys to 'Event': " "event?" ), obj=field, id="fields.E338", ), Error( "The intermediary model 'invalid_models_tests.Invitation' has no " "field 'invalid_field_2'.", hint=( "Did you mean one of the following foreign keys to 'Fan': " "invitee, inviter?" 
), obj=field, id="fields.E338", ), ], ) def test_explicit_field_names(self): """ If ``through_fields`` kwarg is given, it must specify both link fields of the intermediary table. """ class Fan(models.Model): pass class Event(models.Model): invitees = models.ManyToManyField( Fan, through="Invitation", through_fields=(None, "invitee") ) class Invitation(models.Model): event = models.ForeignKey(Event, models.CASCADE) invitee = models.ForeignKey(Fan, models.CASCADE) inviter = models.ForeignKey(Fan, models.CASCADE, related_name="+") field = Event._meta.get_field("invitees") self.assertEqual( field.check(from_model=Event), [ Error( "Field specifies 'through_fields' but does not provide the names " "of the two link fields that should be used for the relation " "through model 'invalid_models_tests.Invitation'.", hint=( "Make sure you specify 'through_fields' as " "through_fields=('field1', 'field2')" ), obj=field, id="fields.E337", ), ], ) def test_superset_foreign_object(self): class Parent(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() c = models.PositiveIntegerField() class Meta: unique_together = (("a", "b", "c"),) class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() value = models.CharField(max_length=255) parent = models.ForeignObject( Parent, on_delete=models.SET_NULL, from_fields=("a", "b"), to_fields=("a", "b"), related_name="children", ) field = Child._meta.get_field("parent") self.assertEqual( field.check(from_model=Child), [ Error( "No subset of the fields 'a', 'b' on model 'Parent' is unique.", hint=( "Mark a single field as unique=True or add a set of " "fields to a unique constraint (via unique_together or a " "UniqueConstraint (without condition) in the model " "Meta.constraints)." ), obj=field, id="fields.E310", ), ], ) def test_intersection_foreign_object(self): class Parent(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() c = models.PositiveIntegerField() d = models.PositiveIntegerField() class Meta: unique_together = (("a", "b", "c"),) class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() d = models.PositiveIntegerField() value = models.CharField(max_length=255) parent = models.ForeignObject( Parent, on_delete=models.SET_NULL, from_fields=("a", "b", "d"), to_fields=("a", "b", "d"), related_name="children", ) field = Child._meta.get_field("parent") self.assertEqual( field.check(from_model=Child), [ Error( "No subset of the fields 'a', 'b', 'd' on model 'Parent' is " "unique.", hint=( "Mark a single field as unique=True or add a set of " "fields to a unique constraint (via unique_together or a " "UniqueConstraint (without condition) in the model " "Meta.constraints)." ), obj=field, id="fields.E310", ), ], )
d87b420416ce36893a62e3dad34649ba4fdb70c913fb49e220a3b3c7193ac8b1
import unittest

from django.core.checks import Error, Warning
from django.core.checks.model_checks import _check_lazy_references
from django.db import connection, connections, models
from django.db.models.functions import Abs, Lower, Round
from django.db.models.signals import post_init
from django.test import SimpleTestCase, TestCase, ignore_warnings, skipUnlessDBFeature
from django.test.utils import isolate_apps, override_settings, register_lookup
from django.utils.deprecation import RemovedInDjango51Warning


class EmptyRouter:
    pass


def get_max_column_name_length():
    allowed_len = None
    db_alias = None

    for db in ("default", "other"):
        connection = connections[db]
        max_name_length = connection.ops.max_name_length()
        if max_name_length is not None and not connection.features.truncates_names:
            if allowed_len is None or max_name_length < allowed_len:
                allowed_len = max_name_length
                db_alias = db

    return (allowed_len, db_alias)


@isolate_apps("invalid_models_tests")
@ignore_warnings(category=RemovedInDjango51Warning)
class IndexTogetherTests(SimpleTestCase):
    def test_non_iterable(self):
        class Model(models.Model):
            class Meta:
                index_together = 42

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'index_together' must be a list or tuple.",
                    obj=Model,
                    id="models.E008",
                ),
            ],
        )

    def test_non_list(self):
        class Model(models.Model):
            class Meta:
                index_together = "not-a-list"

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'index_together' must be a list or tuple.",
                    obj=Model,
                    id="models.E008",
                ),
            ],
        )

    def test_list_containing_non_iterable(self):
        class Model(models.Model):
            class Meta:
                index_together = [("a", "b"), 42]

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "All 'index_together' elements must be lists or tuples.",
                    obj=Model,
                    id="models.E009",
                ),
            ],
        )

    def test_pointing_to_missing_field(self):
        class Model(models.Model):
            class Meta:
                index_together = [["missing_field"]]

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'index_together' refers to the nonexistent field 'missing_field'.",
                    obj=Model,
                    id="models.E012",
                ),
            ],
        )

    def test_pointing_to_non_local_field(self):
        class Foo(models.Model):
            field1 = models.IntegerField()

        class Bar(Foo):
            field2 = models.IntegerField()

            class Meta:
                index_together = [["field2", "field1"]]

        self.assertEqual(
            Bar.check(),
            [
                Error(
                    "'index_together' refers to field 'field1' which is not "
                    "local to model 'Bar'.",
                    hint="This issue may be caused by multi-table inheritance.",
                    obj=Bar,
                    id="models.E016",
                ),
            ],
        )

    def test_pointing_to_m2m_field(self):
        class Model(models.Model):
            m2m = models.ManyToManyField("self")

            class Meta:
                index_together = [["m2m"]]

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'index_together' refers to a ManyToManyField 'm2m', but "
                    "ManyToManyFields are not permitted in 'index_together'.",
                    obj=Model,
                    id="models.E013",
                ),
            ],
        )

    def test_pointing_to_fk(self):
        class Foo(models.Model):
            pass

        class Bar(models.Model):
            foo_1 = models.ForeignKey(
                Foo, on_delete=models.CASCADE, related_name="bar_1"
            )
            foo_2 = models.ForeignKey(
                Foo, on_delete=models.CASCADE, related_name="bar_2"
            )

            class Meta:
                index_together = [["foo_1_id", "foo_2"]]

        self.assertEqual(Bar.check(), [])


# unique_together tests are very similar to index_together tests.
@isolate_apps("invalid_models_tests")
class UniqueTogetherTests(SimpleTestCase):
    def test_non_iterable(self):
        class Model(models.Model):
            class Meta:
                unique_together = 42

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'unique_together' must be a list or tuple.",
                    obj=Model,
                    id="models.E010",
                ),
            ],
        )

    def test_list_containing_non_iterable(self):
        class Model(models.Model):
            one = models.IntegerField()
            two = models.IntegerField()

            class Meta:
                unique_together = [("a", "b"), 42]

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "All 'unique_together' elements must be lists or tuples.",
                    obj=Model,
                    id="models.E011",
                ),
            ],
        )

    def test_non_list(self):
        class Model(models.Model):
            class Meta:
                unique_together = "not-a-list"

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'unique_together' must be a list or tuple.",
                    obj=Model,
                    id="models.E010",
                ),
            ],
        )

    def test_valid_model(self):
        class Model(models.Model):
            one = models.IntegerField()
            two = models.IntegerField()

            class Meta:
                # unique_together can be a simple tuple
                unique_together = ("one", "two")

        self.assertEqual(Model.check(), [])

    def test_pointing_to_missing_field(self):
        class Model(models.Model):
            class Meta:
                unique_together = [["missing_field"]]

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'unique_together' refers to the nonexistent field "
                    "'missing_field'.",
                    obj=Model,
                    id="models.E012",
                ),
            ],
        )

    def test_pointing_to_m2m(self):
        class Model(models.Model):
            m2m = models.ManyToManyField("self")

            class Meta:
                unique_together = [["m2m"]]

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'unique_together' refers to a ManyToManyField 'm2m', but "
                    "ManyToManyFields are not permitted in 'unique_together'.",
                    obj=Model,
                    id="models.E013",
                ),
            ],
        )

    def test_pointing_to_fk(self):
        class Foo(models.Model):
            pass

        class Bar(models.Model):
            foo_1 = models.ForeignKey(
                Foo, on_delete=models.CASCADE, related_name="bar_1"
            )
            foo_2 = models.ForeignKey(
                Foo, on_delete=models.CASCADE, related_name="bar_2"
            )

            class Meta:
                unique_together = [["foo_1_id", "foo_2"]]

        self.assertEqual(Bar.check(), [])


@isolate_apps("invalid_models_tests")
class IndexesTests(TestCase):
    def test_pointing_to_missing_field(self):
        class Model(models.Model):
            class Meta:
                indexes = [models.Index(fields=["missing_field"], name="name")]

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'indexes' refers to the nonexistent field 'missing_field'.",
                    obj=Model,
                    id="models.E012",
                ),
            ],
        )

    def test_pointing_to_m2m_field(self):
        class Model(models.Model):
            m2m = models.ManyToManyField("self")

            class Meta:
                indexes = [models.Index(fields=["m2m"], name="name")]

        self.assertEqual(
            Model.check(),
            [
                Error(
                    "'indexes' refers to a ManyToManyField 'm2m', but "
                    "ManyToManyFields are not permitted in 'indexes'.",
                    obj=Model,
                    id="models.E013",
                ),
            ],
        )

    def test_pointing_to_non_local_field(self):
        class Foo(models.Model):
            field1 = models.IntegerField()

        class Bar(Foo):
            field2 = models.IntegerField()

            class Meta:
                indexes = [models.Index(fields=["field2", "field1"], name="name")]

        self.assertEqual(
            Bar.check(),
            [
                Error(
                    "'indexes' refers to field 'field1' which is not local to "
                    "model 'Bar'.",
                    hint="This issue may be caused by multi-table inheritance.",
                    obj=Bar,
                    id="models.E016",
                ),
            ],
        )

    def test_pointing_to_fk(self):
        class Foo(models.Model):
            pass

        class Bar(models.Model):
            foo_1 = models.ForeignKey(
                Foo, on_delete=models.CASCADE, related_name="bar_1"
            )
            foo_2 = models.ForeignKey(
                Foo, on_delete=models.CASCADE, related_name="bar_2"
            )

            class Meta:
                indexes = [
                    models.Index(fields=["foo_1_id", "foo_2"], name="index_name")
                ]

        self.assertEqual(Bar.check(), [])

    def test_name_constraints(self):
        class
Model(models.Model): class Meta: indexes = [ models.Index(fields=["id"], name="_index_name"), models.Index(fields=["id"], name="5index_name"), ] self.assertEqual( Model.check(), [ Error( "The index name '%sindex_name' cannot start with an " "underscore or a number." % prefix, obj=Model, id="models.E033", ) for prefix in ("_", "5") ], ) def test_max_name_length(self): index_name = "x" * 31 class Model(models.Model): class Meta: indexes = [models.Index(fields=["id"], name=index_name)] self.assertEqual( Model.check(), [ Error( "The index name '%s' cannot be longer than 30 characters." % index_name, obj=Model, id="models.E034", ), ], ) def test_index_with_condition(self): class Model(models.Model): age = models.IntegerField() class Meta: indexes = [ models.Index( fields=["age"], name="index_age_gte_10", condition=models.Q(age__gte=10), ), ] errors = Model.check(databases=self.databases) expected = ( [] if connection.features.supports_partial_indexes else [ Warning( "%s does not support indexes with conditions." % connection.display_name, hint=( "Conditions will be ignored. Silence this warning if you " "don't care about it." ), obj=Model, id="models.W037", ) ] ) self.assertEqual(errors, expected) def test_index_with_condition_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {"supports_partial_indexes"} indexes = [ models.Index( fields=["age"], name="index_age_gte_10", condition=models.Q(age__gte=10), ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_index_with_include(self): class Model(models.Model): age = models.IntegerField() class Meta: indexes = [ models.Index( fields=["age"], name="index_age_include_id", include=["id"], ), ] errors = Model.check(databases=self.databases) expected = ( [] if connection.features.supports_covering_indexes else [ Warning( "%s does not support indexes with non-key columns." % connection.display_name, hint=( "Non-key columns will be ignored. Silence this warning if " "you don't care about it." 
), obj=Model, id="models.W040", ) ] ) self.assertEqual(errors, expected) def test_index_with_include_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {"supports_covering_indexes"} indexes = [ models.Index( fields=["age"], name="index_age_include_id", include=["id"], ), ] self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature("supports_covering_indexes") def test_index_include_pointing_to_missing_field(self): class Model(models.Model): class Meta: indexes = [ models.Index(fields=["id"], include=["missing_field"], name="name"), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'indexes' refers to the nonexistent field 'missing_field'.", obj=Model, id="models.E012", ), ], ) @skipUnlessDBFeature("supports_covering_indexes") def test_index_include_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField("self") class Meta: indexes = [models.Index(fields=["id"], include=["m2m"], name="name")] self.assertEqual( Model.check(databases=self.databases), [ Error( "'indexes' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'indexes'.", obj=Model, id="models.E013", ), ], ) @skipUnlessDBFeature("supports_covering_indexes") def test_index_include_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): field2 = models.IntegerField() class Meta: indexes = [ models.Index(fields=["field2"], include=["field1"], name="name"), ] self.assertEqual( Child.check(databases=self.databases), [ Error( "'indexes' refers to field 'field1' which is not local to " "model 'Child'.", hint="This issue may be caused by multi-table inheritance.", obj=Child, id="models.E016", ), ], ) @skipUnlessDBFeature("supports_covering_indexes") def test_index_include_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name="target_1") fk_2 = models.ForeignKey(Target, models.CASCADE, related_name="target_2") class Meta: constraints = [ models.Index( fields=["id"], include=["fk_1_id", "fk_2"], name="name", ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_func_index(self): class Model(models.Model): name = models.CharField(max_length=10) class Meta: indexes = [models.Index(Lower("name"), name="index_lower_name")] warn = Warning( "%s does not support indexes on expressions." % connection.display_name, hint=( "An index won't be created. Silence this warning if you don't " "care about it." 
), obj=Model, id="models.W043", ) expected = [] if connection.features.supports_expression_indexes else [warn] self.assertEqual(Model.check(databases=self.databases), expected) def test_func_index_required_db_features(self): class Model(models.Model): name = models.CharField(max_length=10) class Meta: indexes = [models.Index(Lower("name"), name="index_lower_name")] required_db_features = {"supports_expression_indexes"} self.assertEqual(Model.check(databases=self.databases), []) def test_func_index_complex_expression_custom_lookup(self): class Model(models.Model): height = models.IntegerField() weight = models.IntegerField() class Meta: indexes = [ models.Index( models.F("height") / (models.F("weight__abs") + models.Value(5)), name="name", ), ] with register_lookup(models.IntegerField, Abs): self.assertEqual(Model.check(), []) def test_func_index_pointing_to_missing_field(self): class Model(models.Model): class Meta: indexes = [models.Index(Lower("missing_field").desc(), name="name")] self.assertEqual( Model.check(), [ Error( "'indexes' refers to the nonexistent field 'missing_field'.", obj=Model, id="models.E012", ), ], ) def test_func_index_pointing_to_missing_field_nested(self): class Model(models.Model): class Meta: indexes = [ models.Index(Abs(Round("missing_field")), name="name"), ] self.assertEqual( Model.check(), [ Error( "'indexes' refers to the nonexistent field 'missing_field'.", obj=Model, id="models.E012", ), ], ) def test_func_index_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField("self") class Meta: indexes = [models.Index(Lower("m2m"), name="name")] self.assertEqual( Model.check(), [ Error( "'indexes' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'indexes'.", obj=Model, id="models.E013", ), ], ) def test_func_index_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.CharField(max_length=15) class Bar(Foo): class Meta: indexes = [models.Index(Lower("field1"), name="name")] self.assertEqual( Bar.check(), [ Error( "'indexes' refers to field 'field1' which is not local to " "model 'Bar'.", hint="This issue may be caused by multi-table inheritance.", obj=Bar, id="models.E016", ), ], ) def test_func_index_pointing_to_fk(self): class Foo(models.Model): pass class Bar(models.Model): foo_1 = models.ForeignKey(Foo, models.CASCADE, related_name="bar_1") foo_2 = models.ForeignKey(Foo, models.CASCADE, related_name="bar_2") class Meta: indexes = [ models.Index(Lower("foo_1_id"), Lower("foo_2"), name="index_name"), ] self.assertEqual(Bar.check(), []) @isolate_apps("invalid_models_tests") class FieldNamesTests(TestCase): databases = {"default", "other"} def test_ending_with_underscore(self): class Model(models.Model): field_ = models.CharField(max_length=10) m2m_ = models.ManyToManyField("self") self.assertEqual( Model.check(), [ Error( "Field names must not end with an underscore.", obj=Model._meta.get_field("field_"), id="fields.E001", ), Error( "Field names must not end with an underscore.", obj=Model._meta.get_field("m2m_"), id="fields.E001", ), ], ) max_column_name_length, column_limit_db_alias = get_max_column_name_length() @unittest.skipIf( max_column_name_length is None, "The database doesn't have a column name length limit.", ) def test_M2M_long_column_name(self): """ #13711 -- Model check for long M2M column names when database has column name length limits. """ # A model with very long name which will be used to set relations to. 
class VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz( models.Model ): title = models.CharField(max_length=11) # Main model for which checks will be performed. class ModelWithLongField(models.Model): m2m_field = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name="rn1", ) m2m_field2 = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name="rn2", through="m2msimple", ) m2m_field3 = models.ManyToManyField( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, related_name="rn3", through="m2mcomplex", ) fk = models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, related_name="rn4", ) # Models used for setting `through` in M2M field. class m2msimple(models.Model): id2 = models.ForeignKey(ModelWithLongField, models.CASCADE) class m2mcomplex(models.Model): id2 = models.ForeignKey(ModelWithLongField, models.CASCADE) long_field_name = "a" * (self.max_column_name_length + 1) models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, ).contribute_to_class(m2msimple, long_field_name) models.ForeignKey( VeryLongModelNamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz, models.CASCADE, db_column=long_field_name, ).contribute_to_class(m2mcomplex, long_field_name) errors = ModelWithLongField.check(databases=("default", "other")) # First error because of M2M field set on the model with long name. m2m_long_name = ( "verylongmodelnamezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz_id" ) if self.max_column_name_length > len(m2m_long_name): # Some databases support names longer than the test name. expected = [] else: expected = [ Error( 'Autogenerated column name too long for M2M field "%s". ' 'Maximum length is "%s" for database "%s".' % ( m2m_long_name, self.max_column_name_length, self.column_limit_db_alias, ), hint="Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'.", obj=ModelWithLongField, id="models.E019", ) ] # Second error because the FK specified in the `through` model # `m2msimple` has auto-generated name longer than allowed. # There will be no check errors in the other M2M because it # specifies db_column for the FK in `through` model even if the actual # name is longer than the limits of the database. expected.append( Error( 'Autogenerated column name too long for M2M field "%s_id". ' 'Maximum length is "%s" for database "%s".' % ( long_field_name, self.max_column_name_length, self.column_limit_db_alias, ), hint="Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'.", obj=ModelWithLongField, id="models.E019", ) ) self.assertEqual(errors, expected) # Check for long column names is called only for specified database # aliases. self.assertEqual(ModelWithLongField.check(databases=None), []) @unittest.skipIf( max_column_name_length is None, "The database doesn't have a column name length limit.", ) def test_local_field_long_column_name(self): """ #13711 -- Model check for long column names when database does not support long names. 
""" class ModelWithLongField(models.Model): title = models.CharField(max_length=11) long_field_name = "a" * (self.max_column_name_length + 1) long_field_name2 = "b" * (self.max_column_name_length + 1) models.CharField(max_length=11).contribute_to_class( ModelWithLongField, long_field_name ) models.CharField(max_length=11, db_column="vlmn").contribute_to_class( ModelWithLongField, long_field_name2 ) self.assertEqual( ModelWithLongField.check(databases=("default", "other")), [ Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % ( long_field_name, self.max_column_name_length, self.column_limit_db_alias, ), hint="Set the column name manually using 'db_column'.", obj=ModelWithLongField, id="models.E018", ) ], ) # Check for long column names is called only for specified database # aliases. self.assertEqual(ModelWithLongField.check(databases=None), []) def test_including_separator(self): class Model(models.Model): some__field = models.IntegerField() self.assertEqual( Model.check(), [ Error( 'Field names must not contain "__".', obj=Model._meta.get_field("some__field"), id="fields.E002", ) ], ) def test_pk(self): class Model(models.Model): pk = models.IntegerField() self.assertEqual( Model.check(), [ Error( "'pk' is a reserved word that cannot be used as a field name.", obj=Model._meta.get_field("pk"), id="fields.E003", ) ], ) def test_db_column_clash(self): class Model(models.Model): foo = models.IntegerField() bar = models.IntegerField(db_column="foo") self.assertEqual( Model.check(), [ Error( "Field 'bar' has column name 'foo' that is used by " "another field.", hint="Specify a 'db_column' for the field.", obj=Model, id="models.E007", ) ], ) @isolate_apps("invalid_models_tests") class ShadowingFieldsTests(SimpleTestCase): def test_field_name_clash_with_child_accessor(self): class Parent(models.Model): pass class Child(Parent): child = models.CharField(max_length=100) self.assertEqual( Child.check(), [ Error( "The field 'child' clashes with the field " "'child' from model 'invalid_models_tests.parent'.", obj=Child._meta.get_field("child"), id="models.E006", ) ], ) def test_field_name_clash_with_m2m_through(self): class Parent(models.Model): clash_id = models.IntegerField() class Child(Parent): clash = models.ForeignKey("Child", models.CASCADE) class Model(models.Model): parents = models.ManyToManyField( to=Parent, through="Through", through_fields=["parent", "model"], ) class Through(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) model = models.ForeignKey(Model, models.CASCADE) self.assertEqual( Child.check(), [ Error( "The field 'clash' clashes with the field 'clash_id' from " "model 'invalid_models_tests.parent'.", obj=Child._meta.get_field("clash"), id="models.E006", ) ], ) def test_multiinheritance_clash(self): class Mother(models.Model): clash = models.IntegerField() class Father(models.Model): clash = models.IntegerField() class Child(Mother, Father): # Here we have two clashed: id (automatic field) and clash, because # both parents define these fields. 
pass self.assertEqual( Child.check(), [ Error( "The field 'id' from parent model " "'invalid_models_tests.mother' clashes with the field 'id' " "from parent model 'invalid_models_tests.father'.", obj=Child, id="models.E005", ), Error( "The field 'clash' from parent model " "'invalid_models_tests.mother' clashes with the field 'clash' " "from parent model 'invalid_models_tests.father'.", obj=Child, id="models.E005", ), ], ) def test_inheritance_clash(self): class Parent(models.Model): f_id = models.IntegerField() class Target(models.Model): # This field doesn't result in a clash. f_id = models.IntegerField() class Child(Parent): # This field clashes with parent "f_id" field. f = models.ForeignKey(Target, models.CASCADE) self.assertEqual( Child.check(), [ Error( "The field 'f' clashes with the field 'f_id' " "from model 'invalid_models_tests.parent'.", obj=Child._meta.get_field("f"), id="models.E006", ) ], ) def test_multigeneration_inheritance(self): class GrandParent(models.Model): clash = models.IntegerField() class Parent(GrandParent): pass class Child(Parent): pass class GrandChild(Child): clash = models.IntegerField() self.assertEqual( GrandChild.check(), [ Error( "The field 'clash' clashes with the field 'clash' " "from model 'invalid_models_tests.grandparent'.", obj=GrandChild._meta.get_field("clash"), id="models.E006", ) ], ) def test_id_clash(self): class Target(models.Model): pass class Model(models.Model): fk = models.ForeignKey(Target, models.CASCADE) fk_id = models.IntegerField() self.assertEqual( Model.check(), [ Error( "The field 'fk_id' clashes with the field 'fk' from model " "'invalid_models_tests.model'.", obj=Model._meta.get_field("fk_id"), id="models.E006", ) ], ) @isolate_apps("invalid_models_tests") class OtherModelTests(SimpleTestCase): def test_unique_primary_key(self): invalid_id = models.IntegerField(primary_key=False) class Model(models.Model): id = invalid_id self.assertEqual( Model.check(), [ Error( "'id' can only be used as a field name if the field also sets " "'primary_key=True'.", obj=Model, id="models.E004", ), ], ) def test_ordering_non_iterable(self): class Model(models.Model): class Meta: ordering = "missing_field" self.assertEqual( Model.check(), [ Error( "'ordering' must be a tuple or list " "(even if you want to order by only one field).", obj=Model, id="models.E014", ), ], ) def test_just_ordering_no_errors(self): class Model(models.Model): order = models.PositiveIntegerField() class Meta: ordering = ["order"] self.assertEqual(Model.check(), []) def test_just_order_with_respect_to_no_errors(self): class Question(models.Model): pass class Answer(models.Model): question = models.ForeignKey(Question, models.CASCADE) class Meta: order_with_respect_to = "question" self.assertEqual(Answer.check(), []) def test_ordering_with_order_with_respect_to(self): class Question(models.Model): pass class Answer(models.Model): question = models.ForeignKey(Question, models.CASCADE) order = models.IntegerField() class Meta: order_with_respect_to = "question" ordering = ["order"] self.assertEqual( Answer.check(), [ Error( "'ordering' and 'order_with_respect_to' cannot be used together.", obj=Answer, id="models.E021", ), ], ) def test_non_valid(self): class RelationModel(models.Model): pass class Model(models.Model): relation = models.ManyToManyField(RelationModel) class Meta: ordering = ["relation"] self.assertEqual( Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'relation'.", obj=Model, id="models.E015", ), ], ) def 
test_ordering_pointing_to_missing_field(self): class Model(models.Model): class Meta: ordering = ("missing_field",) self.assertEqual( Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_field'.", obj=Model, id="models.E015", ) ], ) def test_ordering_pointing_to_missing_foreignkey_field(self): class Model(models.Model): missing_fk_field = models.IntegerField() class Meta: ordering = ("missing_fk_field_id",) self.assertEqual( Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_fk_field_id'.", obj=Model, id="models.E015", ) ], ) def test_ordering_pointing_to_missing_related_field(self): class Model(models.Model): test = models.IntegerField() class Meta: ordering = ("missing_related__id",) self.assertEqual( Model.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'missing_related__id'.", obj=Model, id="models.E015", ) ], ) def test_ordering_pointing_to_missing_related_model_field(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ("parent__missing_field",) self.assertEqual( Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__missing_field'.", obj=Child, id="models.E015", ) ], ) def test_ordering_pointing_to_non_related_field(self): class Child(models.Model): parent = models.IntegerField() class Meta: ordering = ("parent__missing_field",) self.assertEqual( Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__missing_field'.", obj=Child, id="models.E015", ) ], ) def test_ordering_pointing_to_two_related_model_field(self): class Parent2(models.Model): pass class Parent1(models.Model): parent2 = models.ForeignKey(Parent2, models.CASCADE) class Child(models.Model): parent1 = models.ForeignKey(Parent1, models.CASCADE) class Meta: ordering = ("parent1__parent2__missing_field",) self.assertEqual( Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent1__parent2__missing_field'.", obj=Child, id="models.E015", ) ], ) def test_ordering_pointing_multiple_times_to_model_fields(self): class Parent(models.Model): field1 = models.CharField(max_length=100) field2 = models.CharField(max_length=100) class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ("parent__field1__field2",) self.assertEqual( Child.check(), [ Error( "'ordering' refers to the nonexistent field, related field, " "or lookup 'parent__field1__field2'.", obj=Child, id="models.E015", ) ], ) def test_ordering_allows_registered_lookups(self): class Model(models.Model): test = models.CharField(max_length=100) class Meta: ordering = ("test__lower",) with register_lookup(models.CharField, Lower): self.assertEqual(Model.check(), []) def test_ordering_pointing_to_lookup_not_transform(self): class Model(models.Model): test = models.CharField(max_length=100) class Meta: ordering = ("test__isnull",) self.assertEqual(Model.check(), []) def test_ordering_pointing_to_related_model_pk(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, models.CASCADE) class Meta: ordering = ("parent__pk",) self.assertEqual(Child.check(), []) def test_ordering_pointing_to_foreignkey_field(self): class Parent(models.Model): pass class Child(models.Model): parent = models.ForeignKey(Parent, 
models.CASCADE) class Meta: ordering = ("parent_id",) self.assertFalse(Child.check()) def test_name_beginning_with_underscore(self): class _Model(models.Model): pass self.assertEqual( _Model.check(), [ Error( "The model name '_Model' cannot start or end with an underscore " "as it collides with the query lookup syntax.", obj=_Model, id="models.E023", ) ], ) def test_name_ending_with_underscore(self): class Model_(models.Model): pass self.assertEqual( Model_.check(), [ Error( "The model name 'Model_' cannot start or end with an underscore " "as it collides with the query lookup syntax.", obj=Model_, id="models.E023", ) ], ) def test_name_contains_double_underscores(self): class Test__Model(models.Model): pass self.assertEqual( Test__Model.check(), [ Error( "The model name 'Test__Model' cannot contain double underscores " "as it collides with the query lookup syntax.", obj=Test__Model, id="models.E024", ) ], ) def test_property_and_related_field_accessor_clash(self): class Model(models.Model): fk = models.ForeignKey("self", models.CASCADE) # Override related field accessor. Model.fk_id = property(lambda self: "ERROR") self.assertEqual( Model.check(), [ Error( "The property 'fk_id' clashes with a related field accessor.", obj=Model, id="models.E025", ) ], ) def test_single_primary_key(self): class Model(models.Model): foo = models.IntegerField(primary_key=True) bar = models.IntegerField(primary_key=True) self.assertEqual( Model.check(), [ Error( "The model cannot have more than one field with " "'primary_key=True'.", obj=Model, id="models.E026", ) ], ) @override_settings(TEST_SWAPPED_MODEL_BAD_VALUE="not-a-model") def test_swappable_missing_app_name(self): class Model(models.Model): class Meta: swappable = "TEST_SWAPPED_MODEL_BAD_VALUE" self.assertEqual( Model.check(), [ Error( "'TEST_SWAPPED_MODEL_BAD_VALUE' is not of the form " "'app_label.app_name'.", id="models.E001", ), ], ) @override_settings(TEST_SWAPPED_MODEL_BAD_MODEL="not_an_app.Target") def test_swappable_missing_app(self): class Model(models.Model): class Meta: swappable = "TEST_SWAPPED_MODEL_BAD_MODEL" self.assertEqual( Model.check(), [ Error( "'TEST_SWAPPED_MODEL_BAD_MODEL' references 'not_an_app.Target', " "which has not been installed, or is abstract.", id="models.E002", ), ], ) def test_two_m2m_through_same_relationship(self): class Person(models.Model): pass class Group(models.Model): primary = models.ManyToManyField( Person, through="Membership", related_name="primary" ) secondary = models.ManyToManyField( Person, through="Membership", related_name="secondary" ) class Membership(models.Model): person = models.ForeignKey(Person, models.CASCADE) group = models.ForeignKey(Group, models.CASCADE) self.assertEqual( Group.check(), [ Error( "The model has two identical many-to-many relations through " "the intermediate model 'invalid_models_tests.Membership'.", obj=Group, id="models.E003", ) ], ) def test_two_m2m_through_same_model_with_different_through_fields(self): class Country(models.Model): pass class ShippingMethod(models.Model): to_countries = models.ManyToManyField( Country, through="ShippingMethodPrice", through_fields=("method", "to_country"), ) from_countries = models.ManyToManyField( Country, through="ShippingMethodPrice", through_fields=("method", "from_country"), related_name="+", ) class ShippingMethodPrice(models.Model): method = models.ForeignKey(ShippingMethod, models.CASCADE) to_country = models.ForeignKey(Country, models.CASCADE) from_country = models.ForeignKey(Country, models.CASCADE) 
self.assertEqual(ShippingMethod.check(), []) def test_onetoone_with_parent_model(self): class Place(models.Model): pass class ParkingLot(Place): other_place = models.OneToOneField( Place, models.CASCADE, related_name="other_parking" ) self.assertEqual(ParkingLot.check(), []) def test_onetoone_with_explicit_parent_link_parent_model(self): class Place(models.Model): pass class ParkingLot(Place): place = models.OneToOneField( Place, models.CASCADE, parent_link=True, primary_key=True ) other_place = models.OneToOneField( Place, models.CASCADE, related_name="other_parking" ) self.assertEqual(ParkingLot.check(), []) def test_m2m_table_name_clash(self): class Foo(models.Model): bar = models.ManyToManyField("Bar", db_table="myapp_bar") class Meta: db_table = "myapp_foo" class Bar(models.Model): class Meta: db_table = "myapp_bar" self.assertEqual( Foo.check(), [ Error( "The field's intermediary table 'myapp_bar' clashes with the " "table name of 'invalid_models_tests.Bar'.", obj=Foo._meta.get_field("bar"), id="fields.E340", ) ], ) @override_settings( DATABASE_ROUTERS=["invalid_models_tests.test_models.EmptyRouter"] ) def test_m2m_table_name_clash_database_routers_installed(self): class Foo(models.Model): bar = models.ManyToManyField("Bar", db_table="myapp_bar") class Meta: db_table = "myapp_foo" class Bar(models.Model): class Meta: db_table = "myapp_bar" self.assertEqual( Foo.check(), [ Warning( "The field's intermediary table 'myapp_bar' clashes with the " "table name of 'invalid_models_tests.Bar'.", obj=Foo._meta.get_field("bar"), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.Bar' is " "correctly routed to a separate database." ), id="fields.W344", ), ], ) def test_m2m_field_table_name_clash(self): class Foo(models.Model): pass class Bar(models.Model): foos = models.ManyToManyField(Foo, db_table="clash") class Baz(models.Model): foos = models.ManyToManyField(Foo, db_table="clash") self.assertEqual( Bar.check() + Baz.check(), [ Error( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.Baz.foos'.", obj=Bar._meta.get_field("foos"), id="fields.E340", ), Error( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.Bar.foos'.", obj=Baz._meta.get_field("foos"), id="fields.E340", ), ], ) @override_settings( DATABASE_ROUTERS=["invalid_models_tests.test_models.EmptyRouter"] ) def test_m2m_field_table_name_clash_database_routers_installed(self): class Foo(models.Model): pass class Bar(models.Model): foos = models.ManyToManyField(Foo, db_table="clash") class Baz(models.Model): foos = models.ManyToManyField(Foo, db_table="clash") self.assertEqual( Bar.check() + Baz.check(), [ Warning( "The field's intermediary table 'clash' clashes with the " "table name of 'invalid_models_tests.%s.foos'." % clashing_model, obj=model_cls._meta.get_field("foos"), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.%s.foos' is " "correctly routed to a separate database." % clashing_model ), id="fields.W344", ) for model_cls, clashing_model in [(Bar, "Baz"), (Baz, "Bar")] ], ) def test_m2m_autogenerated_table_name_clash(self): class Foo(models.Model): class Meta: db_table = "bar_foos" class Bar(models.Model): # The autogenerated `db_table` will be bar_foos. 
foos = models.ManyToManyField(Foo) class Meta: db_table = "bar" self.assertEqual( Bar.check(), [ Error( "The field's intermediary table 'bar_foos' clashes with the " "table name of 'invalid_models_tests.Foo'.", obj=Bar._meta.get_field("foos"), id="fields.E340", ) ], ) @override_settings( DATABASE_ROUTERS=["invalid_models_tests.test_models.EmptyRouter"] ) def test_m2m_autogenerated_table_name_clash_database_routers_installed(self): class Foo(models.Model): class Meta: db_table = "bar_foos" class Bar(models.Model): # The autogenerated db_table is bar_foos. foos = models.ManyToManyField(Foo) class Meta: db_table = "bar" self.assertEqual( Bar.check(), [ Warning( "The field's intermediary table 'bar_foos' clashes with the " "table name of 'invalid_models_tests.Foo'.", obj=Bar._meta.get_field("foos"), hint=( "You have configured settings.DATABASE_ROUTERS. Verify " "that the table of 'invalid_models_tests.Foo' is " "correctly routed to a separate database." ), id="fields.W344", ), ], ) def test_m2m_unmanaged_shadow_models_not_checked(self): class A1(models.Model): pass class C1(models.Model): mm_a = models.ManyToManyField(A1, db_table="d1") # Unmanaged models that shadow the above models. Reused table names # shouldn't be flagged by any checks. class A2(models.Model): class Meta: managed = False class C2(models.Model): mm_a = models.ManyToManyField(A2, through="Intermediate") class Meta: managed = False class Intermediate(models.Model): a2 = models.ForeignKey(A2, models.CASCADE, db_column="a1_id") c2 = models.ForeignKey(C2, models.CASCADE, db_column="c1_id") class Meta: db_table = "d1" managed = False self.assertEqual(C1.check(), []) self.assertEqual(C2.check(), []) def test_m2m_to_concrete_and_proxy_allowed(self): class A(models.Model): pass class Through(models.Model): a = models.ForeignKey("A", models.CASCADE) c = models.ForeignKey("C", models.CASCADE) class ThroughProxy(Through): class Meta: proxy = True class C(models.Model): mm_a = models.ManyToManyField(A, through=Through) mm_aproxy = models.ManyToManyField( A, through=ThroughProxy, related_name="proxied_m2m" ) self.assertEqual(C.check(), []) @isolate_apps("django.contrib.auth", kwarg_name="apps") def test_lazy_reference_checks(self, apps): class DummyModel(models.Model): author = models.ForeignKey("Author", models.CASCADE) class Meta: app_label = "invalid_models_tests" class DummyClass: def __call__(self, **kwargs): pass def dummy_method(self): pass def dummy_function(*args, **kwargs): pass apps.lazy_model_operation(dummy_function, ("auth", "imaginarymodel")) apps.lazy_model_operation(dummy_function, ("fanciful_app", "imaginarymodel")) post_init.connect(dummy_function, sender="missing-app.Model", apps=apps) post_init.connect(DummyClass(), sender="missing-app.Model", apps=apps) post_init.connect( DummyClass().dummy_method, sender="missing-app.Model", apps=apps ) self.assertEqual( _check_lazy_references(apps), [ Error( "%r contains a lazy reference to auth.imaginarymodel, " "but app 'auth' doesn't provide model 'imaginarymodel'." % dummy_function, obj=dummy_function, id="models.E022", ), Error( "%r contains a lazy reference to fanciful_app.imaginarymodel, " "but app 'fanciful_app' isn't installed." 
% dummy_function, obj=dummy_function, id="models.E022", ), Error( "An instance of class 'DummyClass' was connected to " "the 'post_init' signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj="invalid_models_tests.test_models", id="signals.E001", ), Error( "Bound method 'DummyClass.dummy_method' was connected to the " "'post_init' signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj="invalid_models_tests.test_models", id="signals.E001", ), Error( "The field invalid_models_tests.DummyModel.author was declared " "with a lazy reference to 'invalid_models_tests.author', but app " "'invalid_models_tests' isn't installed.", hint=None, obj=DummyModel.author.field, id="fields.E307", ), Error( "The function 'dummy_function' was connected to the 'post_init' " "signal with a lazy reference to the sender " "'missing-app.model', but app 'missing-app' isn't installed.", hint=None, obj="invalid_models_tests.test_models", id="signals.E001", ), ], ) @isolate_apps("invalid_models_tests") class DbTableCommentTests(TestCase): def test_db_table_comment(self): class Model(models.Model): class Meta: db_table_comment = "Table comment" errors = Model.check(databases=self.databases) expected = ( [] if connection.features.supports_comments else [ Warning( f"{connection.display_name} does not support comments on tables " f"(db_table_comment).", obj=Model, id="models.W046", ), ] ) self.assertEqual(errors, expected) def test_db_table_comment_required_db_features(self): class Model(models.Model): class Meta: db_table_comment = "Table comment" required_db_features = {"supports_comments"} self.assertEqual(Model.check(databases=self.databases), []) class MultipleAutoFieldsTests(TestCase): def test_multiple_autofields(self): msg = ( "Model invalid_models_tests.MultipleAutoFields can't have more " "than one auto-generated field." ) with self.assertRaisesMessage(ValueError, msg): class MultipleAutoFields(models.Model): auto1 = models.AutoField(primary_key=True) auto2 = models.AutoField(primary_key=True) @isolate_apps("invalid_models_tests") class JSONFieldTests(TestCase): @skipUnlessDBFeature("supports_json_field") def test_ordering_pointing_to_json_field_value(self): class Model(models.Model): field = models.JSONField() class Meta: ordering = ["field__value"] self.assertEqual(Model.check(databases=self.databases), []) def test_check_jsonfield(self): class Model(models.Model): field = models.JSONField() error = Error( "%s does not support JSONFields." % connection.display_name, obj=Model, id="fields.E180", ) expected = [] if connection.features.supports_json_field else [error] self.assertEqual(Model.check(databases=self.databases), expected) def test_check_jsonfield_required_db_features(self): class Model(models.Model): field = models.JSONField() class Meta: required_db_features = {"supports_json_field"} self.assertEqual(Model.check(databases=self.databases), []) @isolate_apps("invalid_models_tests") class ConstraintsTests(TestCase): def test_check_constraints(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.CheckConstraint(check=models.Q(age__gte=18), name="is_adult") ] errors = Model.check(databases=self.databases) warn = Warning( "%s does not support check constraints." % connection.display_name, hint=( "A constraint won't be created. Silence this warning if you " "don't care about it." 
), obj=Model, id="models.W027", ) expected = ( [] if connection.features.supports_table_check_constraints else [warn] ) self.assertCountEqual(errors, expected) def test_check_constraints_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {"supports_table_check_constraints"} constraints = [ models.CheckConstraint(check=models.Q(age__gte=18), name="is_adult") ] self.assertEqual(Model.check(databases=self.databases), []) def test_check_constraint_pointing_to_missing_field(self): class Model(models.Model): class Meta: required_db_features = {"supports_table_check_constraints"} constraints = [ models.CheckConstraint( name="name", check=models.Q(missing_field=2), ), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'missing_field'.", obj=Model, id="models.E012", ), ] if connection.features.supports_table_check_constraints else [], ) @skipUnlessDBFeature("supports_table_check_constraints") def test_check_constraint_pointing_to_reverse_fk(self): class Model(models.Model): parent = models.ForeignKey("self", models.CASCADE, related_name="parents") class Meta: constraints = [ models.CheckConstraint(name="name", check=models.Q(parents=3)), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'parents'.", obj=Model, id="models.E012", ), ], ) @skipUnlessDBFeature("supports_table_check_constraints") def test_check_constraint_pointing_to_reverse_o2o(self): class Model(models.Model): parent = models.OneToOneField("self", models.CASCADE) class Meta: constraints = [ models.CheckConstraint( name="name", check=models.Q(model__isnull=True), ), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'model'.", obj=Model, id="models.E012", ), ], ) @skipUnlessDBFeature("supports_table_check_constraints") def test_check_constraint_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField("self") class Meta: constraints = [ models.CheckConstraint(name="name", check=models.Q(m2m=2)), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id="models.E013", ), ], ) @skipUnlessDBFeature("supports_table_check_constraints") def test_check_constraint_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name="target_1") fk_2 = models.ForeignKey(Target, models.CASCADE, related_name="target_2") class Meta: constraints = [ models.CheckConstraint( name="name", check=models.Q(fk_1_id=2) | models.Q(fk_2=2), ), ] self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature("supports_table_check_constraints") def test_check_constraint_pointing_to_pk(self): class Model(models.Model): age = models.SmallIntegerField() class Meta: constraints = [ models.CheckConstraint( name="name", check=models.Q(pk__gt=5) & models.Q(age__gt=models.F("pk")), ), ] self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature("supports_table_check_constraints") def test_check_constraint_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): pass class Meta: constraints = [ models.CheckConstraint(name="name", check=models.Q(field1=1)), ] self.assertEqual( 
Child.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Child'.", hint="This issue may be caused by multi-table inheritance.", obj=Child, id="models.E016", ), ], ) @skipUnlessDBFeature("supports_table_check_constraints") def test_check_constraint_pointing_to_joined_fields(self): class Model(models.Model): name = models.CharField(max_length=10) field1 = models.PositiveSmallIntegerField() field2 = models.PositiveSmallIntegerField() field3 = models.PositiveSmallIntegerField() parent = models.ForeignKey("self", models.CASCADE) previous = models.OneToOneField("self", models.CASCADE, related_name="next") class Meta: constraints = [ models.CheckConstraint( name="name1", check=models.Q( field1__lt=models.F("parent__field1") + models.F("parent__field2") ), ), models.CheckConstraint( name="name2", check=models.Q(name=Lower("parent__name")) ), models.CheckConstraint( name="name3", check=models.Q(parent__field3=models.F("field1")) ), models.CheckConstraint( name="name4", check=models.Q(name=Lower("previous__name")), ), ] joined_fields = [ "parent__field1", "parent__field2", "parent__field3", "parent__name", "previous__name", ] errors = Model.check(databases=self.databases) expected_errors = [ Error( "'constraints' refers to the joined field '%s'." % field_name, obj=Model, id="models.E041", ) for field_name in joined_fields ] self.assertCountEqual(errors, expected_errors) @skipUnlessDBFeature("supports_table_check_constraints") def test_check_constraint_pointing_to_joined_fields_complex_check(self): class Model(models.Model): name = models.PositiveSmallIntegerField() field1 = models.PositiveSmallIntegerField() field2 = models.PositiveSmallIntegerField() parent = models.ForeignKey("self", models.CASCADE) class Meta: constraints = [ models.CheckConstraint( name="name", check=models.Q( ( models.Q(name="test") & models.Q(field1__lt=models.F("parent__field1")) ) | ( models.Q(name__startswith=Lower("parent__name")) & models.Q( field1__gte=( models.F("parent__field1") + models.F("parent__field2") ) ) ) ) | (models.Q(name="test1")), ), ] joined_fields = ["parent__field1", "parent__field2", "parent__name"] errors = Model.check(databases=self.databases) expected_errors = [ Error( "'constraints' refers to the joined field '%s'." 
% field_name, obj=Model, id="models.E041", ) for field_name in joined_fields ] self.assertCountEqual(errors, expected_errors) def test_check_constraint_raw_sql_check(self): class Model(models.Model): class Meta: required_db_features = {"supports_table_check_constraints"} constraints = [ models.CheckConstraint(check=models.Q(id__gt=0), name="q_check"), models.CheckConstraint( check=models.ExpressionWrapper( models.Q(price__gt=20), output_field=models.BooleanField(), ), name="expression_wrapper_check", ), models.CheckConstraint( check=models.expressions.RawSQL( "id = 0", params=(), output_field=models.BooleanField(), ), name="raw_sql_check", ), models.CheckConstraint( check=models.Q( models.ExpressionWrapper( models.Q( models.expressions.RawSQL( "id = 0", params=(), output_field=models.BooleanField(), ) ), output_field=models.BooleanField(), ) ), name="nested_raw_sql_check", ), ] expected_warnings = ( [ Warning( "Check constraint 'raw_sql_check' contains RawSQL() expression and " "won't be validated during the model full_clean().", hint="Silence this warning if you don't care about it.", obj=Model, id="models.W045", ), Warning( "Check constraint 'nested_raw_sql_check' contains RawSQL() " "expression and won't be validated during the model full_clean().", hint="Silence this warning if you don't care about it.", obj=Model, id="models.W045", ), ] if connection.features.supports_table_check_constraints else [] ) self.assertEqual(Model.check(databases=self.databases), expected_warnings) def test_unique_constraint_with_condition(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=["age"], name="unique_age_gte_100", condition=models.Q(age__gte=100), ), ] errors = Model.check(databases=self.databases) expected = ( [] if connection.features.supports_partial_indexes else [ Warning( "%s does not support unique constraints with conditions." % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." 
), obj=Model, id="models.W036", ), ] ) self.assertEqual(errors, expected) def test_unique_constraint_with_condition_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {"supports_partial_indexes"} constraints = [ models.UniqueConstraint( fields=["age"], name="unique_age_gte_100", condition=models.Q(age__gte=100), ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_condition_pointing_to_missing_field(self): class Model(models.Model): age = models.SmallIntegerField() class Meta: required_db_features = {"supports_partial_indexes"} constraints = [ models.UniqueConstraint( name="name", fields=["age"], condition=models.Q(missing_field=2), ), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'missing_field'.", obj=Model, id="models.E012", ), ] if connection.features.supports_partial_indexes else [], ) def test_unique_constraint_condition_pointing_to_joined_fields(self): class Model(models.Model): age = models.SmallIntegerField() parent = models.ForeignKey("self", models.CASCADE) class Meta: required_db_features = {"supports_partial_indexes"} constraints = [ models.UniqueConstraint( name="name", fields=["age"], condition=models.Q(parent__age__lt=2), ), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the joined field 'parent__age__lt'.", obj=Model, id="models.E041", ) ] if connection.features.supports_partial_indexes else [], ) def test_unique_constraint_pointing_to_reverse_o2o(self): class Model(models.Model): parent = models.OneToOneField("self", models.CASCADE) class Meta: required_db_features = {"supports_partial_indexes"} constraints = [ models.UniqueConstraint( fields=["parent"], name="name", condition=models.Q(model__isnull=True), ), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'model'.", obj=Model, id="models.E012", ), ] if connection.features.supports_partial_indexes else [], ) def test_deferrable_unique_constraint(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=["age"], name="unique_age_deferrable", deferrable=models.Deferrable.DEFERRED, ), ] errors = Model.check(databases=self.databases) expected = ( [] if connection.features.supports_deferrable_unique_constraints else [ Warning( "%s does not support deferrable unique constraints." % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." 
), obj=Model, id="models.W038", ), ] ) self.assertEqual(errors, expected) def test_deferrable_unique_constraint_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {"supports_deferrable_unique_constraints"} constraints = [ models.UniqueConstraint( fields=["age"], name="unique_age_deferrable", deferrable=models.Deferrable.IMMEDIATE, ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_pointing_to_missing_field(self): class Model(models.Model): class Meta: constraints = [ models.UniqueConstraint(fields=["missing_field"], name="name") ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'missing_field'.", obj=Model, id="models.E012", ), ], ) def test_unique_constraint_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField("self") class Meta: constraints = [models.UniqueConstraint(fields=["m2m"], name="name")] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id="models.E013", ), ], ) def test_unique_constraint_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): field2 = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint(fields=["field2", "field1"], name="name"), ] self.assertEqual( Child.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Child'.", hint="This issue may be caused by multi-table inheritance.", obj=Child, id="models.E016", ), ], ) def test_unique_constraint_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name="target_1") fk_2 = models.ForeignKey(Target, models.CASCADE, related_name="target_2") class Meta: constraints = [ models.UniqueConstraint(fields=["fk_1_id", "fk_2"], name="name"), ] self.assertEqual(Model.check(databases=self.databases), []) def test_unique_constraint_with_include(self): class Model(models.Model): age = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=["age"], name="unique_age_include_id", include=["id"], ), ] errors = Model.check(databases=self.databases) expected = ( [] if connection.features.supports_covering_indexes else [ Warning( "%s does not support unique constraints with non-key columns." % connection.display_name, hint=( "A constraint won't be created. Silence this warning if " "you don't care about it." 
), obj=Model, id="models.W039", ), ] ) self.assertEqual(errors, expected) def test_unique_constraint_with_include_required_db_features(self): class Model(models.Model): age = models.IntegerField() class Meta: required_db_features = {"supports_covering_indexes"} constraints = [ models.UniqueConstraint( fields=["age"], name="unique_age_include_id", include=["id"], ), ] self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature("supports_covering_indexes") def test_unique_constraint_include_pointing_to_missing_field(self): class Model(models.Model): class Meta: constraints = [ models.UniqueConstraint( fields=["id"], include=["missing_field"], name="name", ), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'missing_field'.", obj=Model, id="models.E012", ), ], ) @skipUnlessDBFeature("supports_covering_indexes") def test_unique_constraint_include_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField("self") class Meta: constraints = [ models.UniqueConstraint( fields=["id"], include=["m2m"], name="name", ), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id="models.E013", ), ], ) @skipUnlessDBFeature("supports_covering_indexes") def test_unique_constraint_include_pointing_to_non_local_field(self): class Parent(models.Model): field1 = models.IntegerField() class Child(Parent): field2 = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( fields=["field2"], include=["field1"], name="name", ), ] self.assertEqual( Child.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Child'.", hint="This issue may be caused by multi-table inheritance.", obj=Child, id="models.E016", ), ], ) @skipUnlessDBFeature("supports_covering_indexes") def test_unique_constraint_include_pointing_to_fk(self): class Target(models.Model): pass class Model(models.Model): fk_1 = models.ForeignKey(Target, models.CASCADE, related_name="target_1") fk_2 = models.ForeignKey(Target, models.CASCADE, related_name="target_2") class Meta: constraints = [ models.UniqueConstraint( fields=["id"], include=["fk_1_id", "fk_2"], name="name", ), ] self.assertEqual(Model.check(databases=self.databases), []) def test_func_unique_constraint(self): class Model(models.Model): name = models.CharField(max_length=10) class Meta: constraints = [ models.UniqueConstraint(Lower("name"), name="lower_name_uq"), ] warn = Warning( "%s does not support unique constraints on expressions." % connection.display_name, hint=( "A constraint won't be created. Silence this warning if you " "don't care about it." 
), obj=Model, id="models.W044", ) expected = [] if connection.features.supports_expression_indexes else [warn] self.assertEqual(Model.check(databases=self.databases), expected) def test_func_unique_constraint_required_db_features(self): class Model(models.Model): name = models.CharField(max_length=10) class Meta: constraints = [ models.UniqueConstraint(Lower("name"), name="lower_name_unq"), ] required_db_features = {"supports_expression_indexes"} self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature("supports_expression_indexes") def test_func_unique_constraint_expression_custom_lookup(self): class Model(models.Model): height = models.IntegerField() weight = models.IntegerField() class Meta: constraints = [ models.UniqueConstraint( models.F("height") / (models.F("weight__abs") + models.Value(5)), name="name", ), ] with register_lookup(models.IntegerField, Abs): self.assertEqual(Model.check(databases=self.databases), []) @skipUnlessDBFeature("supports_expression_indexes") def test_func_unique_constraint_pointing_to_missing_field(self): class Model(models.Model): class Meta: constraints = [ models.UniqueConstraint(Lower("missing_field").desc(), name="name"), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'missing_field'.", obj=Model, id="models.E012", ), ], ) @skipUnlessDBFeature("supports_expression_indexes") def test_func_unique_constraint_pointing_to_missing_field_nested(self): class Model(models.Model): class Meta: constraints = [ models.UniqueConstraint(Abs(Round("missing_field")), name="name"), ] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to the nonexistent field 'missing_field'.", obj=Model, id="models.E012", ), ], ) @skipUnlessDBFeature("supports_expression_indexes") def test_func_unique_constraint_pointing_to_m2m_field(self): class Model(models.Model): m2m = models.ManyToManyField("self") class Meta: constraints = [models.UniqueConstraint(Lower("m2m"), name="name")] self.assertEqual( Model.check(databases=self.databases), [ Error( "'constraints' refers to a ManyToManyField 'm2m', but " "ManyToManyFields are not permitted in 'constraints'.", obj=Model, id="models.E013", ), ], ) @skipUnlessDBFeature("supports_expression_indexes") def test_func_unique_constraint_pointing_to_non_local_field(self): class Foo(models.Model): field1 = models.CharField(max_length=15) class Bar(Foo): class Meta: constraints = [models.UniqueConstraint(Lower("field1"), name="name")] self.assertEqual( Bar.check(databases=self.databases), [ Error( "'constraints' refers to field 'field1' which is not local to " "model 'Bar'.", hint="This issue may be caused by multi-table inheritance.", obj=Bar, id="models.E016", ), ], ) @skipUnlessDBFeature("supports_expression_indexes") def test_func_unique_constraint_pointing_to_fk(self): class Foo(models.Model): id = models.CharField(primary_key=True, max_length=255) class Bar(models.Model): foo_1 = models.ForeignKey(Foo, models.CASCADE, related_name="bar_1") foo_2 = models.ForeignKey(Foo, models.CASCADE, related_name="bar_2") class Meta: constraints = [ models.UniqueConstraint( Lower("foo_1_id"), Lower("foo_2"), name="name", ), ] self.assertEqual(Bar.check(databases=self.databases), [])
import json import tempfile import uuid from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation from django.contrib.contenttypes.models import ContentType from django.core.files.storage import FileSystemStorage from django.core.serializers.json import DjangoJSONEncoder from django.db import models from django.db.models.fields.files import ImageFieldFile from django.utils.translation import gettext_lazy as _ try: from PIL import Image except ImportError: Image = None class Foo(models.Model): a = models.CharField(max_length=10) d = models.DecimalField(max_digits=5, decimal_places=3) def get_foo(): return Foo.objects.get(id=1).pk class Bar(models.Model): b = models.CharField(max_length=10) a = models.ForeignKey(Foo, models.CASCADE, default=get_foo, related_name="bars") class Whiz(models.Model): CHOICES = ( ( "Group 1", ( (1, "First"), (2, "Second"), ), ), ( "Group 2", ( (3, "Third"), (4, "Fourth"), ), ), (0, "Other"), (5, _("translated")), ) c = models.IntegerField(choices=CHOICES, null=True) class WhizDelayed(models.Model): c = models.IntegerField(choices=(), null=True) # Contrived way of adding choices later. WhizDelayed._meta.get_field("c").choices = Whiz.CHOICES class WhizIter(models.Model): c = models.IntegerField(choices=iter(Whiz.CHOICES), null=True) class WhizIterEmpty(models.Model): c = models.CharField(choices=iter(()), blank=True, max_length=1) class Choiceful(models.Model): no_choices = models.IntegerField(null=True) empty_choices = models.IntegerField(choices=(), null=True) with_choices = models.IntegerField(choices=[(1, "A")], null=True) empty_choices_bool = models.BooleanField(choices=()) empty_choices_text = models.TextField(choices=()) class BigD(models.Model): d = models.DecimalField(max_digits=32, decimal_places=30) class FloatModel(models.Model): size = models.FloatField() class BigS(models.Model): s = models.SlugField(max_length=255) class UnicodeSlugField(models.Model): s = models.SlugField(max_length=255, allow_unicode=True) class AutoModel(models.Model): value = models.AutoField(primary_key=True) class BigAutoModel(models.Model): value = models.BigAutoField(primary_key=True) class SmallAutoModel(models.Model): value = models.SmallAutoField(primary_key=True) class SmallIntegerModel(models.Model): value = models.SmallIntegerField() class IntegerModel(models.Model): value = models.IntegerField() class BigIntegerModel(models.Model): value = models.BigIntegerField() null_value = models.BigIntegerField(null=True, blank=True) class PositiveBigIntegerModel(models.Model): value = models.PositiveBigIntegerField() class PositiveSmallIntegerModel(models.Model): value = models.PositiveSmallIntegerField() class PositiveIntegerModel(models.Model): value = models.PositiveIntegerField() class Post(models.Model): title = models.CharField(max_length=100) body = models.TextField() class NullBooleanModel(models.Model): nbfield = models.BooleanField(null=True, blank=True) class BooleanModel(models.Model): bfield = models.BooleanField() class DateTimeModel(models.Model): d = models.DateField() dt = models.DateTimeField() t = models.TimeField() class DurationModel(models.Model): field = models.DurationField() class NullDurationModel(models.Model): field = models.DurationField(null=True) class PrimaryKeyCharModel(models.Model): string = models.CharField(max_length=10, primary_key=True) class FksToBooleans(models.Model): """Model with FKs to models with {Null,}BooleanField's, #15040""" bf = models.ForeignKey(BooleanModel, models.CASCADE) nbf = 
models.ForeignKey(NullBooleanModel, models.CASCADE) class FkToChar(models.Model): """Model with FK to a model with a CharField primary key, #19299""" out = models.ForeignKey(PrimaryKeyCharModel, models.CASCADE) class RenamedField(models.Model): modelname = models.IntegerField(name="fieldname", choices=((1, "One"),)) class VerboseNameField(models.Model): id = models.AutoField("verbose pk", primary_key=True) field1 = models.BigIntegerField("verbose field1") field2 = models.BooleanField("verbose field2", default=False) field3 = models.CharField("verbose field3", max_length=10) field4 = models.DateField("verbose field4") field5 = models.DateTimeField("verbose field5") field6 = models.DecimalField("verbose field6", max_digits=6, decimal_places=1) field7 = models.EmailField("verbose field7") field8 = models.FileField("verbose field8", upload_to="unused") field9 = models.FilePathField("verbose field9") field10 = models.FloatField("verbose field10") # Don't want to depend on Pillow in this test # field_image = models.ImageField("verbose field") field11 = models.IntegerField("verbose field11") field12 = models.GenericIPAddressField("verbose field12", protocol="ipv4") field13 = models.PositiveIntegerField("verbose field13") field14 = models.PositiveSmallIntegerField("verbose field14") field15 = models.SlugField("verbose field15") field16 = models.SmallIntegerField("verbose field16") field17 = models.TextField("verbose field17") field18 = models.TimeField("verbose field18") field19 = models.URLField("verbose field19") field20 = models.UUIDField("verbose field20") field21 = models.DurationField("verbose field21") class GenericIPAddress(models.Model): ip = models.GenericIPAddressField(null=True, protocol="ipv4") ############################################################################### # These models aren't used in any test, just here to ensure they validate # successfully. # See ticket #16570. class DecimalLessThanOne(models.Model): d = models.DecimalField(max_digits=3, decimal_places=3) # See ticket #18389. class FieldClassAttributeModel(models.Model): field_class = models.CharField ############################################################################### class DataModel(models.Model): short_data = models.BinaryField(max_length=10, default=b"\x08") data = models.BinaryField() ############################################################################### # FileField class Document(models.Model): myfile = models.FileField(upload_to="unused", unique=True) ############################################################################### # ImageField # If Pillow available, do these tests. if Image: class TestImageFieldFile(ImageFieldFile): """ Custom Field File class that records whether or not the underlying file was opened. """ def __init__(self, *args, **kwargs): self.was_opened = False super().__init__(*args, **kwargs) def open(self): self.was_opened = True super().open() class TestImageField(models.ImageField): attr_class = TestImageFieldFile # Set up a temp directory for file storage. temp_storage_dir = tempfile.mkdtemp() temp_storage = FileSystemStorage(temp_storage_dir) class Person(models.Model): """ Model that defines an ImageField with no dimension fields. """ name = models.CharField(max_length=50) mugshot = TestImageField(storage=temp_storage, upload_to="tests") class AbstractPersonWithHeight(models.Model): """ Abstract model that defines an ImageField with only one dimension field to make sure the dimension update is correctly run on concrete subclass instance post-initialization. 
""" mugshot = TestImageField( storage=temp_storage, upload_to="tests", height_field="mugshot_height" ) mugshot_height = models.PositiveSmallIntegerField() class Meta: abstract = True class PersonWithHeight(AbstractPersonWithHeight): """ Concrete model that subclass an abstract one with only on dimension field. """ name = models.CharField(max_length=50) class PersonWithHeightAndWidth(models.Model): """ Model that defines height and width fields after the ImageField. """ name = models.CharField(max_length=50) mugshot = TestImageField( storage=temp_storage, upload_to="tests", height_field="mugshot_height", width_field="mugshot_width", ) mugshot_height = models.PositiveSmallIntegerField() mugshot_width = models.PositiveSmallIntegerField() class PersonDimensionsFirst(models.Model): """ Model that defines height and width fields before the ImageField. """ name = models.CharField(max_length=50) mugshot_height = models.PositiveSmallIntegerField() mugshot_width = models.PositiveSmallIntegerField() mugshot = TestImageField( storage=temp_storage, upload_to="tests", height_field="mugshot_height", width_field="mugshot_width", ) class PersonTwoImages(models.Model): """ Model that: * Defines two ImageFields * Defines the height/width fields before the ImageFields * Has a nullable ImageField """ name = models.CharField(max_length=50) mugshot_height = models.PositiveSmallIntegerField() mugshot_width = models.PositiveSmallIntegerField() mugshot = TestImageField( storage=temp_storage, upload_to="tests", height_field="mugshot_height", width_field="mugshot_width", ) headshot_height = models.PositiveSmallIntegerField(blank=True, null=True) headshot_width = models.PositiveSmallIntegerField(blank=True, null=True) headshot = TestImageField( blank=True, null=True, storage=temp_storage, upload_to="tests", height_field="headshot_height", width_field="headshot_width", ) class CustomJSONDecoder(json.JSONDecoder): def __init__(self, object_hook=None, *args, **kwargs): return super().__init__(object_hook=self.as_uuid, *args, **kwargs) def as_uuid(self, dct): if "uuid" in dct: dct["uuid"] = uuid.UUID(dct["uuid"]) return dct class JSONModel(models.Model): value = models.JSONField() class Meta: required_db_features = {"supports_json_field"} class NullableJSONModel(models.Model): value = models.JSONField(blank=True, null=True) value_custom = models.JSONField( encoder=DjangoJSONEncoder, decoder=CustomJSONDecoder, null=True, ) class Meta: required_db_features = {"supports_json_field"} class RelatedJSONModel(models.Model): value = models.JSONField() json_model = models.ForeignKey(NullableJSONModel, models.CASCADE) class Meta: required_db_features = {"supports_json_field"} class AllFieldsModel(models.Model): big_integer = models.BigIntegerField() binary = models.BinaryField() boolean = models.BooleanField(default=False) char = models.CharField(max_length=10) date = models.DateField() datetime = models.DateTimeField() decimal = models.DecimalField(decimal_places=2, max_digits=2) duration = models.DurationField() email = models.EmailField() file_path = models.FilePathField() floatf = models.FloatField() integer = models.IntegerField() generic_ip = models.GenericIPAddressField() positive_integer = models.PositiveIntegerField() positive_small_integer = models.PositiveSmallIntegerField() slug = models.SlugField() small_integer = models.SmallIntegerField() text = models.TextField() time = models.TimeField() url = models.URLField() uuid = models.UUIDField() fo = models.ForeignObject( "self", on_delete=models.CASCADE, 
from_fields=["positive_integer"], to_fields=["id"], related_name="reverse", ) fk = models.ForeignKey("self", models.CASCADE, related_name="reverse2") m2m = models.ManyToManyField("self") oto = models.OneToOneField("self", models.CASCADE) object_id = models.PositiveIntegerField() content_type = models.ForeignKey(ContentType, models.CASCADE) gfk = GenericForeignKey() gr = GenericRelation(DataModel) class ManyToMany(models.Model): m2m = models.ManyToManyField("self") ############################################################################### class UUIDModel(models.Model): field = models.UUIDField() class NullableUUIDModel(models.Model): field = models.UUIDField(blank=True, null=True) class PrimaryKeyUUIDModel(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4) class RelatedToUUIDModel(models.Model): uuid_fk = models.ForeignKey("PrimaryKeyUUIDModel", models.CASCADE) class UUIDChild(PrimaryKeyUUIDModel): pass class UUIDGrandchild(UUIDChild): pass
from unittest import mock from django.core.exceptions import ValidationError from django.db import IntegrityError, connection, models from django.db.models import F from django.db.models.constraints import BaseConstraint from django.db.models.functions import Lower from django.db.transaction import atomic from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature from .models import ( ChildModel, ChildUniqueConstraintProduct, Product, UniqueConstraintConditionProduct, UniqueConstraintDeferrable, UniqueConstraintInclude, UniqueConstraintProduct, ) def get_constraints(table): with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) class BaseConstraintTests(SimpleTestCase): def test_constraint_sql(self): c = BaseConstraint("name") msg = "This method must be implemented by a subclass." with self.assertRaisesMessage(NotImplementedError, msg): c.constraint_sql(None, None) def test_contains_expressions(self): c = BaseConstraint("name") self.assertIs(c.contains_expressions, False) def test_create_sql(self): c = BaseConstraint("name") msg = "This method must be implemented by a subclass." with self.assertRaisesMessage(NotImplementedError, msg): c.create_sql(None, None) def test_remove_sql(self): c = BaseConstraint("name") msg = "This method must be implemented by a subclass." with self.assertRaisesMessage(NotImplementedError, msg): c.remove_sql(None, None) def test_validate(self): c = BaseConstraint("name") msg = "This method must be implemented by a subclass." with self.assertRaisesMessage(NotImplementedError, msg): c.validate(None, None) def test_default_violation_error_message(self): c = BaseConstraint("name") self.assertEqual( c.get_violation_error_message(), "Constraint “name” is violated." 
) def test_custom_violation_error_message(self): c = BaseConstraint( "base_name", violation_error_message="custom %(name)s message" ) self.assertEqual(c.get_violation_error_message(), "custom base_name message") def test_custom_violation_error_message_clone(self): constraint = BaseConstraint( "base_name", violation_error_message="custom %(name)s message", ).clone() self.assertEqual( constraint.get_violation_error_message(), "custom base_name message", ) def test_deconstruction(self): constraint = BaseConstraint( "base_name", violation_error_message="custom %(name)s message", ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, "django.db.models.BaseConstraint") self.assertEqual(args, ()) self.assertEqual( kwargs, {"name": "base_name", "violation_error_message": "custom %(name)s message"}, ) class CheckConstraintTests(TestCase): def test_eq(self): check1 = models.Q(price__gt=models.F("discounted_price")) check2 = models.Q(price__lt=models.F("discounted_price")) self.assertEqual( models.CheckConstraint(check=check1, name="price"), models.CheckConstraint(check=check1, name="price"), ) self.assertEqual(models.CheckConstraint(check=check1, name="price"), mock.ANY) self.assertNotEqual( models.CheckConstraint(check=check1, name="price"), models.CheckConstraint(check=check1, name="price2"), ) self.assertNotEqual( models.CheckConstraint(check=check1, name="price"), models.CheckConstraint(check=check2, name="price"), ) self.assertNotEqual(models.CheckConstraint(check=check1, name="price"), 1) self.assertNotEqual( models.CheckConstraint(check=check1, name="price"), models.CheckConstraint( check=check1, name="price", violation_error_message="custom error" ), ) self.assertNotEqual( models.CheckConstraint( check=check1, name="price", violation_error_message="custom error" ), models.CheckConstraint( check=check1, name="price", violation_error_message="other custom error" ), ) self.assertEqual( models.CheckConstraint( check=check1, name="price", violation_error_message="custom error" ), models.CheckConstraint( check=check1, name="price", violation_error_message="custom error" ), ) def test_repr(self): constraint = models.CheckConstraint( check=models.Q(price__gt=models.F("discounted_price")), name="price_gt_discounted_price", ) self.assertEqual( repr(constraint), "<CheckConstraint: check=(AND: ('price__gt', F(discounted_price))) " "name='price_gt_discounted_price'>", ) def test_invalid_check_types(self): msg = "CheckConstraint.check must be a Q instance or boolean expression." 
with self.assertRaisesMessage(TypeError, msg): models.CheckConstraint(check=models.F("discounted_price"), name="check") def test_deconstruction(self): check = models.Q(price__gt=models.F("discounted_price")) name = "price_gt_discounted_price" constraint = models.CheckConstraint(check=check, name=name) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, "django.db.models.CheckConstraint") self.assertEqual(args, ()) self.assertEqual(kwargs, {"check": check, "name": name}) @skipUnlessDBFeature("supports_table_check_constraints") def test_database_constraint(self): Product.objects.create(price=10, discounted_price=5) with self.assertRaises(IntegrityError): Product.objects.create(price=10, discounted_price=20) @skipUnlessDBFeature("supports_table_check_constraints") def test_database_constraint_unicode(self): Product.objects.create(price=10, discounted_price=5, unit="μg/mL") with self.assertRaises(IntegrityError): Product.objects.create(price=10, discounted_price=7, unit="l") @skipUnlessDBFeature( "supports_table_check_constraints", "can_introspect_check_constraints" ) def test_name(self): constraints = get_constraints(Product._meta.db_table) for expected_name in ( "price_gt_discounted_price", "constraints_product_price_gt_0", ): with self.subTest(expected_name): self.assertIn(expected_name, constraints) @skipUnlessDBFeature( "supports_table_check_constraints", "can_introspect_check_constraints" ) def test_abstract_name(self): constraints = get_constraints(ChildModel._meta.db_table) self.assertIn("constraints_childmodel_adult", constraints) def test_validate(self): check = models.Q(price__gt=models.F("discounted_price")) constraint = models.CheckConstraint(check=check, name="price") # Invalid product. invalid_product = Product(price=10, discounted_price=42) with self.assertRaises(ValidationError): constraint.validate(Product, invalid_product) with self.assertRaises(ValidationError): constraint.validate(Product, invalid_product, exclude={"unit"}) # Fields used by the check constraint are excluded. constraint.validate(Product, invalid_product, exclude={"price"}) constraint.validate(Product, invalid_product, exclude={"discounted_price"}) constraint.validate( Product, invalid_product, exclude={"discounted_price", "price"}, ) # Valid product. constraint.validate(Product, Product(price=10, discounted_price=5)) def test_validate_boolean_expressions(self): constraint = models.CheckConstraint( check=models.expressions.ExpressionWrapper( models.Q(price__gt=500) | models.Q(price__lt=500), output_field=models.BooleanField(), ), name="price_neq_500_wrap", ) msg = f"Constraint “{constraint.name}” is violated." with self.assertRaisesMessage(ValidationError, msg): constraint.validate(Product, Product(price=500, discounted_price=5)) constraint.validate(Product, Product(price=501, discounted_price=5)) constraint.validate(Product, Product(price=499, discounted_price=5)) def test_validate_rawsql_expressions_noop(self): constraint = models.CheckConstraint( check=models.expressions.RawSQL( "price < %s OR price > %s", (500, 500), output_field=models.BooleanField(), ), name="price_neq_500_raw", ) # RawSQL can not be checked and is always considered valid. 
constraint.validate(Product, Product(price=500, discounted_price=5)) constraint.validate(Product, Product(price=501, discounted_price=5)) constraint.validate(Product, Product(price=499, discounted_price=5)) @skipUnlessDBFeature("supports_comparing_boolean_expr") def test_validate_nullable_field_with_none(self): # Nullable fields should be considered valid on None values. constraint = models.CheckConstraint( check=models.Q(price__gte=0), name="positive_price", ) constraint.validate(Product, Product()) @skipIfDBFeature("supports_comparing_boolean_expr") def test_validate_nullable_field_with_isnull(self): constraint = models.CheckConstraint( check=models.Q(price__gte=0) | models.Q(price__isnull=True), name="positive_price", ) constraint.validate(Product, Product()) class UniqueConstraintTests(TestCase): @classmethod def setUpTestData(cls): cls.p1 = UniqueConstraintProduct.objects.create(name="p1", color="red") cls.p2 = UniqueConstraintProduct.objects.create(name="p2") def test_eq(self): self.assertEqual( models.UniqueConstraint(fields=["foo", "bar"], name="unique"), models.UniqueConstraint(fields=["foo", "bar"], name="unique"), ) self.assertEqual( models.UniqueConstraint(fields=["foo", "bar"], name="unique"), mock.ANY, ) self.assertNotEqual( models.UniqueConstraint(fields=["foo", "bar"], name="unique"), models.UniqueConstraint(fields=["foo", "bar"], name="unique2"), ) self.assertNotEqual( models.UniqueConstraint(fields=["foo", "bar"], name="unique"), models.UniqueConstraint(fields=["foo", "baz"], name="unique"), ) self.assertNotEqual( models.UniqueConstraint(fields=["foo", "bar"], name="unique"), 1 ) self.assertNotEqual( models.UniqueConstraint(fields=["foo", "bar"], name="unique"), models.UniqueConstraint( fields=["foo", "bar"], name="unique", violation_error_message="custom error", ), ) self.assertNotEqual( models.UniqueConstraint( fields=["foo", "bar"], name="unique", violation_error_message="custom error", ), models.UniqueConstraint( fields=["foo", "bar"], name="unique", violation_error_message="other custom error", ), ) self.assertEqual( models.UniqueConstraint( fields=["foo", "bar"], name="unique", violation_error_message="custom error", ), models.UniqueConstraint( fields=["foo", "bar"], name="unique", violation_error_message="custom error", ), ) def test_eq_with_condition(self): self.assertEqual( models.UniqueConstraint( fields=["foo", "bar"], name="unique", condition=models.Q(foo=models.F("bar")), ), models.UniqueConstraint( fields=["foo", "bar"], name="unique", condition=models.Q(foo=models.F("bar")), ), ) self.assertNotEqual( models.UniqueConstraint( fields=["foo", "bar"], name="unique", condition=models.Q(foo=models.F("bar")), ), models.UniqueConstraint( fields=["foo", "bar"], name="unique", condition=models.Q(foo=models.F("baz")), ), ) def test_eq_with_deferrable(self): constraint_1 = models.UniqueConstraint( fields=["foo", "bar"], name="unique", deferrable=models.Deferrable.DEFERRED, ) constraint_2 = models.UniqueConstraint( fields=["foo", "bar"], name="unique", deferrable=models.Deferrable.IMMEDIATE, ) self.assertEqual(constraint_1, constraint_1) self.assertNotEqual(constraint_1, constraint_2) def test_eq_with_include(self): constraint_1 = models.UniqueConstraint( fields=["foo", "bar"], name="include", include=["baz_1"], ) constraint_2 = models.UniqueConstraint( fields=["foo", "bar"], name="include", include=["baz_2"], ) self.assertEqual(constraint_1, constraint_1) self.assertNotEqual(constraint_1, constraint_2) def test_eq_with_opclasses(self): constraint_1 = 
models.UniqueConstraint( fields=["foo", "bar"], name="opclasses", opclasses=["text_pattern_ops", "varchar_pattern_ops"], ) constraint_2 = models.UniqueConstraint( fields=["foo", "bar"], name="opclasses", opclasses=["varchar_pattern_ops", "text_pattern_ops"], ) self.assertEqual(constraint_1, constraint_1) self.assertNotEqual(constraint_1, constraint_2) def test_eq_with_expressions(self): constraint = models.UniqueConstraint( Lower("title"), F("author"), name="book_func_uq", ) same_constraint = models.UniqueConstraint( Lower("title"), "author", name="book_func_uq", ) another_constraint = models.UniqueConstraint( Lower("title"), name="book_func_uq", ) self.assertEqual(constraint, same_constraint) self.assertEqual(constraint, mock.ANY) self.assertNotEqual(constraint, another_constraint) def test_repr(self): fields = ["foo", "bar"] name = "unique_fields" constraint = models.UniqueConstraint(fields=fields, name=name) self.assertEqual( repr(constraint), "<UniqueConstraint: fields=('foo', 'bar') name='unique_fields'>", ) def test_repr_with_condition(self): constraint = models.UniqueConstraint( fields=["foo", "bar"], name="unique_fields", condition=models.Q(foo=models.F("bar")), ) self.assertEqual( repr(constraint), "<UniqueConstraint: fields=('foo', 'bar') name='unique_fields' " "condition=(AND: ('foo', F(bar)))>", ) def test_repr_with_deferrable(self): constraint = models.UniqueConstraint( fields=["foo", "bar"], name="unique_fields", deferrable=models.Deferrable.IMMEDIATE, ) self.assertEqual( repr(constraint), "<UniqueConstraint: fields=('foo', 'bar') name='unique_fields' " "deferrable=Deferrable.IMMEDIATE>", ) def test_repr_with_include(self): constraint = models.UniqueConstraint( fields=["foo", "bar"], name="include_fields", include=["baz_1", "baz_2"], ) self.assertEqual( repr(constraint), "<UniqueConstraint: fields=('foo', 'bar') name='include_fields' " "include=('baz_1', 'baz_2')>", ) def test_repr_with_opclasses(self): constraint = models.UniqueConstraint( fields=["foo", "bar"], name="opclasses_fields", opclasses=["text_pattern_ops", "varchar_pattern_ops"], ) self.assertEqual( repr(constraint), "<UniqueConstraint: fields=('foo', 'bar') name='opclasses_fields' " "opclasses=['text_pattern_ops', 'varchar_pattern_ops']>", ) def test_repr_with_expressions(self): constraint = models.UniqueConstraint( Lower("title"), F("author"), name="book_func_uq", ) self.assertEqual( repr(constraint), "<UniqueConstraint: expressions=(Lower(F(title)), F(author)) " "name='book_func_uq'>", ) def test_deconstruction(self): fields = ["foo", "bar"] name = "unique_fields" constraint = models.UniqueConstraint(fields=fields, name=name) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, "django.db.models.UniqueConstraint") self.assertEqual(args, ()) self.assertEqual(kwargs, {"fields": tuple(fields), "name": name}) def test_deconstruction_with_condition(self): fields = ["foo", "bar"] name = "unique_fields" condition = models.Q(foo=models.F("bar")) constraint = models.UniqueConstraint( fields=fields, name=name, condition=condition ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, "django.db.models.UniqueConstraint") self.assertEqual(args, ()) self.assertEqual( kwargs, {"fields": tuple(fields), "name": name, "condition": condition} ) def test_deconstruction_with_deferrable(self): fields = ["foo"] name = "unique_fields" constraint = models.UniqueConstraint( fields=fields, name=name, deferrable=models.Deferrable.DEFERRED, ) path, args, kwargs = constraint.deconstruct() 
self.assertEqual(path, "django.db.models.UniqueConstraint") self.assertEqual(args, ()) self.assertEqual( kwargs, { "fields": tuple(fields), "name": name, "deferrable": models.Deferrable.DEFERRED, }, ) def test_deconstruction_with_include(self): fields = ["foo", "bar"] name = "unique_fields" include = ["baz_1", "baz_2"] constraint = models.UniqueConstraint(fields=fields, name=name, include=include) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, "django.db.models.UniqueConstraint") self.assertEqual(args, ()) self.assertEqual( kwargs, { "fields": tuple(fields), "name": name, "include": tuple(include), }, ) def test_deconstruction_with_opclasses(self): fields = ["foo", "bar"] name = "unique_fields" opclasses = ["varchar_pattern_ops", "text_pattern_ops"] constraint = models.UniqueConstraint( fields=fields, name=name, opclasses=opclasses ) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, "django.db.models.UniqueConstraint") self.assertEqual(args, ()) self.assertEqual( kwargs, { "fields": tuple(fields), "name": name, "opclasses": opclasses, }, ) def test_deconstruction_with_expressions(self): name = "unique_fields" constraint = models.UniqueConstraint(Lower("title"), name=name) path, args, kwargs = constraint.deconstruct() self.assertEqual(path, "django.db.models.UniqueConstraint") self.assertEqual(args, (Lower("title"),)) self.assertEqual(kwargs, {"name": name}) def test_database_constraint(self): with self.assertRaises(IntegrityError): UniqueConstraintProduct.objects.create( name=self.p1.name, color=self.p1.color ) @skipUnlessDBFeature("supports_partial_indexes") def test_database_constraint_with_condition(self): UniqueConstraintConditionProduct.objects.create(name="p1") UniqueConstraintConditionProduct.objects.create(name="p2") with self.assertRaises(IntegrityError): UniqueConstraintConditionProduct.objects.create(name="p1") def test_model_validation(self): msg = "Unique constraint product with this Name and Color already exists." with self.assertRaisesMessage(ValidationError, msg): UniqueConstraintProduct( name=self.p1.name, color=self.p1.color ).validate_constraints() @skipUnlessDBFeature("supports_partial_indexes") def test_model_validation_with_condition(self): """ Partial unique constraints are not ignored by Model.validate_constraints(). """ obj1 = UniqueConstraintConditionProduct.objects.create(name="p1", color="red") obj2 = UniqueConstraintConditionProduct.objects.create(name="p2") UniqueConstraintConditionProduct( name=obj1.name, color="blue" ).validate_constraints() msg = "Constraint “name_without_color_uniq” is violated." with self.assertRaisesMessage(ValidationError, msg): UniqueConstraintConditionProduct(name=obj2.name).validate_constraints() def test_validate(self): constraint = UniqueConstraintProduct._meta.constraints[0] msg = "Unique constraint product with this Name and Color already exists." non_unique_product = UniqueConstraintProduct( name=self.p1.name, color=self.p1.color ) with self.assertRaisesMessage(ValidationError, msg): constraint.validate(UniqueConstraintProduct, non_unique_product) # Null values are ignored. constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name=self.p2.name, color=None), ) # Existing instances have their existing row excluded. constraint.validate(UniqueConstraintProduct, self.p1) # Unique fields are excluded. 
constraint.validate( UniqueConstraintProduct, non_unique_product, exclude={"name"}, ) constraint.validate( UniqueConstraintProduct, non_unique_product, exclude={"color"}, ) constraint.validate( UniqueConstraintProduct, non_unique_product, exclude={"name", "color"}, ) # Validation on a child instance. with self.assertRaisesMessage(ValidationError, msg): constraint.validate( UniqueConstraintProduct, ChildUniqueConstraintProduct(name=self.p1.name, color=self.p1.color), ) @skipUnlessDBFeature("supports_partial_indexes") def test_validate_condition(self): p1 = UniqueConstraintConditionProduct.objects.create(name="p1") constraint = UniqueConstraintConditionProduct._meta.constraints[0] msg = "Constraint “name_without_color_uniq” is violated." with self.assertRaisesMessage(ValidationError, msg): constraint.validate( UniqueConstraintConditionProduct, UniqueConstraintConditionProduct(name=p1.name, color=None), ) # Values not matching condition are ignored. constraint.validate( UniqueConstraintConditionProduct, UniqueConstraintConditionProduct(name=p1.name, color="anything-but-none"), ) # Existing instances have their existing row excluded. constraint.validate(UniqueConstraintConditionProduct, p1) # Unique field is excluded. constraint.validate( UniqueConstraintConditionProduct, UniqueConstraintConditionProduct(name=p1.name, color=None), exclude={"name"}, ) def test_validate_expression(self): constraint = models.UniqueConstraint(Lower("name"), name="name_lower_uniq") msg = "Constraint “name_lower_uniq” is violated." with self.assertRaisesMessage(ValidationError, msg): constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name=self.p1.name.upper()), ) constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name="another-name"), ) # Existing instances have their existing row excluded. constraint.validate(UniqueConstraintProduct, self.p1) # Unique field is excluded. constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name=self.p1.name.upper()), exclude={"name"}, ) def test_validate_ordered_expression(self): constraint = models.UniqueConstraint( Lower("name").desc(), name="name_lower_uniq_desc" ) msg = "Constraint “name_lower_uniq_desc” is violated." with self.assertRaisesMessage(ValidationError, msg): constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name=self.p1.name.upper()), ) constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name="another-name"), ) # Existing instances have their existing row excluded. constraint.validate(UniqueConstraintProduct, self.p1) # Unique field is excluded. constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name=self.p1.name.upper()), exclude={"name"}, ) def test_validate_expression_condition(self): constraint = models.UniqueConstraint( Lower("name"), name="name_lower_without_color_uniq", condition=models.Q(color__isnull=True), ) non_unique_product = UniqueConstraintProduct(name=self.p2.name.upper()) msg = "Constraint “name_lower_without_color_uniq” is violated." with self.assertRaisesMessage(ValidationError, msg): constraint.validate(UniqueConstraintProduct, non_unique_product) # Values not matching condition are ignored. constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name=self.p1.name, color=self.p1.color), ) # Existing instances have their existing row excluded. constraint.validate(UniqueConstraintProduct, self.p2) # Unique field is excluded. 
constraint.validate( UniqueConstraintProduct, non_unique_product, exclude={"name"}, ) # Field from a condition is excluded. constraint.validate( UniqueConstraintProduct, non_unique_product, exclude={"color"}, ) def test_validate_expression_str(self): constraint = models.UniqueConstraint("name", name="name_uniq") msg = "Constraint “name_uniq” is violated." with self.assertRaisesMessage(ValidationError, msg): constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name=self.p1.name), ) constraint.validate( UniqueConstraintProduct, UniqueConstraintProduct(name=self.p1.name), exclude={"name"}, ) def test_name(self): constraints = get_constraints(UniqueConstraintProduct._meta.db_table) expected_name = "name_color_uniq" self.assertIn(expected_name, constraints) def test_condition_must_be_q(self): with self.assertRaisesMessage( ValueError, "UniqueConstraint.condition must be a Q instance." ): models.UniqueConstraint(name="uniq", fields=["name"], condition="invalid") @skipUnlessDBFeature("supports_deferrable_unique_constraints") def test_initially_deferred_database_constraint(self): obj_1 = UniqueConstraintDeferrable.objects.create(name="p1", shelf="front") obj_2 = UniqueConstraintDeferrable.objects.create(name="p2", shelf="back") def swap(): obj_1.name, obj_2.name = obj_2.name, obj_1.name obj_1.save() obj_2.save() swap() # Behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with atomic(), connection.cursor() as cursor: constraint_name = connection.ops.quote_name("name_init_deferred_uniq") cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % constraint_name) swap() @skipUnlessDBFeature("supports_deferrable_unique_constraints") def test_initially_immediate_database_constraint(self): obj_1 = UniqueConstraintDeferrable.objects.create(name="p1", shelf="front") obj_2 = UniqueConstraintDeferrable.objects.create(name="p2", shelf="back") obj_1.shelf, obj_2.shelf = obj_2.shelf, obj_1.shelf with self.assertRaises(IntegrityError), atomic(): obj_1.save() # Behavior can be changed with SET CONSTRAINTS. with connection.cursor() as cursor: constraint_name = connection.ops.quote_name("sheld_init_immediate_uniq") cursor.execute("SET CONSTRAINTS %s DEFERRED" % constraint_name) obj_1.save() obj_2.save() def test_deferrable_with_condition(self): message = "UniqueConstraint with conditions cannot be deferred." with self.assertRaisesMessage(ValueError, message): models.UniqueConstraint( fields=["name"], name="name_without_color_unique", condition=models.Q(color__isnull=True), deferrable=models.Deferrable.DEFERRED, ) def test_deferrable_with_include(self): message = "UniqueConstraint with include fields cannot be deferred." with self.assertRaisesMessage(ValueError, message): models.UniqueConstraint( fields=["name"], name="name_inc_color_color_unique", include=["color"], deferrable=models.Deferrable.DEFERRED, ) def test_deferrable_with_opclasses(self): message = "UniqueConstraint with opclasses cannot be deferred." with self.assertRaisesMessage(ValueError, message): models.UniqueConstraint( fields=["name"], name="name_text_pattern_ops_unique", opclasses=["text_pattern_ops"], deferrable=models.Deferrable.DEFERRED, ) def test_deferrable_with_expressions(self): message = "UniqueConstraint with expressions cannot be deferred." 
with self.assertRaisesMessage(ValueError, message): models.UniqueConstraint( Lower("name"), name="deferred_expression_unique", deferrable=models.Deferrable.DEFERRED, ) def test_invalid_defer_argument(self): message = "UniqueConstraint.deferrable must be a Deferrable instance." with self.assertRaisesMessage(ValueError, message): models.UniqueConstraint( fields=["name"], name="name_invalid", deferrable="invalid", ) @skipUnlessDBFeature( "supports_table_check_constraints", "supports_covering_indexes", ) def test_include_database_constraint(self): UniqueConstraintInclude.objects.create(name="p1", color="red") with self.assertRaises(IntegrityError): UniqueConstraintInclude.objects.create(name="p1", color="blue") def test_invalid_include_argument(self): msg = "UniqueConstraint.include must be a list or tuple." with self.assertRaisesMessage(ValueError, msg): models.UniqueConstraint( name="uniq_include", fields=["field"], include="other", ) def test_invalid_opclasses_argument(self): msg = "UniqueConstraint.opclasses must be a list or tuple." with self.assertRaisesMessage(ValueError, msg): models.UniqueConstraint( name="uniq_opclasses", fields=["field"], opclasses="jsonb_path_ops", ) def test_opclasses_and_fields_same_length(self): msg = ( "UniqueConstraint.fields and UniqueConstraint.opclasses must have " "the same number of elements." ) with self.assertRaisesMessage(ValueError, msg): models.UniqueConstraint( name="uniq_opclasses", fields=["field"], opclasses=["foo", "bar"], ) def test_requires_field_or_expression(self): msg = ( "At least one field or expression is required to define a unique " "constraint." ) with self.assertRaisesMessage(ValueError, msg): models.UniqueConstraint(name="name") def test_expressions_and_fields_mutually_exclusive(self): msg = "UniqueConstraint.fields and expressions are mutually exclusive." with self.assertRaisesMessage(ValueError, msg): models.UniqueConstraint(Lower("field_1"), fields=["field_2"], name="name") def test_expressions_with_opclasses(self): msg = ( "UniqueConstraint.opclasses cannot be used with expressions. Use " "django.contrib.postgres.indexes.OpClass() instead." ) with self.assertRaisesMessage(ValueError, msg): models.UniqueConstraint( Lower("field"), name="test_func_opclass", opclasses=["jsonb_path_ops"], ) def test_requires_name(self): msg = "A unique constraint must be named." with self.assertRaisesMessage(ValueError, msg): models.UniqueConstraint(fields=["field"])
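# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the test suite): the argument validation
# covered above can be reproduced outside the test runner, since the error is
# raised at construction time and needs no database. The name below is
# hypothetical.
if __name__ == "__main__":
    from django.db import models

    try:
        models.UniqueConstraint(fields=["name"], name="demo", deferrable="invalid")
    except ValueError as exc:
        print(exc)  # UniqueConstraint.deferrable must be a Deferrable instance.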
import datetime import pickle import unittest import uuid from collections import namedtuple from copy import deepcopy from decimal import Decimal from unittest import mock from django.core.exceptions import FieldError from django.db import DatabaseError, NotSupportedError, connection from django.db.models import ( AutoField, Avg, BinaryField, BooleanField, Case, CharField, Count, DateField, DateTimeField, DecimalField, DurationField, Exists, Expression, ExpressionList, ExpressionWrapper, F, FloatField, Func, IntegerField, Max, Min, Model, OrderBy, OuterRef, Q, StdDev, Subquery, Sum, TimeField, UUIDField, Value, Variance, When, ) from django.db.models.expressions import ( Col, Combinable, CombinedExpression, NegatedExpression, RawSQL, Ref, ) from django.db.models.functions import ( Coalesce, Concat, Left, Length, Lower, Substr, Upper, ) from django.db.models.sql import constants from django.db.models.sql.datastructures import Join from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import ( Approximate, CaptureQueriesContext, isolate_apps, register_lookup, ) from django.utils.functional import SimpleLazyObject from .models import ( UUID, UUIDPK, Company, Employee, Experiment, Manager, Number, RemoteEmployee, Result, SimulationRun, Time, ) class BasicExpressionsTests(TestCase): @classmethod def setUpTestData(cls): cls.example_inc = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=Employee.objects.create(firstname="Joe", lastname="Smith", salary=10), ) cls.foobar_ltd = Company.objects.create( name="Foobar Ltd.", num_employees=3, num_chairs=4, based_in_eu=True, ceo=Employee.objects.create(firstname="Frank", lastname="Meyer", salary=20), ) cls.max = Employee.objects.create( firstname="Max", lastname="Mustermann", salary=30 ) cls.gmbh = Company.objects.create( name="Test GmbH", num_employees=32, num_chairs=1, ceo=cls.max ) def setUp(self): self.company_query = Company.objects.values( "name", "num_employees", "num_chairs" ).order_by("name", "num_employees", "num_chairs") def test_annotate_values_aggregate(self): companies = ( Company.objects.annotate( salaries=F("ceo__salary"), ) .values("num_employees", "salaries") .aggregate( result=Sum( F("salaries") + F("num_employees"), output_field=IntegerField() ), ) ) self.assertEqual(companies["result"], 2395) def test_annotate_values_filter(self): companies = ( Company.objects.annotate( foo=RawSQL("%s", ["value"]), ) .filter(foo="value") .order_by("name") ) self.assertSequenceEqual( companies, [self.example_inc, self.foobar_ltd, self.gmbh], ) def test_annotate_values_count(self): companies = Company.objects.annotate(foo=RawSQL("%s", ["value"])) self.assertEqual(companies.count(), 3) @skipUnlessDBFeature("supports_boolean_expr_in_select_clause") def test_filtering_on_annotate_that_uses_q(self): self.assertEqual( Company.objects.annotate( num_employees_check=ExpressionWrapper( Q(num_employees__gt=3), output_field=BooleanField() ) ) .filter(num_employees_check=True) .count(), 2, ) def test_filtering_on_q_that_is_boolean(self): self.assertEqual( Company.objects.filter( ExpressionWrapper(Q(num_employees__gt=3), output_field=BooleanField()) ).count(), 2, ) def test_filtering_on_rawsql_that_is_boolean(self): self.assertEqual( Company.objects.filter( RawSQL("num_employees > %s", (3,), output_field=BooleanField()), ).count(), 2, ) def test_filter_inter_attribute(self): # We can filter on attribute relationships on same model obj, e.g. 
# find companies where the number of employees is greater # than the number of chairs. self.assertSequenceEqual( self.company_query.filter(num_employees__gt=F("num_chairs")), [ { "num_chairs": 5, "name": "Example Inc.", "num_employees": 2300, }, {"num_chairs": 1, "name": "Test GmbH", "num_employees": 32}, ], ) def test_update(self): # We can set one field to have the value of another field # Make sure we have enough chairs self.company_query.update(num_chairs=F("num_employees")) self.assertSequenceEqual( self.company_query, [ {"num_chairs": 2300, "name": "Example Inc.", "num_employees": 2300}, {"num_chairs": 3, "name": "Foobar Ltd.", "num_employees": 3}, {"num_chairs": 32, "name": "Test GmbH", "num_employees": 32}, ], ) def test_arithmetic(self): # We can perform arithmetic operations in expressions # Make sure we have 2 spare chairs self.company_query.update(num_chairs=F("num_employees") + 2) self.assertSequenceEqual( self.company_query, [ {"num_chairs": 2302, "name": "Example Inc.", "num_employees": 2300}, {"num_chairs": 5, "name": "Foobar Ltd.", "num_employees": 3}, {"num_chairs": 34, "name": "Test GmbH", "num_employees": 32}, ], ) def test_order_of_operations(self): # Law of order of operations is followed self.company_query.update( num_chairs=F("num_employees") + 2 * F("num_employees") ) self.assertSequenceEqual( self.company_query, [ {"num_chairs": 6900, "name": "Example Inc.", "num_employees": 2300}, {"num_chairs": 9, "name": "Foobar Ltd.", "num_employees": 3}, {"num_chairs": 96, "name": "Test GmbH", "num_employees": 32}, ], ) def test_parenthesis_priority(self): # Law of order of operations can be overridden by parentheses self.company_query.update( num_chairs=(F("num_employees") + 2) * F("num_employees") ) self.assertSequenceEqual( self.company_query, [ {"num_chairs": 5294600, "name": "Example Inc.", "num_employees": 2300}, {"num_chairs": 15, "name": "Foobar Ltd.", "num_employees": 3}, {"num_chairs": 1088, "name": "Test GmbH", "num_employees": 32}, ], ) def test_update_with_fk(self): # ForeignKey can become updated with the value of another ForeignKey. 
self.assertEqual(Company.objects.update(point_of_contact=F("ceo")), 3) self.assertQuerySetEqual( Company.objects.all(), ["Joe Smith", "Frank Meyer", "Max Mustermann"], lambda c: str(c.point_of_contact), ordered=False, ) def test_update_with_none(self): Number.objects.create(integer=1, float=1.0) Number.objects.create(integer=2) Number.objects.filter(float__isnull=False).update(float=Value(None)) self.assertQuerySetEqual( Number.objects.all(), [None, None], lambda n: n.float, ordered=False ) def test_filter_with_join(self): # F Expressions can also span joins Company.objects.update(point_of_contact=F("ceo")) c = Company.objects.first() c.point_of_contact = Employee.objects.create( firstname="Guido", lastname="van Rossum" ) c.save() self.assertQuerySetEqual( Company.objects.filter(ceo__firstname=F("point_of_contact__firstname")), ["Foobar Ltd.", "Test GmbH"], lambda c: c.name, ordered=False, ) Company.objects.exclude(ceo__firstname=F("point_of_contact__firstname")).update( name="foo" ) self.assertEqual( Company.objects.exclude(ceo__firstname=F("point_of_contact__firstname")) .get() .name, "foo", ) msg = "Joined field references are not permitted in this query" with self.assertRaisesMessage(FieldError, msg): Company.objects.exclude( ceo__firstname=F("point_of_contact__firstname") ).update(name=F("point_of_contact__lastname")) def test_object_update(self): # F expressions can be used to update attributes on single objects self.gmbh.num_employees = F("num_employees") + 4 self.gmbh.save() self.gmbh.refresh_from_db() self.assertEqual(self.gmbh.num_employees, 36) def test_new_object_save(self): # We should be able to use Funcs when inserting new data test_co = Company( name=Lower(Value("UPPER")), num_employees=32, num_chairs=1, ceo=self.max ) test_co.save() test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_new_object_create(self): test_co = Company.objects.create( name=Lower(Value("UPPER")), num_employees=32, num_chairs=1, ceo=self.max ) test_co.refresh_from_db() self.assertEqual(test_co.name, "upper") def test_object_create_with_aggregate(self): # Aggregates are not allowed when inserting new data msg = ( "Aggregate functions are not allowed in this query " "(num_employees=Max(Value(1)))." ) with self.assertRaisesMessage(FieldError, msg): Company.objects.create( name="Company", num_employees=Max(Value(1)), num_chairs=1, ceo=Employee.objects.create( firstname="Just", lastname="Doit", salary=30 ), ) def test_object_update_fk(self): # F expressions cannot be used to update attributes which are foreign # keys, or attributes which involve joins. test_gmbh = Company.objects.get(pk=self.gmbh.pk) msg = 'F(ceo)": "Company.point_of_contact" must be a "Employee" instance.' 
with self.assertRaisesMessage(ValueError, msg): test_gmbh.point_of_contact = F("ceo") test_gmbh.point_of_contact = self.gmbh.ceo test_gmbh.save() test_gmbh.name = F("ceo__lastname") msg = "Joined field references are not permitted in this query" with self.assertRaisesMessage(FieldError, msg): test_gmbh.save() def test_update_inherited_field_value(self): msg = "Joined field references are not permitted in this query" with self.assertRaisesMessage(FieldError, msg): RemoteEmployee.objects.update(adjusted_salary=F("salary") * 5) def test_object_update_unsaved_objects(self): # F expressions cannot be used to update attributes on objects which do # not yet exist in the database acme = Company( name="The Acme Widget Co.", num_employees=12, num_chairs=5, ceo=self.max ) acme.num_employees = F("num_employees") + 16 msg = ( 'Failed to insert expression "Col(expressions_company, ' 'expressions.Company.num_employees) + Value(16)" on ' "expressions.Company.num_employees. F() expressions can only be " "used to update, not to insert." ) with self.assertRaisesMessage(ValueError, msg): acme.save() acme.num_employees = 12 acme.name = Lower(F("name")) msg = ( 'Failed to insert expression "Lower(Col(expressions_company, ' 'expressions.Company.name))" on expressions.Company.name. F() ' "expressions can only be used to update, not to insert." ) with self.assertRaisesMessage(ValueError, msg): acme.save() def test_ticket_11722_iexact_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") test = Employee.objects.create(firstname="Test", lastname="test") queryset = Employee.objects.filter(firstname__iexact=F("lastname")) self.assertSequenceEqual(queryset, [test]) def test_ticket_16731_startswith_lookup(self): Employee.objects.create(firstname="John", lastname="Doe") e2 = Employee.objects.create(firstname="Jack", lastname="Jackson") e3 = Employee.objects.create(firstname="Jack", lastname="jackson") self.assertSequenceEqual( Employee.objects.filter(lastname__startswith=F("firstname")), [e2, e3] if connection.features.has_case_insensitive_like else [e2], ) qs = Employee.objects.filter(lastname__istartswith=F("firstname")).order_by( "pk" ) self.assertSequenceEqual(qs, [e2, e3]) def test_ticket_18375_join_reuse(self): # Reverse multijoin F() references and the lookup target the same join. # Pre #18375 the F() join was generated first and the lookup couldn't # reuse that join. qs = Employee.objects.filter( company_ceo_set__num_chairs=F("company_ceo_set__num_employees") ) self.assertEqual(str(qs.query).count("JOIN"), 1) def test_ticket_18375_kwarg_ordering(self): # The next query was dict-randomization dependent - if the "gte=1" # was seen first, then the F() will reuse the join generated by the # gte lookup, if F() was seen first, then it generated a join the # other lookups could not reuse. qs = Employee.objects.filter( company_ceo_set__num_chairs=F("company_ceo_set__num_employees"), company_ceo_set__num_chairs__gte=1, ) self.assertEqual(str(qs.query).count("JOIN"), 1) def test_ticket_18375_kwarg_ordering_2(self): # Another similar case for F() than above. Now we have the same join # in two filter kwargs, one in the lhs lookup, one in F. Here pre # #18375 the amount of joins generated was random if dict # randomization was enabled, that is the generated query dependent # on which clause was seen first. 
qs = Employee.objects.filter( company_ceo_set__num_employees=F("pk"), pk=F("company_ceo_set__num_employees"), ) self.assertEqual(str(qs.query).count("JOIN"), 1) def test_ticket_18375_chained_filters(self): # F() expressions do not reuse joins from previous filter. qs = Employee.objects.filter(company_ceo_set__num_employees=F("pk")).filter( company_ceo_set__num_employees=F("company_ceo_set__num_employees") ) self.assertEqual(str(qs.query).count("JOIN"), 2) def test_order_by_exists(self): mary = Employee.objects.create( firstname="Mary", lastname="Mustermann", salary=20 ) mustermanns_by_seniority = Employee.objects.filter( lastname="Mustermann" ).order_by( # Order by whether the employee is the CEO of a company Exists(Company.objects.filter(ceo=OuterRef("pk"))).desc() ) self.assertSequenceEqual(mustermanns_by_seniority, [self.max, mary]) def test_order_by_multiline_sql(self): raw_order_by = ( RawSQL( """ CASE WHEN num_employees > 1000 THEN num_chairs ELSE 0 END """, [], ).desc(), RawSQL( """ CASE WHEN num_chairs > 1 THEN 1 ELSE 0 END """, [], ).asc(), ) for qs in ( Company.objects.all(), Company.objects.distinct(), ): with self.subTest(qs=qs): self.assertSequenceEqual( qs.order_by(*raw_order_by), [self.example_inc, self.gmbh, self.foobar_ltd], ) def test_outerref(self): inner = Company.objects.filter(point_of_contact=OuterRef("pk")) msg = ( "This queryset contains a reference to an outer query and may only " "be used in a subquery." ) with self.assertRaisesMessage(ValueError, msg): inner.exists() outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) self.assertIs(outer.exists(), True) def test_exist_single_field_output_field(self): queryset = Company.objects.values("pk") self.assertIsInstance(Exists(queryset).output_field, BooleanField) def test_subquery(self): Company.objects.filter(name="Example Inc.").update( point_of_contact=Employee.objects.get(firstname="Joe", lastname="Smith"), ceo=self.max, ) Employee.objects.create(firstname="Bob", lastname="Brown", salary=40) qs = ( Employee.objects.annotate( is_point_of_contact=Exists( Company.objects.filter(point_of_contact=OuterRef("pk")) ), is_not_point_of_contact=~Exists( Company.objects.filter(point_of_contact=OuterRef("pk")) ), is_ceo_of_small_company=Exists( Company.objects.filter(num_employees__lt=200, ceo=OuterRef("pk")) ), is_ceo_small_2=~~Exists( Company.objects.filter(num_employees__lt=200, ceo=OuterRef("pk")) ), largest_company=Subquery( Company.objects.order_by("-num_employees") .filter(Q(ceo=OuterRef("pk")) | Q(point_of_contact=OuterRef("pk"))) .values("name")[:1], output_field=CharField(), ), ) .values( "firstname", "is_point_of_contact", "is_not_point_of_contact", "is_ceo_of_small_company", "is_ceo_small_2", "largest_company", ) .order_by("firstname") ) results = list(qs) # Could use Coalesce(subq, Value('')) instead except for the bug in # cx_Oracle mentioned in #23843. 
bob = results[0] if ( bob["largest_company"] == "" and connection.features.interprets_empty_strings_as_nulls ): bob["largest_company"] = None self.assertEqual( results, [ { "firstname": "Bob", "is_point_of_contact": False, "is_not_point_of_contact": True, "is_ceo_of_small_company": False, "is_ceo_small_2": False, "largest_company": None, }, { "firstname": "Frank", "is_point_of_contact": False, "is_not_point_of_contact": True, "is_ceo_of_small_company": True, "is_ceo_small_2": True, "largest_company": "Foobar Ltd.", }, { "firstname": "Joe", "is_point_of_contact": True, "is_not_point_of_contact": False, "is_ceo_of_small_company": False, "is_ceo_small_2": False, "largest_company": "Example Inc.", }, { "firstname": "Max", "is_point_of_contact": False, "is_not_point_of_contact": True, "is_ceo_of_small_company": True, "is_ceo_small_2": True, "largest_company": "Example Inc.", }, ], ) # A less elegant way to write the same query: this uses a LEFT OUTER # JOIN and an IS NULL, inside a WHERE NOT IN which is probably less # efficient than EXISTS. self.assertCountEqual( qs.filter(is_point_of_contact=True).values("pk"), Employee.objects.exclude(company_point_of_contact_set=None).values("pk"), ) def test_subquery_eq(self): qs = Employee.objects.annotate( is_ceo=Exists(Company.objects.filter(ceo=OuterRef("pk"))), is_point_of_contact=Exists( Company.objects.filter(point_of_contact=OuterRef("pk")), ), small_company=Exists( queryset=Company.objects.filter(num_employees__lt=200), ), ).filter(is_ceo=True, is_point_of_contact=False, small_company=True) self.assertNotEqual( qs.query.annotations["is_ceo"], qs.query.annotations["is_point_of_contact"], ) self.assertNotEqual( qs.query.annotations["is_ceo"], qs.query.annotations["small_company"], ) def test_subquery_sql(self): employees = Employee.objects.all() employees_subquery = Subquery(employees) self.assertIs(employees_subquery.query.subquery, True) self.assertIs(employees.query.subquery, False) compiler = employees_subquery.query.get_compiler(connection=connection) sql, _ = employees_subquery.as_sql(compiler, connection) self.assertIn("(SELECT ", sql) def test_in_subquery(self): # This is a contrived test (and you really wouldn't write this query), # but it is a succinct way to test the __in=Subquery() construct. 
small_companies = Company.objects.filter(num_employees__lt=200).values("pk") subquery_test = Company.objects.filter(pk__in=Subquery(small_companies)) self.assertCountEqual(subquery_test, [self.foobar_ltd, self.gmbh]) subquery_test2 = Company.objects.filter( pk=Subquery(small_companies.filter(num_employees=3)) ) self.assertCountEqual(subquery_test2, [self.foobar_ltd]) def test_uuid_pk_subquery(self): u = UUIDPK.objects.create() UUID.objects.create(uuid_fk=u) qs = UUIDPK.objects.filter(id__in=Subquery(UUID.objects.values("uuid_fk__id"))) self.assertCountEqual(qs, [u]) def test_nested_subquery(self): inner = Company.objects.filter(point_of_contact=OuterRef("pk")) outer = Employee.objects.annotate(is_point_of_contact=Exists(inner)) contrived = Employee.objects.annotate( is_point_of_contact=Subquery( outer.filter(pk=OuterRef("pk")).values("is_point_of_contact"), output_field=BooleanField(), ), ) self.assertCountEqual(contrived.values_list(), outer.values_list()) def test_nested_subquery_join_outer_ref(self): inner = Employee.objects.filter(pk=OuterRef("ceo__pk")).values("pk") qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( ceo__in=inner, ceo__pk=OuterRef("pk"), ).values("pk"), ), ) self.assertSequenceEqual( qs.values_list("ceo_company", flat=True), [self.example_inc.pk, self.foobar_ltd.pk, self.gmbh.pk], ) def test_nested_subquery_outer_ref_2(self): first = Time.objects.create(time="09:00") second = Time.objects.create(time="17:00") third = Time.objects.create(time="21:00") SimulationRun.objects.bulk_create( [ SimulationRun(start=first, end=second, midpoint="12:00"), SimulationRun(start=first, end=third, midpoint="15:00"), SimulationRun(start=second, end=first, midpoint="00:00"), ] ) inner = Time.objects.filter( time=OuterRef(OuterRef("time")), pk=OuterRef("start") ).values("time") middle = SimulationRun.objects.annotate(other=Subquery(inner)).values("other")[ :1 ] outer = Time.objects.annotate(other=Subquery(middle, output_field=TimeField())) # This is a contrived example. It exercises the double OuterRef form. self.assertCountEqual(outer, [first, second, third]) def test_nested_subquery_outer_ref_with_autofield(self): first = Time.objects.create(time="09:00") second = Time.objects.create(time="17:00") SimulationRun.objects.create(start=first, end=second, midpoint="12:00") inner = SimulationRun.objects.filter(start=OuterRef(OuterRef("pk"))).values( "start" ) middle = Time.objects.annotate(other=Subquery(inner)).values("other")[:1] outer = Time.objects.annotate( other=Subquery(middle, output_field=IntegerField()) ) # This exercises the double OuterRef form with AutoField as pk. 
self.assertCountEqual(outer, [first, second]) def test_annotations_within_subquery(self): Company.objects.filter(num_employees__lt=50).update( ceo=Employee.objects.get(firstname="Frank") ) inner = ( Company.objects.filter(ceo=OuterRef("pk")) .values("ceo") .annotate(total_employees=Sum("num_employees")) .values("total_employees") ) outer = Employee.objects.annotate(total_employees=Subquery(inner)).filter( salary__lte=Subquery(inner) ) self.assertSequenceEqual( outer.order_by("-total_employees").values("salary", "total_employees"), [ {"salary": 10, "total_employees": 2300}, {"salary": 20, "total_employees": 35}, ], ) def test_subquery_references_joined_table_twice(self): inner = Company.objects.filter( num_chairs__gte=OuterRef("ceo__salary"), num_employees__gte=OuterRef("point_of_contact__salary"), ) # Another contrived example (there is no need to have a subquery here) outer = Company.objects.filter(pk__in=Subquery(inner.values("pk"))) self.assertFalse(outer.exists()) def test_subquery_filter_by_aggregate(self): Number.objects.create(integer=1000, float=1.2) Employee.objects.create(salary=1000) qs = Number.objects.annotate( min_valuable_count=Subquery( Employee.objects.filter( salary=OuterRef("integer"), ) .annotate(cnt=Count("salary")) .filter(cnt__gt=0) .values("cnt")[:1] ), ) self.assertEqual(qs.get().float, 1.2) def test_subquery_filter_by_lazy(self): self.max.manager = Manager.objects.create(name="Manager") self.max.save() max_manager = SimpleLazyObject( lambda: Manager.objects.get(pk=self.max.manager.pk) ) qs = Company.objects.annotate( ceo_manager=Subquery( Employee.objects.filter( lastname=OuterRef("ceo__lastname"), ).values("manager"), ), ).filter(ceo_manager=max_manager) self.assertEqual(qs.get(), self.gmbh) def test_aggregate_subquery_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( ceo_salary=Subquery( Employee.objects.filter( id=OuterRef("ceo_id"), ).values("salary") ), ).aggregate( ceo_salary_gt_20=Count("pk", filter=Q(ceo_salary__gt=20)), ) self.assertEqual(aggregate, {"ceo_salary_gt_20": 1}) # Aggregation over a subquery annotation doesn't annotate the subquery # twice in the inner query. sql = ctx.captured_queries[0]["sql"] self.assertLessEqual(sql.count("SELECT"), 3) # GROUP BY isn't required to aggregate over a query that doesn't # contain nested aggregates. 
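        # aggregate() collapses the queryset into a single row, so the
        # filtered Count over the subquery annotation needs no grouping.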
self.assertNotIn("GROUP BY", sql) def test_object_create_with_f_expression_in_subquery(self): Company.objects.create( name="Big company", num_employees=100000, num_chairs=1, ceo=self.max ) biggest_company = Company.objects.create( name="Biggest company", num_chairs=1, ceo=self.max, num_employees=Subquery( Company.objects.order_by("-num_employees") .annotate(max_num_employees=Max("num_employees")) .annotate(new_num_employees=F("max_num_employees") + 1) .values("new_num_employees")[:1] ), ) biggest_company.refresh_from_db() self.assertEqual(biggest_company.num_employees, 100001) @skipUnlessDBFeature("supports_over_clause") def test_aggregate_rawsql_annotation(self): with self.assertNumQueries(1) as ctx: aggregate = Company.objects.annotate( salary=RawSQL("SUM(num_chairs) OVER (ORDER BY num_employees)", []), ).aggregate( count=Count("pk"), ) self.assertEqual(aggregate, {"count": 3}) sql = ctx.captured_queries[0]["sql"] self.assertNotIn("GROUP BY", sql) def test_explicit_output_field(self): class FuncA(Func): output_field = CharField() class FuncB(Func): pass expr = FuncB(FuncA()) self.assertEqual(expr.output_field, FuncA.output_field) def test_outerref_mixed_case_table_name(self): inner = Result.objects.filter(result_time__gte=OuterRef("experiment__assigned")) outer = Result.objects.filter(pk__in=Subquery(inner.values("pk"))) self.assertFalse(outer.exists()) def test_outerref_with_operator(self): inner = Company.objects.filter(num_employees=OuterRef("ceo__salary") + 2) outer = Company.objects.filter(pk__in=Subquery(inner.values("pk"))) self.assertEqual(outer.get().name, "Test GmbH") def test_nested_outerref_with_function(self): self.gmbh.point_of_contact = Employee.objects.get(lastname="Meyer") self.gmbh.save() inner = Employee.objects.filter( lastname__startswith=Left(OuterRef(OuterRef("lastname")), 1), ) qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( point_of_contact__in=inner, ceo__pk=OuterRef("pk"), ).values("name"), ), ).filter(ceo_company__isnull=False) self.assertEqual(qs.get().ceo_company, "Test GmbH") def test_annotation_with_outerref(self): gmbh_salary = Company.objects.annotate( max_ceo_salary_raise=Subquery( Company.objects.annotate( salary_raise=OuterRef("num_employees") + F("num_employees"), ) .order_by("-salary_raise") .values("salary_raise")[:1], output_field=IntegerField(), ), ).get(pk=self.gmbh.pk) self.assertEqual(gmbh_salary.max_ceo_salary_raise, 2332) def test_annotation_with_nested_outerref(self): self.gmbh.point_of_contact = Employee.objects.get(lastname="Meyer") self.gmbh.save() inner = Employee.objects.annotate( outer_lastname=OuterRef(OuterRef("lastname")), ).filter(lastname__startswith=Left("outer_lastname", 1)) qs = Employee.objects.annotate( ceo_company=Subquery( Company.objects.filter( point_of_contact__in=inner, ceo__pk=OuterRef("pk"), ).values("name"), ), ).filter(ceo_company__isnull=False) self.assertEqual(qs.get().ceo_company, "Test GmbH") def test_pickle_expression(self): expr = Value(1) expr.convert_value # populate cached property self.assertEqual(pickle.loads(pickle.dumps(expr)), expr) def test_incorrect_field_in_F_expression(self): with self.assertRaisesMessage( FieldError, "Cannot resolve keyword 'nope' into field." ): list(Employee.objects.filter(firstname=F("nope"))) def test_incorrect_joined_field_in_F_expression(self): with self.assertRaisesMessage( FieldError, "Cannot resolve keyword 'nope' into field." 
): list(Company.objects.filter(ceo__pk=F("point_of_contact__nope"))) def test_exists_in_filter(self): inner = Company.objects.filter(ceo=OuterRef("pk")).values("pk") qs1 = Employee.objects.filter(Exists(inner)) qs2 = Employee.objects.annotate(found=Exists(inner)).filter(found=True) self.assertCountEqual(qs1, qs2) self.assertFalse(Employee.objects.exclude(Exists(inner)).exists()) self.assertCountEqual(qs2, Employee.objects.exclude(~Exists(inner))) def test_subquery_in_filter(self): inner = Company.objects.filter(ceo=OuterRef("pk")).values("based_in_eu") self.assertSequenceEqual( Employee.objects.filter(Subquery(inner)), [self.foobar_ltd.ceo], ) def test_subquery_group_by_outerref_in_filter(self): inner = ( Company.objects.annotate( employee=OuterRef("pk"), ) .values("employee") .annotate( min_num_chairs=Min("num_chairs"), ) .values("ceo") ) self.assertIs(Employee.objects.filter(pk__in=Subquery(inner)).exists(), True) def test_case_in_filter_if_boolean_output_field(self): is_ceo = Company.objects.filter(ceo=OuterRef("pk")) is_poc = Company.objects.filter(point_of_contact=OuterRef("pk")) qs = Employee.objects.filter( Case( When(Exists(is_ceo), then=True), When(Exists(is_poc), then=True), default=False, output_field=BooleanField(), ), ) self.assertCountEqual(qs, [self.example_inc.ceo, self.foobar_ltd.ceo, self.max]) def test_boolean_expression_combined(self): is_ceo = Company.objects.filter(ceo=OuterRef("pk")) is_poc = Company.objects.filter(point_of_contact=OuterRef("pk")) self.gmbh.point_of_contact = self.max self.gmbh.save() self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) | Exists(is_poc)), [self.example_inc.ceo, self.foobar_ltd.ceo, self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) & Exists(is_poc)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_ceo) & Q(salary__gte=30)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Exists(is_poc) | Q(salary__lt=15)), [self.example_inc.ceo, self.max], ) self.assertCountEqual( Employee.objects.filter(Q(salary__gte=30) & Exists(is_ceo)), [self.max], ) self.assertCountEqual( Employee.objects.filter(Q(salary__lt=15) | Exists(is_poc)), [self.example_inc.ceo, self.max], ) def test_boolean_expression_combined_with_empty_Q(self): is_poc = Company.objects.filter(point_of_contact=OuterRef("pk")) self.gmbh.point_of_contact = self.max self.gmbh.save() tests = [ Exists(is_poc) & Q(), Q() & Exists(is_poc), Exists(is_poc) | Q(), Q() | Exists(is_poc), Q(Exists(is_poc)) & Q(), Q() & Q(Exists(is_poc)), Q(Exists(is_poc)) | Q(), Q() | Q(Exists(is_poc)), ] for conditions in tests: with self.subTest(conditions): self.assertCountEqual(Employee.objects.filter(conditions), [self.max]) def test_boolean_expression_in_Q(self): is_poc = Company.objects.filter(point_of_contact=OuterRef("pk")) self.gmbh.point_of_contact = self.max self.gmbh.save() self.assertCountEqual(Employee.objects.filter(Q(Exists(is_poc))), [self.max]) class IterableLookupInnerExpressionsTests(TestCase): @classmethod def setUpTestData(cls): ceo = Employee.objects.create(firstname="Just", lastname="Doit", salary=30) # MySQL requires that the values calculated for expressions don't pass # outside of the field's range, so it's inconvenient to use the values # in the more general tests. 
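        # The company names encode the fixture values: "5020 Ltd" has 50
        # employees and 20 chairs, "99300 Ltd" has 99 employees and 300
        # chairs, which keeps the expected results below easy to read.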
cls.c5020 = Company.objects.create( name="5020 Ltd", num_employees=50, num_chairs=20, ceo=ceo ) cls.c5040 = Company.objects.create( name="5040 Ltd", num_employees=50, num_chairs=40, ceo=ceo ) cls.c5050 = Company.objects.create( name="5050 Ltd", num_employees=50, num_chairs=50, ceo=ceo ) cls.c5060 = Company.objects.create( name="5060 Ltd", num_employees=50, num_chairs=60, ceo=ceo ) cls.c99300 = Company.objects.create( name="99300 Ltd", num_employees=99, num_chairs=300, ceo=ceo ) def test_in_lookup_allows_F_expressions_and_expressions_for_integers(self): # __in lookups can use F() expressions for integers. queryset = Company.objects.filter(num_employees__in=([F("num_chairs") - 10])) self.assertSequenceEqual(queryset, [self.c5060]) self.assertCountEqual( Company.objects.filter( num_employees__in=([F("num_chairs") - 10, F("num_chairs") + 10]) ), [self.c5040, self.c5060], ) self.assertCountEqual( Company.objects.filter( num_employees__in=( [F("num_chairs") - 10, F("num_chairs"), F("num_chairs") + 10] ) ), [self.c5040, self.c5050, self.c5060], ) def test_expressions_in_lookups_join_choice(self): midpoint = datetime.time(13, 0) t1 = Time.objects.create(time=datetime.time(12, 0)) t2 = Time.objects.create(time=datetime.time(14, 0)) s1 = SimulationRun.objects.create(start=t1, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=t1, end=None, midpoint=midpoint) SimulationRun.objects.create(start=None, end=t2, midpoint=midpoint) SimulationRun.objects.create(start=None, end=None, midpoint=midpoint) queryset = SimulationRun.objects.filter( midpoint__range=[F("start__time"), F("end__time")] ) self.assertSequenceEqual(queryset, [s1]) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.INNER) queryset = SimulationRun.objects.exclude( midpoint__range=[F("start__time"), F("end__time")] ) self.assertQuerySetEqual(queryset, [], ordered=False) for alias in queryset.query.alias_map.values(): if isinstance(alias, Join): self.assertEqual(alias.join_type, constants.LOUTER) def test_range_lookup_allows_F_expressions_and_expressions_for_integers(self): # Range lookups can use F() expressions for integers. Company.objects.filter(num_employees__exact=F("num_chairs")) self.assertCountEqual( Company.objects.filter(num_employees__range=(F("num_chairs"), 100)), [self.c5020, self.c5040, self.c5050], ) self.assertCountEqual( Company.objects.filter( num_employees__range=(F("num_chairs") - 10, F("num_chairs") + 10) ), [self.c5040, self.c5050, self.c5060], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(F("num_chairs") - 10, 100)), [self.c5020, self.c5040, self.c5050, self.c5060], ) self.assertCountEqual( Company.objects.filter(num_employees__range=(1, 100)), [self.c5020, self.c5040, self.c5050, self.c5060, self.c99300], ) def test_range_lookup_namedtuple(self): EmployeeRange = namedtuple("EmployeeRange", ["minimum", "maximum"]) qs = Company.objects.filter( num_employees__range=EmployeeRange(minimum=51, maximum=100), ) self.assertSequenceEqual(qs, [self.c99300]) @unittest.skipUnless( connection.vendor == "sqlite", "This defensive test only works on databases that don't validate parameter " "types", ) def test_expressions_not_introduce_sql_injection_via_untrusted_string_inclusion( self, ): """ This tests that SQL injection isn't possible using compilation of expressions in iterable filters, as their compilation happens before the main query compilation. 
It's limited to SQLite, as PostgreSQL, Oracle and other vendors have defense in depth against this by type checking. Testing against SQLite (the most permissive of the built-in databases) demonstrates that the problem doesn't exist while keeping the test simple. """ queryset = Company.objects.filter(name__in=[F("num_chairs") + "1)) OR ((1==1"]) self.assertQuerySetEqual(queryset, [], ordered=False) def test_in_lookup_allows_F_expressions_and_expressions_for_datetimes(self): start = datetime.datetime(2016, 2, 3, 15, 0, 0) end = datetime.datetime(2016, 2, 5, 15, 0, 0) experiment_1 = Experiment.objects.create( name="Integrity testing", assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) experiment_2 = Experiment.objects.create( name="Taste testing", assigned=start.date(), start=start, end=end, completed=end.date(), estimated_time=end - start, ) r1 = Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 2, 4, 15, 0, 0), ) Result.objects.create( experiment=experiment_1, result_time=datetime.datetime(2016, 3, 10, 2, 0, 0), ) Result.objects.create( experiment=experiment_2, result_time=datetime.datetime(2016, 1, 8, 5, 0, 0), ) within_experiment_time = [F("experiment__start"), F("experiment__end")] queryset = Result.objects.filter(result_time__range=within_experiment_time) self.assertSequenceEqual(queryset, [r1]) class FTests(SimpleTestCase): def test_deepcopy(self): f = F("foo") g = deepcopy(f) self.assertEqual(f.name, g.name) def test_deconstruct(self): f = F("name") path, args, kwargs = f.deconstruct() self.assertEqual(path, "django.db.models.F") self.assertEqual(args, (f.name,)) self.assertEqual(kwargs, {}) def test_equal(self): f = F("name") same_f = F("name") other_f = F("username") self.assertEqual(f, same_f) self.assertNotEqual(f, other_f) def test_hash(self): d = {F("name"): "Bob"} self.assertIn(F("name"), d) self.assertEqual(d[F("name")], "Bob") def test_not_equal_Value(self): f = F("name") value = Value("name") self.assertNotEqual(f, value) self.assertNotEqual(value, f) class ExpressionsTests(TestCase): def test_F_reuse(self): f = F("id") n = Number.objects.create(integer=-1) c = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=Employee.objects.create(firstname="Joe", lastname="Smith"), ) c_qs = Company.objects.filter(id=f) self.assertEqual(c_qs.get(), c) # Reuse the same F-object for another queryset n_qs = Number.objects.filter(id=f) self.assertEqual(n_qs.get(), n) # The original query still works correctly self.assertEqual(c_qs.get(), c) def test_patterns_escape(self): r""" Special characters (e.g. 
%, _ and \) stored in database are properly escaped when using a pattern lookup with an expression refs #16731 """ Employee.objects.bulk_create( [ Employee(firstname="Johnny", lastname="%John"), Employee(firstname="Jean-Claude", lastname="Claud_"), Employee(firstname="Jean-Claude", lastname="Claude%"), Employee(firstname="Johnny", lastname="Joh\\n"), Employee(firstname="Johnny", lastname="_ohn"), ] ) claude = Employee.objects.create(firstname="Jean-Claude", lastname="Claude") john = Employee.objects.create(firstname="Johnny", lastname="John") john_sign = Employee.objects.create(firstname="%Joh\\nny", lastname="%Joh\\n") self.assertCountEqual( Employee.objects.filter(firstname__contains=F("lastname")), [john_sign, john, claude], ) self.assertCountEqual( Employee.objects.filter(firstname__startswith=F("lastname")), [john_sign, john], ) self.assertSequenceEqual( Employee.objects.filter(firstname__endswith=F("lastname")), [claude], ) def test_insensitive_patterns_escape(self): r""" Special characters (e.g. %, _ and \) stored in database are properly escaped when using a case insensitive pattern lookup with an expression -- refs #16731 """ Employee.objects.bulk_create( [ Employee(firstname="Johnny", lastname="%john"), Employee(firstname="Jean-Claude", lastname="claud_"), Employee(firstname="Jean-Claude", lastname="claude%"), Employee(firstname="Johnny", lastname="joh\\n"), Employee(firstname="Johnny", lastname="_ohn"), ] ) claude = Employee.objects.create(firstname="Jean-Claude", lastname="claude") john = Employee.objects.create(firstname="Johnny", lastname="john") john_sign = Employee.objects.create(firstname="%Joh\\nny", lastname="%joh\\n") self.assertCountEqual( Employee.objects.filter(firstname__icontains=F("lastname")), [john_sign, john, claude], ) self.assertCountEqual( Employee.objects.filter(firstname__istartswith=F("lastname")), [john_sign, john], ) self.assertSequenceEqual( Employee.objects.filter(firstname__iendswith=F("lastname")), [claude], ) @isolate_apps("expressions") class SimpleExpressionTests(SimpleTestCase): def test_equal(self): self.assertEqual(Expression(), Expression()) self.assertEqual( Expression(IntegerField()), Expression(output_field=IntegerField()) ) self.assertEqual(Expression(IntegerField()), mock.ANY) self.assertNotEqual(Expression(IntegerField()), Expression(CharField())) class TestModel(Model): field = IntegerField() other_field = IntegerField() self.assertNotEqual( Expression(TestModel._meta.get_field("field")), Expression(TestModel._meta.get_field("other_field")), ) def test_hash(self): self.assertEqual(hash(Expression()), hash(Expression())) self.assertEqual( hash(Expression(IntegerField())), hash(Expression(output_field=IntegerField())), ) self.assertNotEqual( hash(Expression(IntegerField())), hash(Expression(CharField())), ) class TestModel(Model): field = IntegerField() other_field = IntegerField() self.assertNotEqual( hash(Expression(TestModel._meta.get_field("field"))), hash(Expression(TestModel._meta.get_field("other_field"))), ) class ExpressionsNumericTests(TestCase): @classmethod def setUpTestData(cls): Number(integer=-1).save() Number(integer=42).save() Number(integer=1337).save() Number.objects.update(float=F("integer")) def test_fill_with_value_from_same_object(self): """ We can fill a value in all objects with an other value of the same object. 
""" self.assertQuerySetEqual( Number.objects.all(), [(-1, -1), (42, 42), (1337, 1337)], lambda n: (n.integer, round(n.float)), ordered=False, ) def test_increment_value(self): """ We can increment a value of all objects in a query set. """ self.assertEqual( Number.objects.filter(integer__gt=0).update(integer=F("integer") + 1), 2 ) self.assertQuerySetEqual( Number.objects.all(), [(-1, -1), (43, 42), (1338, 1337)], lambda n: (n.integer, round(n.float)), ordered=False, ) def test_filter_not_equals_other_field(self): """ We can filter for objects, where a value is not equals the value of an other field. """ self.assertEqual( Number.objects.filter(integer__gt=0).update(integer=F("integer") + 1), 2 ) self.assertQuerySetEqual( Number.objects.exclude(float=F("integer")), [(43, 42), (1338, 1337)], lambda n: (n.integer, round(n.float)), ordered=False, ) def test_filter_decimal_expression(self): obj = Number.objects.create(integer=0, float=1, decimal_value=Decimal("1")) qs = Number.objects.annotate( x=ExpressionWrapper(Value(1), output_field=DecimalField()), ).filter(Q(x=1, integer=0) & Q(x=Decimal("1"))) self.assertSequenceEqual(qs, [obj]) def test_complex_expressions(self): """ Complex expressions of different connection types are possible. """ n = Number.objects.create(integer=10, float=123.45) self.assertEqual( Number.objects.filter(pk=n.pk).update(float=F("integer") + F("float") * 2), 1, ) self.assertEqual(Number.objects.get(pk=n.pk).integer, 10) self.assertEqual( Number.objects.get(pk=n.pk).float, Approximate(256.900, places=3) ) def test_decimal_expression(self): n = Number.objects.create(integer=1, decimal_value=Decimal("0.5")) n.decimal_value = F("decimal_value") - Decimal("0.4") n.save() n.refresh_from_db() self.assertEqual(n.decimal_value, Decimal("0.1")) class ExpressionOperatorTests(TestCase): @classmethod def setUpTestData(cls): cls.n = Number.objects.create(integer=42, float=15.5) cls.n1 = Number.objects.create(integer=-42, float=-15.5) def test_lefthand_addition(self): # LH Addition of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F("integer") + 15, float=F("float") + 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3) ) def test_lefthand_subtraction(self): # LH Subtraction of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F("integer") - 15, float=F("float") - 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(-27.200, places=3) ) def test_lefthand_multiplication(self): # Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F("integer") * 15, float=F("float") * 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3) ) def test_lefthand_division(self): # LH Division of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F("integer") / 2, float=F("float") / 42.7 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 21) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(0.363, places=3) ) def test_lefthand_modulo(self): # LH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=F("integer") % 20) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 2) def test_lefthand_modulo_null(self): # LH Modulo arithmetic on integers. 
Employee.objects.create(firstname="John", lastname="Doe", salary=None) qs = Employee.objects.annotate(modsalary=F("salary") % 20) self.assertIsNone(qs.get().salary) def test_lefthand_bitwise_and(self): # LH Bitwise ands on integers Number.objects.filter(pk=self.n.pk).update(integer=F("integer").bitand(56)) Number.objects.filter(pk=self.n1.pk).update(integer=F("integer").bitand(-56)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 40) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -64) def test_lefthand_bitwise_left_shift_operator(self): Number.objects.update(integer=F("integer").bitleftshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 168) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -168) def test_lefthand_bitwise_right_shift_operator(self): Number.objects.update(integer=F("integer").bitrightshift(2)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 10) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -11) def test_lefthand_bitwise_or(self): # LH Bitwise or on integers Number.objects.update(integer=F("integer").bitor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 58) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -10) def test_lefthand_transformed_field_bitwise_or(self): Employee.objects.create(firstname="Max", lastname="Mustermann") with register_lookup(CharField, Length): qs = Employee.objects.annotate(bitor=F("lastname__length").bitor(48)) self.assertEqual(qs.get().bitor, 58) def test_lefthand_power(self): # LH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update( integer=F("integer") ** 2, float=F("float") ** 1.5 ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 1764) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(61.02, places=2) ) def test_lefthand_bitwise_xor(self): Number.objects.update(integer=F("integer").bitxor(48)) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 26) self.assertEqual(Number.objects.get(pk=self.n1.pk).integer, -26) def test_lefthand_bitwise_xor_null(self): employee = Employee.objects.create(firstname="John", lastname="Doe") Employee.objects.update(salary=F("salary").bitxor(48)) employee.refresh_from_db() self.assertIsNone(employee.salary) def test_lefthand_bitwise_xor_right_null(self): employee = Employee.objects.create(firstname="John", lastname="Doe", salary=48) Employee.objects.update(salary=F("salary").bitxor(None)) employee.refresh_from_db() self.assertIsNone(employee.salary) @unittest.skipUnless( connection.vendor == "oracle", "Oracle doesn't support bitwise XOR." ) def test_lefthand_bitwise_xor_not_supported(self): msg = "Bitwise XOR is not supported in Oracle." 
with self.assertRaisesMessage(NotSupportedError, msg): Number.objects.update(integer=F("integer").bitxor(48)) def test_right_hand_addition(self): # Right hand operators Number.objects.filter(pk=self.n.pk).update( integer=15 + F("integer"), float=42.7 + F("float") ) # RH Addition of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 57) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(58.200, places=3) ) def test_right_hand_subtraction(self): Number.objects.filter(pk=self.n.pk).update( integer=15 - F("integer"), float=42.7 - F("float") ) # RH Subtraction of floats and integers self.assertEqual(Number.objects.get(pk=self.n.pk).integer, -27) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(27.200, places=3) ) def test_right_hand_multiplication(self): # RH Multiplication of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=15 * F("integer"), float=42.7 * F("float") ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 630) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(661.850, places=3) ) def test_right_hand_division(self): # RH Division of floats and integers Number.objects.filter(pk=self.n.pk).update( integer=640 / F("integer"), float=42.7 / F("float") ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 15) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(2.755, places=3) ) def test_right_hand_modulo(self): # RH Modulo arithmetic on integers Number.objects.filter(pk=self.n.pk).update(integer=69 % F("integer")) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 27) def test_righthand_power(self): # RH Power arithmetic operation on floats and integers Number.objects.filter(pk=self.n.pk).update( integer=2 ** F("integer"), float=1.5 ** F("float") ) self.assertEqual(Number.objects.get(pk=self.n.pk).integer, 4398046511104) self.assertEqual( Number.objects.get(pk=self.n.pk).float, Approximate(536.308, places=3) ) class FTimeDeltaTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) midnight = datetime.time(0) delta0 = datetime.timedelta(0) delta1 = datetime.timedelta(microseconds=253000) delta2 = datetime.timedelta(seconds=44) delta3 = datetime.timedelta(hours=21, minutes=8) delta4 = datetime.timedelta(days=10) delta5 = datetime.timedelta(days=90) # Test data is set so that deltas and delays will be # strictly increasing. cls.deltas = [] cls.delays = [] cls.days_long = [] # e0: started same day as assigned, zero duration end = stime + delta0 cls.e0 = Experiment.objects.create( name="e0", assigned=sday, start=stime, end=end, completed=end.date(), estimated_time=delta0, ) cls.deltas.append(delta0) cls.delays.append( cls.e0.start - datetime.datetime.combine(cls.e0.assigned, midnight) ) cls.days_long.append(cls.e0.completed - cls.e0.assigned) # e1: started one day after assigned, tiny duration, data # set so that end time has no fractional seconds, which # tests an edge case on sqlite. 
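        # stime ends at .747000 and delta1 is 253000 microseconds, so e1's
        # end falls exactly on a whole second with no fractional part.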
delay = datetime.timedelta(1) end = stime + delay + delta1 e1 = Experiment.objects.create( name="e1", assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta1, ) cls.deltas.append(delta1) cls.delays.append(e1.start - datetime.datetime.combine(e1.assigned, midnight)) cls.days_long.append(e1.completed - e1.assigned) # e2: started three days after assigned, small duration end = stime + delta2 e2 = Experiment.objects.create( name="e2", assigned=sday - datetime.timedelta(3), start=stime, end=end, completed=end.date(), estimated_time=datetime.timedelta(hours=1), ) cls.deltas.append(delta2) cls.delays.append(e2.start - datetime.datetime.combine(e2.assigned, midnight)) cls.days_long.append(e2.completed - e2.assigned) # e3: started four days after assigned, medium duration delay = datetime.timedelta(4) end = stime + delay + delta3 e3 = Experiment.objects.create( name="e3", assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta3, ) cls.deltas.append(delta3) cls.delays.append(e3.start - datetime.datetime.combine(e3.assigned, midnight)) cls.days_long.append(e3.completed - e3.assigned) # e4: started 10 days after assignment, long duration end = stime + delta4 e4 = Experiment.objects.create( name="e4", assigned=sday - datetime.timedelta(10), start=stime, end=end, completed=end.date(), estimated_time=delta4 - datetime.timedelta(1), ) cls.deltas.append(delta4) cls.delays.append(e4.start - datetime.datetime.combine(e4.assigned, midnight)) cls.days_long.append(e4.completed - e4.assigned) # e5: started a month after assignment, very long duration delay = datetime.timedelta(30) end = stime + delay + delta5 e5 = Experiment.objects.create( name="e5", assigned=sday, start=stime + delay, end=end, completed=end.date(), estimated_time=delta5, ) cls.deltas.append(delta5) cls.delays.append(e5.start - datetime.datetime.combine(e5.assigned, midnight)) cls.days_long.append(e5.completed - e5.assigned) cls.expnames = [e.name for e in Experiment.objects.all()] def test_multiple_query_compilation(self): # Ticket #21643 queryset = Experiment.objects.filter( end__lt=F("start") + datetime.timedelta(hours=1) ) q1 = str(queryset.query) q2 = str(queryset.query) self.assertEqual(q1, q2) def test_query_clone(self): # Ticket #21643 - Crash when compiling query more than once qs = Experiment.objects.filter(end__lt=F("start") + datetime.timedelta(hours=1)) qs2 = qs.all() list(qs) list(qs2) # Intentionally no assert def test_delta_add(self): for i, delta in enumerate(self.deltas): test_set = [ e.name for e in Experiment.objects.filter(end__lt=F("start") + delta) ] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(end__lt=delta + F("start")) ] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(end__lte=F("start") + delta) ] self.assertEqual(test_set, self.expnames[: i + 1]) def test_delta_subtract(self): for i, delta in enumerate(self.deltas): test_set = [ e.name for e in Experiment.objects.filter(start__gt=F("end") - delta) ] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(start__gte=F("end") - delta) ] self.assertEqual(test_set, self.expnames[: i + 1]) def test_exclude(self): for i, delta in enumerate(self.deltas): test_set = [ e.name for e in Experiment.objects.exclude(end__lt=F("start") + delta) ] self.assertEqual(test_set, self.expnames[i:]) test_set = [ e.name for e in Experiment.objects.exclude(end__lte=F("start") 
+ delta) ] self.assertEqual(test_set, self.expnames[i + 1 :]) def test_date_comparison(self): for i, days in enumerate(self.days_long): test_set = [ e.name for e in Experiment.objects.filter(completed__lt=F("assigned") + days) ] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(completed__lte=F("assigned") + days) ] self.assertEqual(test_set, self.expnames[: i + 1]) def test_datetime_and_durationfield_addition_with_filter(self): test_set = Experiment.objects.filter(end=F("start") + F("estimated_time")) self.assertGreater(test_set.count(), 0) self.assertEqual( [e.name for e in test_set], [ e.name for e in Experiment.objects.all() if e.end == e.start + e.estimated_time ], ) def test_datetime_and_duration_field_addition_with_annotate_and_no_output_field( self, ): test_set = Experiment.objects.annotate( estimated_end=F("start") + F("estimated_time") ) self.assertEqual( [e.estimated_end for e in test_set], [e.start + e.estimated_time for e in test_set], ) @skipUnlessDBFeature("supports_temporal_subtraction") def test_datetime_subtraction_with_annotate_and_no_output_field(self): test_set = Experiment.objects.annotate( calculated_duration=F("end") - F("start") ) self.assertEqual( [e.calculated_duration for e in test_set], [e.end - e.start for e in test_set], ) def test_mixed_comparisons1(self): for i, delay in enumerate(self.delays): test_set = [ e.name for e in Experiment.objects.filter(assigned__gt=F("start") - delay) ] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter(assigned__gte=F("start") - delay) ] self.assertEqual(test_set, self.expnames[: i + 1]) def test_mixed_comparisons2(self): for i, delay in enumerate(self.delays): delay = datetime.timedelta(delay.days) test_set = [ e.name for e in Experiment.objects.filter(start__lt=F("assigned") + delay) ] self.assertEqual(test_set, self.expnames[:i]) test_set = [ e.name for e in Experiment.objects.filter( start__lte=F("assigned") + delay + datetime.timedelta(1) ) ] self.assertEqual(test_set, self.expnames[: i + 1]) def test_delta_update(self): for delta in self.deltas: exps = Experiment.objects.all() expected_durations = [e.duration() for e in exps] expected_starts = [e.start + delta for e in exps] expected_ends = [e.end + delta for e in exps] Experiment.objects.update(start=F("start") + delta, end=F("end") + delta) exps = Experiment.objects.all() new_starts = [e.start for e in exps] new_ends = [e.end for e in exps] new_durations = [e.duration() for e in exps] self.assertEqual(expected_starts, new_starts) self.assertEqual(expected_ends, new_ends) self.assertEqual(expected_durations, new_durations) def test_invalid_operator(self): with self.assertRaises(DatabaseError): list(Experiment.objects.filter(start=F("start") * datetime.timedelta(0))) def test_durationfield_add(self): zeros = [ e.name for e in Experiment.objects.filter(start=F("start") + F("estimated_time")) ] self.assertEqual(zeros, ["e0"]) end_less = [ e.name for e in Experiment.objects.filter(end__lt=F("start") + F("estimated_time")) ] self.assertEqual(end_less, ["e2"]) delta_math = [ e.name for e in Experiment.objects.filter( end__gte=F("start") + F("estimated_time") + datetime.timedelta(hours=1) ) ] self.assertEqual(delta_math, ["e4"]) queryset = Experiment.objects.annotate( shifted=ExpressionWrapper( F("start") + Value(None, output_field=DurationField()), output_field=DateTimeField(), ) ) self.assertIsNone(queryset.first().shifted) def test_durationfield_multiply_divide(self): 
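        # Every experiment gets scalar=2, so the F("scalar") case below is
        # equivalent to multiplying or dividing estimated_time by the
        # literal 2.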
Experiment.objects.update(scalar=2) tests = [ (Decimal("2"), 2), (F("scalar"), 2), (2, 2), (3.2, 3.2), ] for expr, scalar in tests: with self.subTest(expr=expr): qs = Experiment.objects.annotate( multiplied=ExpressionWrapper( expr * F("estimated_time"), output_field=DurationField(), ), divided=ExpressionWrapper( F("estimated_time") / expr, output_field=DurationField(), ), ) for experiment in qs: self.assertEqual( experiment.multiplied, experiment.estimated_time * scalar, ) self.assertEqual( experiment.divided, experiment.estimated_time / scalar, ) def test_duration_expressions(self): for delta in self.deltas: qs = Experiment.objects.annotate(duration=F("estimated_time") + delta) for obj in qs: self.assertEqual(obj.duration, obj.estimated_time + delta) @skipUnlessDBFeature("supports_temporal_subtraction") def test_date_subtraction(self): queryset = Experiment.objects.annotate( completion_duration=F("completed") - F("assigned"), ) at_least_5_days = { e.name for e in queryset.filter( completion_duration__gte=datetime.timedelta(days=5) ) } self.assertEqual(at_least_5_days, {"e3", "e4", "e5"}) at_least_120_days = { e.name for e in queryset.filter( completion_duration__gte=datetime.timedelta(days=120) ) } self.assertEqual(at_least_120_days, {"e5"}) less_than_5_days = { e.name for e in queryset.filter(completion_duration__lt=datetime.timedelta(days=5)) } self.assertEqual(less_than_5_days, {"e0", "e1", "e2"}) queryset = Experiment.objects.annotate( difference=F("completed") - Value(None, output_field=DateField()), ) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate( shifted=ExpressionWrapper( F("completed") - Value(None, output_field=DurationField()), output_field=DateField(), ) ) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature("supports_temporal_subtraction") def test_date_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef("pk")).values("completed") queryset = Experiment.objects.annotate( difference=subquery - F("completed"), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature("supports_temporal_subtraction") def test_date_case_subtraction(self): queryset = Experiment.objects.annotate( date_case=Case( When(Q(name="e0"), then=F("completed")), output_field=DateField(), ), completed_value=Value( self.e0.completed, output_field=DateField(), ), difference=F("date_case") - F("completed_value"), ).filter(difference=datetime.timedelta()) self.assertEqual(queryset.get(), self.e0) @skipUnlessDBFeature("supports_temporal_subtraction") def test_time_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) queryset = Time.objects.annotate( difference=F("time") - Value(datetime.time(11, 15, 0)), ) self.assertEqual( queryset.get().difference, datetime.timedelta(hours=1, minutes=15, seconds=15, microseconds=2345), ) queryset = Time.objects.annotate( difference=F("time") - Value(None, output_field=TimeField()), ) self.assertIsNone(queryset.first().difference) queryset = Time.objects.annotate( shifted=ExpressionWrapper( F("time") - Value(None, output_field=DurationField()), output_field=TimeField(), ) ) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature("supports_temporal_subtraction") def test_time_subquery_subtraction(self): Time.objects.create(time=datetime.time(12, 30, 15, 2345)) subquery = Time.objects.filter(pk=OuterRef("pk")).values("time") queryset = Time.objects.annotate( difference=subquery - F("time"), ).filter(difference=datetime.timedelta()) 
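        # The correlated subquery returns the row's own time, so subtracting
        # it from F("time") always yields a zero timedelta.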
self.assertTrue(queryset.exists()) @skipUnlessDBFeature("supports_temporal_subtraction") def test_datetime_subtraction(self): under_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__gt=F("end") - F("start")) ] self.assertEqual(under_estimate, ["e2"]) over_estimate = [ e.name for e in Experiment.objects.filter(estimated_time__lt=F("end") - F("start")) ] self.assertEqual(over_estimate, ["e4"]) queryset = Experiment.objects.annotate( difference=F("start") - Value(None, output_field=DateTimeField()), ) self.assertIsNone(queryset.first().difference) queryset = Experiment.objects.annotate( shifted=ExpressionWrapper( F("start") - Value(None, output_field=DurationField()), output_field=DateTimeField(), ) ) self.assertIsNone(queryset.first().shifted) @skipUnlessDBFeature("supports_temporal_subtraction") def test_datetime_subquery_subtraction(self): subquery = Experiment.objects.filter(pk=OuterRef("pk")).values("start") queryset = Experiment.objects.annotate( difference=subquery - F("start"), ).filter(difference=datetime.timedelta()) self.assertTrue(queryset.exists()) @skipUnlessDBFeature("supports_temporal_subtraction") def test_datetime_subtraction_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) Experiment.objects.update(end=F("start") + delta) qs = Experiment.objects.annotate(delta=F("end") - F("start")) for e in qs: self.assertEqual(e.delta, delta) def test_duration_with_datetime(self): # Exclude e1 which has very high precision so we can test this on all # backends regardless of whether or not it supports # microsecond_precision. over_estimate = ( Experiment.objects.exclude(name="e1") .filter( completed__gt=self.stime + F("estimated_time"), ) .order_by("name") ) self.assertQuerySetEqual(over_estimate, ["e3", "e4", "e5"], lambda e: e.name) def test_duration_with_datetime_microseconds(self): delta = datetime.timedelta(microseconds=8999999999999999) qs = Experiment.objects.annotate( dt=ExpressionWrapper( F("start") + delta, output_field=DateTimeField(), ) ) for e in qs: self.assertEqual(e.dt, e.start + delta) def test_date_minus_duration(self): more_than_4_days = Experiment.objects.filter( assigned__lt=F("completed") - Value(datetime.timedelta(days=4)) ) self.assertQuerySetEqual(more_than_4_days, ["e3", "e4", "e5"], lambda e: e.name) def test_negative_timedelta_update(self): # subtract 30 seconds, 30 minutes, 2 hours and 2 days experiments = ( Experiment.objects.filter(name="e0") .annotate( start_sub_seconds=F("start") + datetime.timedelta(seconds=-30), ) .annotate( start_sub_minutes=F("start_sub_seconds") + datetime.timedelta(minutes=-30), ) .annotate( start_sub_hours=F("start_sub_minutes") + datetime.timedelta(hours=-2), ) .annotate( new_start=F("start_sub_hours") + datetime.timedelta(days=-2), ) ) expected_start = datetime.datetime(2010, 6, 23, 9, 45, 0) # subtract 30 microseconds experiments = experiments.annotate( new_start=F("new_start") + datetime.timedelta(microseconds=-30) ) expected_start += datetime.timedelta(microseconds=+746970) experiments.update(start=F("new_start")) e0 = Experiment.objects.get(name="e0") self.assertEqual(e0.start, expected_start) class ValueTests(TestCase): def test_update_TimeField_using_Value(self): Time.objects.create() Time.objects.update(time=Value(datetime.time(1), output_field=TimeField())) self.assertEqual(Time.objects.get().time, datetime.time(1)) def test_update_UUIDField_using_Value(self): UUID.objects.create() UUID.objects.update( uuid=Value( uuid.UUID("12345678901234567890123456789012"), 
output_field=UUIDField() ) ) self.assertEqual( UUID.objects.get().uuid, uuid.UUID("12345678901234567890123456789012") ) def test_deconstruct(self): value = Value("name") path, args, kwargs = value.deconstruct() self.assertEqual(path, "django.db.models.Value") self.assertEqual(args, (value.value,)) self.assertEqual(kwargs, {}) def test_deconstruct_output_field(self): value = Value("name", output_field=CharField()) path, args, kwargs = value.deconstruct() self.assertEqual(path, "django.db.models.Value") self.assertEqual(args, (value.value,)) self.assertEqual(len(kwargs), 1) self.assertEqual( kwargs["output_field"].deconstruct(), CharField().deconstruct() ) def test_repr(self): tests = [ (None, "Value(None)"), ("str", "Value('str')"), (True, "Value(True)"), (42, "Value(42)"), ( datetime.datetime(2019, 5, 15), "Value(datetime.datetime(2019, 5, 15, 0, 0))", ), (Decimal("3.14"), "Value(Decimal('3.14'))"), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual(repr(Value(value)), expected) def test_equal(self): value = Value("name") self.assertEqual(value, Value("name")) self.assertNotEqual(value, Value("username")) def test_hash(self): d = {Value("name"): "Bob"} self.assertIn(Value("name"), d) self.assertEqual(d[Value("name")], "Bob") def test_equal_output_field(self): value = Value("name", output_field=CharField()) same_value = Value("name", output_field=CharField()) other_value = Value("name", output_field=TimeField()) no_output_field = Value("name") self.assertEqual(value, same_value) self.assertNotEqual(value, other_value) self.assertNotEqual(value, no_output_field) def test_raise_empty_expressionlist(self): msg = "ExpressionList requires at least one expression" with self.assertRaisesMessage(ValueError, msg): ExpressionList() def test_compile_unresolved(self): # This test might need to be revisited later on if #25425 is enforced. compiler = Time.objects.all().query.get_compiler(connection=connection) value = Value("foo") self.assertEqual(value.as_sql(compiler, connection), ("%s", ["foo"])) value = Value("foo", output_field=CharField()) self.assertEqual(value.as_sql(compiler, connection), ("%s", ["foo"])) def test_output_field_decimalfield(self): Time.objects.create() time = Time.objects.annotate(one=Value(1, output_field=DecimalField())).first() self.assertEqual(time.one, 1) def test_resolve_output_field(self): value_types = [ ("str", CharField), (True, BooleanField), (42, IntegerField), (3.14, FloatField), (datetime.date(2019, 5, 15), DateField), (datetime.datetime(2019, 5, 15), DateTimeField), (datetime.time(3, 16), TimeField), (datetime.timedelta(1), DurationField), (Decimal("3.14"), DecimalField), (b"", BinaryField), (uuid.uuid4(), UUIDField), ] for value, output_field_type in value_types: with self.subTest(type=type(value)): expr = Value(value) self.assertIsInstance(expr.output_field, output_field_type) def test_resolve_output_field_failure(self): msg = "Cannot resolve expression type, unknown output_field" with self.assertRaisesMessage(FieldError, msg): Value(object()).output_field def test_output_field_does_not_create_broken_validators(self): """ The output field for a given Value doesn't get cleaned & validated, however validators may still be instantiated for a given field type and this demonstrates that they don't throw an exception. 
""" value_types = [ "str", True, 42, 3.14, datetime.date(2019, 5, 15), datetime.datetime(2019, 5, 15), datetime.time(3, 16), datetime.timedelta(1), Decimal("3.14"), b"", uuid.uuid4(), ] for value in value_types: with self.subTest(type=type(value)): field = Value(value)._resolve_output_field() field.clean(value, model_instance=None) class ExistsTests(TestCase): def test_optimizations(self): with CaptureQueriesContext(connection) as context: list( Experiment.objects.values( exists=Exists( Experiment.objects.order_by("pk"), ) ).order_by() ) captured_queries = context.captured_queries self.assertEqual(len(captured_queries), 1) captured_sql = captured_queries[0]["sql"] self.assertNotIn( connection.ops.quote_name(Experiment._meta.pk.column), captured_sql, ) self.assertIn( connection.ops.limit_offset_sql(None, 1), captured_sql, ) self.assertNotIn("ORDER BY", captured_sql) def test_negated_empty_exists(self): manager = Manager.objects.create() qs = Manager.objects.filter(~Exists(Manager.objects.none()) & Q(pk=manager.pk)) self.assertSequenceEqual(qs, [manager]) def test_select_negated_empty_exists(self): manager = Manager.objects.create() qs = Manager.objects.annotate( not_exists=~Exists(Manager.objects.none()) ).filter(pk=manager.pk) self.assertSequenceEqual(qs, [manager]) self.assertIs(qs.get().not_exists, True) class FieldTransformTests(TestCase): @classmethod def setUpTestData(cls): cls.sday = sday = datetime.date(2010, 6, 25) cls.stime = stime = datetime.datetime(2010, 6, 25, 12, 15, 30, 747000) cls.ex1 = Experiment.objects.create( name="Experiment 1", assigned=sday, completed=sday + datetime.timedelta(2), estimated_time=datetime.timedelta(2), start=stime, end=stime + datetime.timedelta(2), ) def test_month_aggregation(self): self.assertEqual( Experiment.objects.aggregate(month_count=Count("assigned__month")), {"month_count": 1}, ) def test_transform_in_values(self): self.assertSequenceEqual( Experiment.objects.values("assigned__month"), [{"assigned__month": 6}], ) def test_multiple_transforms_in_values(self): self.assertSequenceEqual( Experiment.objects.values("end__date__month"), [{"end__date__month": 6}], ) class ReprTests(SimpleTestCase): def test_expressions(self): self.assertEqual( repr(Case(When(a=1))), "<Case: CASE WHEN <Q: (AND: ('a', 1))> THEN Value(None), ELSE Value(None)>", ) self.assertEqual( repr(When(Q(age__gte=18), then=Value("legal"))), "<When: WHEN <Q: (AND: ('age__gte', 18))> THEN Value('legal')>", ) self.assertEqual(repr(Col("alias", "field")), "Col(alias, field)") self.assertEqual(repr(F("published")), "F(published)") self.assertEqual( repr(F("cost") + F("tax")), "<CombinedExpression: F(cost) + F(tax)>" ) self.assertEqual( repr(ExpressionWrapper(F("cost") + F("tax"), IntegerField())), "ExpressionWrapper(F(cost) + F(tax))", ) self.assertEqual( repr(Func("published", function="TO_CHAR")), "Func(F(published), function=TO_CHAR)", ) self.assertEqual(repr(OrderBy(Value(1))), "OrderBy(Value(1), descending=False)") self.assertEqual(repr(RawSQL("table.col", [])), "RawSQL(table.col, [])") self.assertEqual( repr(Ref("sum_cost", Sum("cost"))), "Ref(sum_cost, Sum(F(cost)))" ) self.assertEqual(repr(Value(1)), "Value(1)") self.assertEqual( repr(ExpressionList(F("col"), F("anothercol"))), "ExpressionList(F(col), F(anothercol))", ) self.assertEqual( repr(ExpressionList(OrderBy(F("col"), descending=False))), "ExpressionList(OrderBy(F(col), descending=False))", ) def test_functions(self): self.assertEqual(repr(Coalesce("a", "b")), "Coalesce(F(a), F(b))") self.assertEqual(repr(Concat("a", 
"b")), "Concat(ConcatPair(F(a), F(b)))") self.assertEqual(repr(Length("a")), "Length(F(a))") self.assertEqual(repr(Lower("a")), "Lower(F(a))") self.assertEqual(repr(Substr("a", 1, 3)), "Substr(F(a), Value(1), Value(3))") self.assertEqual(repr(Upper("a")), "Upper(F(a))") def test_aggregates(self): self.assertEqual(repr(Avg("a")), "Avg(F(a))") self.assertEqual(repr(Count("a")), "Count(F(a))") self.assertEqual(repr(Count("*")), "Count('*')") self.assertEqual(repr(Max("a")), "Max(F(a))") self.assertEqual(repr(Min("a")), "Min(F(a))") self.assertEqual(repr(StdDev("a")), "StdDev(F(a), sample=False)") self.assertEqual(repr(Sum("a")), "Sum(F(a))") self.assertEqual( repr(Variance("a", sample=True)), "Variance(F(a), sample=True)" ) def test_distinct_aggregates(self): self.assertEqual(repr(Count("a", distinct=True)), "Count(F(a), distinct=True)") self.assertEqual(repr(Count("*", distinct=True)), "Count('*', distinct=True)") def test_filtered_aggregates(self): filter = Q(a=1) self.assertEqual( repr(Avg("a", filter=filter)), "Avg(F(a), filter=(AND: ('a', 1)))" ) self.assertEqual( repr(Count("a", filter=filter)), "Count(F(a), filter=(AND: ('a', 1)))" ) self.assertEqual( repr(Max("a", filter=filter)), "Max(F(a), filter=(AND: ('a', 1)))" ) self.assertEqual( repr(Min("a", filter=filter)), "Min(F(a), filter=(AND: ('a', 1)))" ) self.assertEqual( repr(StdDev("a", filter=filter)), "StdDev(F(a), filter=(AND: ('a', 1)), sample=False)", ) self.assertEqual( repr(Sum("a", filter=filter)), "Sum(F(a), filter=(AND: ('a', 1)))" ) self.assertEqual( repr(Variance("a", sample=True, filter=filter)), "Variance(F(a), filter=(AND: ('a', 1)), sample=True)", ) self.assertEqual( repr(Count("a", filter=filter, distinct=True)), "Count(F(a), distinct=True, filter=(AND: ('a', 1)))", ) class CombinableTests(SimpleTestCase): bitwise_msg = ( "Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations." ) def test_negation(self): c = Combinable() self.assertEqual(-c, c * -1) def test_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() & Combinable() def test_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() | Combinable() def test_xor(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): Combinable() ^ Combinable() def test_reversed_and(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() & Combinable() def test_reversed_or(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() | Combinable() def test_reversed_xor(self): with self.assertRaisesMessage(NotImplementedError, self.bitwise_msg): object() ^ Combinable() class CombinedExpressionTests(SimpleTestCase): def test_resolve_output_field_number(self): tests = [ (IntegerField, AutoField, IntegerField), (AutoField, IntegerField, IntegerField), (IntegerField, DecimalField, DecimalField), (DecimalField, IntegerField, DecimalField), (IntegerField, FloatField, FloatField), (FloatField, IntegerField, FloatField), ] connectors = [ Combinable.ADD, Combinable.SUB, Combinable.MUL, Combinable.DIV, Combinable.MOD, ] for lhs, rhs, combined in tests: for connector in connectors: with self.subTest( lhs=lhs, connector=connector, rhs=rhs, combined=combined ): expr = CombinedExpression( Expression(lhs()), connector, Expression(rhs()), ) self.assertIsInstance(expr.output_field, combined) def test_resolve_output_field_with_null(self): def null(): return Value(None) tests = [ # Numbers. 
(AutoField, Combinable.ADD, null), (DecimalField, Combinable.ADD, null), (FloatField, Combinable.ADD, null), (IntegerField, Combinable.ADD, null), (IntegerField, Combinable.SUB, null), (null, Combinable.ADD, IntegerField), # Dates. (DateField, Combinable.ADD, null), (DateTimeField, Combinable.ADD, null), (DurationField, Combinable.ADD, null), (TimeField, Combinable.ADD, null), (TimeField, Combinable.SUB, null), (null, Combinable.ADD, DateTimeField), (DateField, Combinable.SUB, null), ] for lhs, connector, rhs in tests: msg = ( f"Cannot infer type of {connector!r} expression involving these types: " ) with self.subTest(lhs=lhs, connector=connector, rhs=rhs): expr = CombinedExpression( Expression(lhs()), connector, Expression(rhs()), ) with self.assertRaisesMessage(FieldError, msg): expr.output_field def test_resolve_output_field_dates(self): tests = [ # Add - same type. (DateField, Combinable.ADD, DateField, FieldError), (DateTimeField, Combinable.ADD, DateTimeField, FieldError), (TimeField, Combinable.ADD, TimeField, FieldError), (DurationField, Combinable.ADD, DurationField, DurationField), # Add - different type. (DateField, Combinable.ADD, DurationField, DateTimeField), (DateTimeField, Combinable.ADD, DurationField, DateTimeField), (TimeField, Combinable.ADD, DurationField, TimeField), (DurationField, Combinable.ADD, DateField, DateTimeField), (DurationField, Combinable.ADD, DateTimeField, DateTimeField), (DurationField, Combinable.ADD, TimeField, TimeField), # Subtract - same type. (DateField, Combinable.SUB, DateField, DurationField), (DateTimeField, Combinable.SUB, DateTimeField, DurationField), (TimeField, Combinable.SUB, TimeField, DurationField), (DurationField, Combinable.SUB, DurationField, DurationField), # Subtract - different type. (DateField, Combinable.SUB, DurationField, DateTimeField), (DateTimeField, Combinable.SUB, DurationField, DateTimeField), (TimeField, Combinable.SUB, DurationField, TimeField), (DurationField, Combinable.SUB, DateField, FieldError), (DurationField, Combinable.SUB, DateTimeField, FieldError), (DurationField, Combinable.SUB, DateTimeField, FieldError), ] for lhs, connector, rhs, combined in tests: msg = ( f"Cannot infer type of {connector!r} expression involving these types: " ) with self.subTest(lhs=lhs, connector=connector, rhs=rhs, combined=combined): expr = CombinedExpression( Expression(lhs()), connector, Expression(rhs()), ) if issubclass(combined, Exception): with self.assertRaisesMessage(combined, msg): expr.output_field else: self.assertIsInstance(expr.output_field, combined) def test_mixed_char_date_with_annotate(self): queryset = Experiment.objects.annotate(nonsense=F("name") + F("assigned")) msg = ( "Cannot infer type of '+' expression involving these types: CharField, " "DateField. You must set output_field." 
) with self.assertRaisesMessage(FieldError, msg): list(queryset) class ExpressionWrapperTests(SimpleTestCase): def test_empty_group_by(self): expr = ExpressionWrapper(Value(3), output_field=IntegerField()) self.assertEqual(expr.get_group_by_cols(), []) def test_non_empty_group_by(self): value = Value("f") value.output_field = None expr = ExpressionWrapper(Lower(value), output_field=IntegerField()) group_by_cols = expr.get_group_by_cols() self.assertEqual(group_by_cols, [expr.expression]) self.assertEqual(group_by_cols[0].output_field, expr.output_field) class NegatedExpressionTests(TestCase): @classmethod def setUpTestData(cls): ceo = Employee.objects.create(firstname="Joe", lastname="Smith", salary=10) cls.eu_company = Company.objects.create( name="Example Inc.", num_employees=2300, num_chairs=5, ceo=ceo, based_in_eu=True, ) cls.non_eu_company = Company.objects.create( name="Foobar Ltd.", num_employees=3, num_chairs=4, ceo=ceo, based_in_eu=False, ) def test_invert(self): f = F("field") self.assertEqual(~f, NegatedExpression(f)) self.assertIsNot(~~f, f) self.assertEqual(~~f, f) def test_filter(self): self.assertSequenceEqual( Company.objects.filter(~F("based_in_eu")), [self.non_eu_company], ) qs = Company.objects.annotate(eu_required=~Value(False)) self.assertSequenceEqual( qs.filter(based_in_eu=F("eu_required")).order_by("eu_required"), [self.eu_company], ) self.assertSequenceEqual( qs.filter(based_in_eu=~~F("eu_required")), [self.eu_company], ) self.assertSequenceEqual( qs.filter(based_in_eu=~F("eu_required")), [self.non_eu_company], ) self.assertSequenceEqual(qs.filter(based_in_eu=~F("based_in_eu")), []) def test_values(self): self.assertSequenceEqual( Company.objects.annotate(negated=~F("based_in_eu")) .values_list("name", "negated") .order_by("name"), [("Example Inc.", False), ("Foobar Ltd.", True)], ) class OrderByTests(SimpleTestCase): def test_equal(self): self.assertEqual( OrderBy(F("field"), nulls_last=True), OrderBy(F("field"), nulls_last=True), ) self.assertNotEqual( OrderBy(F("field"), nulls_last=True), OrderBy(F("field")), ) def test_hash(self): self.assertEqual( hash(OrderBy(F("field"), nulls_last=True)), hash(OrderBy(F("field"), nulls_last=True)), ) self.assertNotEqual( hash(OrderBy(F("field"), nulls_last=True)), hash(OrderBy(F("field"))), ) def test_nulls_false(self): msg = "nulls_first and nulls_last values must be True or None." with self.assertRaisesMessage(ValueError, msg): OrderBy(F("field"), nulls_first=False) with self.assertRaisesMessage(ValueError, msg): OrderBy(F("field"), nulls_last=False) with self.assertRaisesMessage(ValueError, msg): F("field").asc(nulls_first=False) with self.assertRaisesMessage(ValueError, msg): F("field").desc(nulls_last=False)
aa6eaeb721c6cb5ed5f784a1b22bc930e8788c587352e1637f95bb71bd8640a0
import copy import datetime import pickle from operator import attrgetter from django.core.exceptions import FieldError from django.db import models from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature from django.test.utils import isolate_apps from django.utils import translation from .models import ( Article, ArticleIdea, ArticleTag, ArticleTranslation, Country, Friendship, Group, Membership, NewsArticle, Person, ) # Note that these tests are testing internal implementation details. # ForeignObject is not part of public API. class MultiColumnFKTests(TestCase): @classmethod def setUpTestData(cls): # Creating countries cls.usa = Country.objects.create(name="United States of America") cls.soviet_union = Country.objects.create(name="Soviet Union") # Creating People cls.bob = Person.objects.create(name="Bob", person_country=cls.usa) cls.jim = Person.objects.create(name="Jim", person_country=cls.usa) cls.george = Person.objects.create(name="George", person_country=cls.usa) cls.jane = Person.objects.create(name="Jane", person_country=cls.soviet_union) cls.mark = Person.objects.create(name="Mark", person_country=cls.soviet_union) cls.sam = Person.objects.create(name="Sam", person_country=cls.soviet_union) # Creating Groups cls.kgb = Group.objects.create(name="KGB", group_country=cls.soviet_union) cls.cia = Group.objects.create(name="CIA", group_country=cls.usa) cls.republican = Group.objects.create(name="Republican", group_country=cls.usa) cls.democrat = Group.objects.create(name="Democrat", group_country=cls.usa) def test_get_succeeds_on_multicolumn_match(self): # Membership objects have access to their related Person if both # country_ids match between them membership = Membership.objects.create( membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id, ) person = membership.person self.assertEqual((person.id, person.name), (self.bob.id, "Bob")) def test_get_fails_on_multicolumn_mismatch(self): # Membership objects returns DoesNotExist error when there is no # Person with the same id and country_id membership = Membership.objects.create( membership_country_id=self.usa.id, person_id=self.jane.id, group_id=self.cia.id, ) with self.assertRaises(Person.DoesNotExist): getattr(membership, "person") def test_reverse_query_returns_correct_result(self): # Creating a valid membership because it has the same country has the person Membership.objects.create( membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id, ) # Creating an invalid membership because it has a different country has # the person. 
Membership.objects.create( membership_country_id=self.soviet_union.id, person_id=self.bob.id, group_id=self.republican.id, ) with self.assertNumQueries(1): membership = self.bob.membership_set.get() self.assertEqual(membership.group_id, self.cia.id) self.assertIs(membership.person, self.bob) def test_query_filters_correctly(self): # Creating a to valid memberships Membership.objects.create( membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id, ) Membership.objects.create( membership_country_id=self.usa.id, person_id=self.jim.id, group_id=self.cia.id, ) # Creating an invalid membership Membership.objects.create( membership_country_id=self.soviet_union.id, person_id=self.george.id, group_id=self.cia.id, ) self.assertQuerySetEqual( Membership.objects.filter(person__name__contains="o"), [self.bob.id], attrgetter("person_id"), ) def test_reverse_query_filters_correctly(self): timemark = datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None) timedelta = datetime.timedelta(days=1) # Creating a to valid memberships Membership.objects.create( membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id, date_joined=timemark - timedelta, ) Membership.objects.create( membership_country_id=self.usa.id, person_id=self.jim.id, group_id=self.cia.id, date_joined=timemark + timedelta, ) # Creating an invalid membership Membership.objects.create( membership_country_id=self.soviet_union.id, person_id=self.george.id, group_id=self.cia.id, date_joined=timemark + timedelta, ) self.assertQuerySetEqual( Person.objects.filter(membership__date_joined__gte=timemark), ["Jim"], attrgetter("name"), ) def test_forward_in_lookup_filters_correctly(self): Membership.objects.create( membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id, ) Membership.objects.create( membership_country_id=self.usa.id, person_id=self.jim.id, group_id=self.cia.id, ) # Creating an invalid membership Membership.objects.create( membership_country_id=self.soviet_union.id, person_id=self.george.id, group_id=self.cia.id, ) self.assertQuerySetEqual( Membership.objects.filter(person__in=[self.george, self.jim]), [ self.jim.id, ], attrgetter("person_id"), ) self.assertQuerySetEqual( Membership.objects.filter(person__in=Person.objects.filter(name="Jim")), [ self.jim.id, ], attrgetter("person_id"), ) def test_double_nested_query(self): m1 = Membership.objects.create( membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id, ) m2 = Membership.objects.create( membership_country_id=self.usa.id, person_id=self.jim.id, group_id=self.cia.id, ) Friendship.objects.create( from_friend_country_id=self.usa.id, from_friend_id=self.bob.id, to_friend_country_id=self.usa.id, to_friend_id=self.jim.id, ) self.assertSequenceEqual( Membership.objects.filter( person__in=Person.objects.filter( from_friend__in=Friendship.objects.filter( to_friend__in=Person.objects.all() ) ) ), [m1], ) self.assertSequenceEqual( Membership.objects.exclude( person__in=Person.objects.filter( from_friend__in=Friendship.objects.filter( to_friend__in=Person.objects.all() ) ) ), [m2], ) def test_select_related_foreignkey_forward_works(self): Membership.objects.create( membership_country=self.usa, person=self.bob, group=self.cia ) Membership.objects.create( membership_country=self.usa, person=self.jim, group=self.democrat ) with self.assertNumQueries(1): people = [ m.person for m in Membership.objects.select_related("person").order_by("pk") ] normal_people = [m.person for m in 
Membership.objects.order_by("pk")] self.assertEqual(people, normal_people) def test_prefetch_foreignkey_forward_works(self): Membership.objects.create( membership_country=self.usa, person=self.bob, group=self.cia ) Membership.objects.create( membership_country=self.usa, person=self.jim, group=self.democrat ) with self.assertNumQueries(2): people = [ m.person for m in Membership.objects.prefetch_related("person").order_by("pk") ] normal_people = [m.person for m in Membership.objects.order_by("pk")] self.assertEqual(people, normal_people) def test_prefetch_foreignkey_reverse_works(self): Membership.objects.create( membership_country=self.usa, person=self.bob, group=self.cia ) Membership.objects.create( membership_country=self.usa, person=self.jim, group=self.democrat ) with self.assertNumQueries(2): membership_sets = [ list(p.membership_set.all()) for p in Person.objects.prefetch_related("membership_set").order_by( "pk" ) ] with self.assertNumQueries(7): normal_membership_sets = [ list(p.membership_set.all()) for p in Person.objects.order_by("pk") ] self.assertEqual(membership_sets, normal_membership_sets) def test_m2m_through_forward_returns_valid_members(self): # We start out by making sure that the Group 'CIA' has no members. self.assertQuerySetEqual(self.cia.members.all(), []) Membership.objects.create( membership_country=self.usa, person=self.bob, group=self.cia ) Membership.objects.create( membership_country=self.usa, person=self.jim, group=self.cia ) # Bob and Jim should be members of the CIA. self.assertQuerySetEqual( self.cia.members.all(), ["Bob", "Jim"], attrgetter("name") ) def test_m2m_through_reverse_returns_valid_members(self): # We start out by making sure that Bob is in no groups. self.assertQuerySetEqual(self.bob.groups.all(), []) Membership.objects.create( membership_country=self.usa, person=self.bob, group=self.cia ) Membership.objects.create( membership_country=self.usa, person=self.bob, group=self.republican ) # Bob should be in the CIA and a Republican self.assertQuerySetEqual( self.bob.groups.all(), ["CIA", "Republican"], attrgetter("name") ) def test_m2m_through_forward_ignores_invalid_members(self): # We start out by making sure that the Group 'CIA' has no members. self.assertQuerySetEqual(self.cia.members.all(), []) # Something adds jane to group CIA but Jane is in Soviet Union which # isn't CIA's country. Membership.objects.create( membership_country=self.usa, person=self.jane, group=self.cia ) # There should still be no members in CIA self.assertQuerySetEqual(self.cia.members.all(), []) def test_m2m_through_reverse_ignores_invalid_members(self): # We start out by making sure that Jane has no groups. self.assertQuerySetEqual(self.jane.groups.all(), []) # Something adds jane to group CIA but Jane is in Soviet Union which # isn't CIA's country. Membership.objects.create( membership_country=self.usa, person=self.jane, group=self.cia ) # Jane should still not be in any groups self.assertQuerySetEqual(self.jane.groups.all(), []) def test_m2m_through_on_self_works(self): self.assertQuerySetEqual(self.jane.friends.all(), []) Friendship.objects.create( from_friend_country=self.jane.person_country, from_friend=self.jane, to_friend_country=self.george.person_country, to_friend=self.george, ) self.assertQuerySetEqual( self.jane.friends.all(), ["George"], attrgetter("name") ) def test_m2m_through_on_self_ignores_mismatch_columns(self): self.assertQuerySetEqual(self.jane.friends.all(), []) # Note that we use ids instead of instances. 
This is because instances # on ForeignObject properties will set all related field off of the # given instance. Friendship.objects.create( from_friend_id=self.jane.id, to_friend_id=self.george.id, to_friend_country_id=self.jane.person_country_id, from_friend_country_id=self.george.person_country_id, ) self.assertQuerySetEqual(self.jane.friends.all(), []) def test_prefetch_related_m2m_forward_works(self): Membership.objects.create( membership_country=self.usa, person=self.bob, group=self.cia ) Membership.objects.create( membership_country=self.usa, person=self.jim, group=self.democrat ) with self.assertNumQueries(2): members_lists = [ list(g.members.all()) for g in Group.objects.prefetch_related("members") ] normal_members_lists = [list(g.members.all()) for g in Group.objects.all()] self.assertEqual(members_lists, normal_members_lists) def test_prefetch_related_m2m_reverse_works(self): Membership.objects.create( membership_country=self.usa, person=self.bob, group=self.cia ) Membership.objects.create( membership_country=self.usa, person=self.jim, group=self.democrat ) with self.assertNumQueries(2): groups_lists = [ list(p.groups.all()) for p in Person.objects.prefetch_related("groups") ] normal_groups_lists = [list(p.groups.all()) for p in Person.objects.all()] self.assertEqual(groups_lists, normal_groups_lists) @translation.override("fi") def test_translations(self): a1 = Article.objects.create(pub_date=datetime.date.today()) at1_fi = ArticleTranslation( article=a1, lang="fi", title="Otsikko", body="Diipadaapa" ) at1_fi.save() at2_en = ArticleTranslation( article=a1, lang="en", title="Title", body="Lalalalala" ) at2_en.save() self.assertEqual(Article.objects.get(pk=a1.pk).active_translation, at1_fi) with self.assertNumQueries(1): fetched = Article.objects.select_related("active_translation").get( active_translation__title="Otsikko" ) self.assertEqual(fetched.active_translation.title, "Otsikko") a2 = Article.objects.create(pub_date=datetime.date.today()) at2_fi = ArticleTranslation( article=a2, lang="fi", title="Atsikko", body="Diipadaapa", abstract="dipad" ) at2_fi.save() a3 = Article.objects.create(pub_date=datetime.date.today()) at3_en = ArticleTranslation( article=a3, lang="en", title="A title", body="lalalalala", abstract="lala" ) at3_en.save() # Test model initialization with active_translation field. a3 = Article(id=a3.id, pub_date=a3.pub_date, active_translation=at3_en) a3.save() self.assertEqual( list(Article.objects.filter(active_translation__abstract=None)), [a1, a3] ) self.assertEqual( list( Article.objects.filter( active_translation__abstract=None, active_translation__pk__isnull=False, ) ), [a1], ) with translation.override("en"): self.assertEqual( list(Article.objects.filter(active_translation__abstract=None)), [a1, a2], ) def test_foreign_key_raises_informative_does_not_exist(self): referrer = ArticleTranslation() with self.assertRaisesMessage( Article.DoesNotExist, "ArticleTranslation has no article" ): referrer.article def test_foreign_key_related_query_name(self): a1 = Article.objects.create(pub_date=datetime.date.today()) ArticleTag.objects.create(article=a1, name="foo") self.assertEqual(Article.objects.filter(tag__name="foo").count(), 1) self.assertEqual(Article.objects.filter(tag__name="bar").count(), 0) msg = ( "Cannot resolve keyword 'tags' into field. 
Choices are: " "active_translation, active_translation_q, articletranslation, " "id, idea_things, newsarticle, pub_date, tag" ) with self.assertRaisesMessage(FieldError, msg): Article.objects.filter(tags__name="foo") def test_many_to_many_related_query_name(self): a1 = Article.objects.create(pub_date=datetime.date.today()) i1 = ArticleIdea.objects.create(name="idea1") a1.ideas.add(i1) self.assertEqual(Article.objects.filter(idea_things__name="idea1").count(), 1) self.assertEqual(Article.objects.filter(idea_things__name="idea2").count(), 0) msg = ( "Cannot resolve keyword 'ideas' into field. Choices are: " "active_translation, active_translation_q, articletranslation, " "id, idea_things, newsarticle, pub_date, tag" ) with self.assertRaisesMessage(FieldError, msg): Article.objects.filter(ideas__name="idea1") @translation.override("fi") def test_inheritance(self): na = NewsArticle.objects.create(pub_date=datetime.date.today()) ArticleTranslation.objects.create( article=na, lang="fi", title="foo", body="bar" ) self.assertSequenceEqual( NewsArticle.objects.select_related("active_translation"), [na] ) with self.assertNumQueries(1): self.assertEqual( NewsArticle.objects.select_related("active_translation")[ 0 ].active_translation.title, "foo", ) @skipUnlessDBFeature("has_bulk_insert") def test_batch_create_foreign_object(self): objs = [ Person(name="abcd_%s" % i, person_country=self.usa) for i in range(0, 5) ] Person.objects.bulk_create(objs, 10) def test_isnull_lookup(self): m1 = Membership.objects.create( membership_country=self.usa, person=self.bob, group_id=None ) m2 = Membership.objects.create( membership_country=self.usa, person=self.bob, group=self.cia ) self.assertSequenceEqual( Membership.objects.filter(group__isnull=True), [m1], ) self.assertSequenceEqual( Membership.objects.filter(group__isnull=False), [m2], ) class TestModelCheckTests(SimpleTestCase): @isolate_apps("foreign_object") def test_check_composite_foreign_object(self): class Parent(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() class Meta: unique_together = (("a", "b"),) class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() value = models.CharField(max_length=255) parent = models.ForeignObject( Parent, on_delete=models.SET_NULL, from_fields=("a", "b"), to_fields=("a", "b"), related_name="children", ) self.assertEqual(Child._meta.get_field("parent").check(from_model=Child), []) @isolate_apps("foreign_object") def test_check_subset_composite_foreign_object(self): class Parent(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() c = models.PositiveIntegerField() class Meta: unique_together = (("a", "b"),) class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() c = models.PositiveIntegerField() d = models.CharField(max_length=255) parent = models.ForeignObject( Parent, on_delete=models.SET_NULL, from_fields=("a", "b", "c"), to_fields=("a", "b", "c"), related_name="children", ) self.assertEqual(Child._meta.get_field("parent").check(from_model=Child), []) class TestExtraJoinFilterQ(TestCase): @translation.override("fi") def test_extra_join_filter_q(self): a = Article.objects.create(pub_date=datetime.datetime.today()) ArticleTranslation.objects.create( article=a, lang="fi", title="title", body="body" ) qs = Article.objects.all() with self.assertNumQueries(2): self.assertEqual(qs[0].active_translation_q.title, "title") qs = qs.select_related("active_translation_q") with 
self.assertNumQueries(1): self.assertEqual(qs[0].active_translation_q.title, "title") class TestCachedPathInfo(TestCase): def test_equality(self): """ The path_infos and reverse_path_infos attributes are equivalent to calling the get_<method>() with no arguments. """ foreign_object = Membership._meta.get_field("person") self.assertEqual( foreign_object.path_infos, foreign_object.get_path_info(), ) self.assertEqual( foreign_object.reverse_path_infos, foreign_object.get_reverse_path_info(), ) def test_copy_removes_direct_cached_values(self): """ Shallow copying a ForeignObject (or a ForeignObjectRel) removes the object's direct cached PathInfo values. """ foreign_object = Membership._meta.get_field("person") # Trigger storage of cached_property into ForeignObject's __dict__. foreign_object.path_infos foreign_object.reverse_path_infos # The ForeignObjectRel doesn't have reverse_path_infos. foreign_object.remote_field.path_infos self.assertIn("path_infos", foreign_object.__dict__) self.assertIn("reverse_path_infos", foreign_object.__dict__) self.assertIn("path_infos", foreign_object.remote_field.__dict__) # Cached value is removed via __getstate__() on ForeignObjectRel # because no __copy__() method exists, so __reduce_ex__() is used. remote_field_copy = copy.copy(foreign_object.remote_field) self.assertNotIn("path_infos", remote_field_copy.__dict__) # Cached values are removed via __copy__() on ForeignObject for # consistency of behavior. foreign_object_copy = copy.copy(foreign_object) self.assertNotIn("path_infos", foreign_object_copy.__dict__) self.assertNotIn("reverse_path_infos", foreign_object_copy.__dict__) # ForeignObjectRel's remains because it's part of a shallow copy. self.assertIn("path_infos", foreign_object_copy.remote_field.__dict__) def test_deepcopy_removes_cached_values(self): """ Deep copying a ForeignObject removes the object's cached PathInfo values, including those of the related ForeignObjectRel. """ foreign_object = Membership._meta.get_field("person") # Trigger storage of cached_property into ForeignObject's __dict__. foreign_object.path_infos foreign_object.reverse_path_infos # The ForeignObjectRel doesn't have reverse_path_infos. foreign_object.remote_field.path_infos self.assertIn("path_infos", foreign_object.__dict__) self.assertIn("reverse_path_infos", foreign_object.__dict__) self.assertIn("path_infos", foreign_object.remote_field.__dict__) # Cached value is removed via __getstate__() on ForeignObjectRel # because no __deepcopy__() method exists, so __reduce_ex__() is used. remote_field_copy = copy.deepcopy(foreign_object.remote_field) self.assertNotIn("path_infos", remote_field_copy.__dict__) # Field.__deepcopy__() internally uses __copy__() on both the # ForeignObject and ForeignObjectRel, so all cached values are removed. foreign_object_copy = copy.deepcopy(foreign_object) self.assertNotIn("path_infos", foreign_object_copy.__dict__) self.assertNotIn("reverse_path_infos", foreign_object_copy.__dict__) self.assertNotIn("path_infos", foreign_object_copy.remote_field.__dict__) def test_pickling_foreignobjectrel(self): """ Pickling a ForeignObjectRel removes the path_infos attribute. ForeignObjectRel implements __getstate__(), so copy and pickle modules both use that, but ForeignObject implements __reduce__() and __copy__() separately, so doesn't share the same behaviour. """ foreign_object_rel = Membership._meta.get_field("person").remote_field # Trigger storage of cached_property into ForeignObjectRel's __dict__. 
foreign_object_rel.path_infos self.assertIn("path_infos", foreign_object_rel.__dict__) foreign_object_rel_restored = pickle.loads(pickle.dumps(foreign_object_rel)) self.assertNotIn("path_infos", foreign_object_rel_restored.__dict__) def test_pickling_foreignobject(self): """ Pickling a ForeignObject does not remove the cached PathInfo values. ForeignObject will always keep the path_infos and reverse_path_infos attributes within the same process, because of the way Field.__reduce__() is used for restoring values. """ foreign_object = Membership._meta.get_field("person") # Trigger storage of cached_property into ForeignObjectRel's __dict__ foreign_object.path_infos foreign_object.reverse_path_infos self.assertIn("path_infos", foreign_object.__dict__) self.assertIn("reverse_path_infos", foreign_object.__dict__) foreign_object_restored = pickle.loads(pickle.dumps(foreign_object)) self.assertIn("path_infos", foreign_object_restored.__dict__) self.assertIn("reverse_path_infos", foreign_object_restored.__dict__)
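# --- Illustration (not part of the original test suite) ----------------------
# A hedged sketch of the kind of composite (multi-column) ForeignObject the
# tests above exercise. It mirrors, but does not reproduce, the
# Membership -> Person relation from this app's models module: the relation
# joins on both the person id and a country column, so a row whose country
# does not match resolves to no related object. Model and field names below
# are illustrative assumptions; models, SimpleTestCase, and isolate_apps come
# from this module's imports.


class CompositeForeignObjectSketchTests(SimpleTestCase):
    @isolate_apps("foreign_object")
    def test_composite_foreign_object_definition(self):
        class SketchPerson(models.Model):
            name = models.CharField(max_length=128)
            country_id = models.IntegerField()

        class SketchMembership(models.Model):
            country_id = models.IntegerField()
            person_id = models.IntegerField()
            person = models.ForeignObject(
                SketchPerson,
                on_delete=models.CASCADE,
                # Both columns must match for the related object to resolve.
                from_fields=("person_id", "country_id"),
                to_fields=("id", "country_id"),
                related_name="sketch_memberships",
            )

        # The to_fields contain SketchPerson's unique primary key, so the
        # field-level checks pass, as in the subset test above.
        self.assertEqual(
            SketchMembership._meta.get_field("person").check(
                from_model=SketchMembership
            ),
            [],
        )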
5275ef64404ad60d317e4eff092090bba8ffee5db84143ea6860e35f65770dcd
import datetime import re import sys import zoneinfo from contextlib import contextmanager from unittest import SkipTest, skipIf from xml.dom.minidom import parseString from django.contrib.auth.models import User from django.core import serializers from django.db import connection from django.db.models import F, Max, Min from django.http import HttpRequest from django.template import ( Context, RequestContext, Template, TemplateSyntaxError, context_processors, ) from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import requires_tz_support from django.urls import reverse from django.utils import timezone, translation from django.utils.timezone import timedelta from .forms import ( EventForm, EventLocalizedForm, EventLocalizedModelForm, EventModelForm, EventSplitForm, ) from .models import ( AllDayEvent, DailyEvent, Event, MaybeEvent, Session, SessionEvent, Timestamp, ) try: import yaml HAS_YAML = True except ImportError: HAS_YAML = False # These tests use the EAT (Eastern Africa Time) and ICT (Indochina Time) # who don't have daylight saving time, so we can represent them easily # with fixed offset timezones and use them directly as tzinfo in the # constructors. # settings.TIME_ZONE is forced to EAT. Most tests use a variant of # datetime.datetime(2011, 9, 1, 13, 20, 30), which translates to # 10:20:30 in UTC and 17:20:30 in ICT. UTC = datetime.timezone.utc EAT = timezone.get_fixed_timezone(180) # Africa/Nairobi ICT = timezone.get_fixed_timezone(420) # Asia/Bangkok @contextmanager def override_database_connection_timezone(timezone): try: orig_timezone = connection.settings_dict["TIME_ZONE"] connection.settings_dict["TIME_ZONE"] = timezone # Clear cached properties, after first accessing them to ensure they exist. connection.timezone del connection.timezone connection.timezone_name del connection.timezone_name yield finally: connection.settings_dict["TIME_ZONE"] = orig_timezone # Clear cached properties, after first accessing them to ensure they exist. 
connection.timezone del connection.timezone connection.timezone_name del connection.timezone_name @override_settings(TIME_ZONE="Africa/Nairobi", USE_TZ=False) class LegacyDatabaseTests(TestCase): def test_naive_datetime(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30) Event.objects.create(dt=dt) event = Event.objects.get() self.assertEqual(event.dt, dt) def test_naive_datetime_with_microsecond(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060) Event.objects.create(dt=dt) event = Event.objects.get() self.assertEqual(event.dt, dt) @skipUnlessDBFeature("supports_timezones") def test_aware_datetime_in_local_timezone(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) Event.objects.create(dt=dt) event = Event.objects.get() self.assertIsNone(event.dt.tzinfo) # interpret the naive datetime in local time to get the correct value self.assertEqual(event.dt.replace(tzinfo=EAT), dt) @skipUnlessDBFeature("supports_timezones") def test_aware_datetime_in_local_timezone_with_microsecond(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT) Event.objects.create(dt=dt) event = Event.objects.get() self.assertIsNone(event.dt.tzinfo) # interpret the naive datetime in local time to get the correct value self.assertEqual(event.dt.replace(tzinfo=EAT), dt) @skipUnlessDBFeature("supports_timezones") def test_aware_datetime_in_utc(self): dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) Event.objects.create(dt=dt) event = Event.objects.get() self.assertIsNone(event.dt.tzinfo) # interpret the naive datetime in local time to get the correct value self.assertEqual(event.dt.replace(tzinfo=EAT), dt) @skipUnlessDBFeature("supports_timezones") def test_aware_datetime_in_other_timezone(self): dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) Event.objects.create(dt=dt) event = Event.objects.get() self.assertIsNone(event.dt.tzinfo) # interpret the naive datetime in local time to get the correct value self.assertEqual(event.dt.replace(tzinfo=EAT), dt) @skipIfDBFeature("supports_timezones") def test_aware_datetime_unsupported(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) msg = "backend does not support timezone-aware datetimes when USE_TZ is False." 
with self.assertRaisesMessage(ValueError, msg): Event.objects.create(dt=dt) def test_auto_now_and_auto_now_add(self): now = datetime.datetime.now() past = now - datetime.timedelta(seconds=2) future = now + datetime.timedelta(seconds=2) Timestamp.objects.create() ts = Timestamp.objects.get() self.assertLess(past, ts.created) self.assertLess(past, ts.updated) self.assertGreater(future, ts.updated) self.assertGreater(future, ts.updated) def test_query_filter(self): dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30) dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30) Event.objects.create(dt=dt1) Event.objects.create(dt=dt2) self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2) self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1) self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1) self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0) def test_query_datetime_lookups(self): Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0)) Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0)) self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2) self.assertEqual(Event.objects.filter(dt__month=1).count(), 2) self.assertEqual(Event.objects.filter(dt__day=1).count(), 2) self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2) self.assertEqual(Event.objects.filter(dt__iso_week_day=6).count(), 2) self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1) self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2) self.assertEqual(Event.objects.filter(dt__second=0).count(), 2) def test_query_aggregation(self): # Only min and max make sense for datetimes. Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20)) Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30)) Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40)) result = Event.objects.aggregate(Min("dt"), Max("dt")) self.assertEqual( result, { "dt__min": datetime.datetime(2011, 9, 1, 3, 20, 40), "dt__max": datetime.datetime(2011, 9, 1, 23, 20, 20), }, ) def test_query_annotation(self): # Only min and max make sense for datetimes. 
morning = Session.objects.create(name="morning") afternoon = Session.objects.create(name="afternoon") SessionEvent.objects.create( dt=datetime.datetime(2011, 9, 1, 23, 20, 20), session=afternoon ) SessionEvent.objects.create( dt=datetime.datetime(2011, 9, 1, 13, 20, 30), session=afternoon ) SessionEvent.objects.create( dt=datetime.datetime(2011, 9, 1, 3, 20, 40), session=morning ) morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40) afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30) self.assertQuerySetEqual( Session.objects.annotate(dt=Min("events__dt")).order_by("dt"), [morning_min_dt, afternoon_min_dt], transform=lambda d: d.dt, ) self.assertQuerySetEqual( Session.objects.annotate(dt=Min("events__dt")).filter( dt__lt=afternoon_min_dt ), [morning_min_dt], transform=lambda d: d.dt, ) self.assertQuerySetEqual( Session.objects.annotate(dt=Min("events__dt")).filter( dt__gte=afternoon_min_dt ), [afternoon_min_dt], transform=lambda d: d.dt, ) def test_query_datetimes(self): Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0)) Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0)) self.assertSequenceEqual( Event.objects.datetimes("dt", "year"), [datetime.datetime(2011, 1, 1, 0, 0, 0)], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "month"), [datetime.datetime(2011, 1, 1, 0, 0, 0)], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "day"), [datetime.datetime(2011, 1, 1, 0, 0, 0)], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "hour"), [ datetime.datetime(2011, 1, 1, 1, 0, 0), datetime.datetime(2011, 1, 1, 4, 0, 0), ], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "minute"), [ datetime.datetime(2011, 1, 1, 1, 30, 0), datetime.datetime(2011, 1, 1, 4, 30, 0), ], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "second"), [ datetime.datetime(2011, 1, 1, 1, 30, 0), datetime.datetime(2011, 1, 1, 4, 30, 0), ], ) def test_raw_sql(self): # Regression test for #17755 dt = datetime.datetime(2011, 9, 1, 13, 20, 30) event = Event.objects.create(dt=dt) self.assertEqual( list( Event.objects.raw("SELECT * FROM timezones_event WHERE dt = %s", [dt]) ), [event], ) def test_cursor_execute_accepts_naive_datetime(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30) with connection.cursor() as cursor: cursor.execute("INSERT INTO timezones_event (dt) VALUES (%s)", [dt]) event = Event.objects.get() self.assertEqual(event.dt, dt) def test_cursor_execute_returns_naive_datetime(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30) Event.objects.create(dt=dt) with connection.cursor() as cursor: cursor.execute("SELECT dt FROM timezones_event WHERE dt = %s", [dt]) self.assertEqual(cursor.fetchall()[0][0], dt) def test_filter_date_field_with_aware_datetime(self): # Regression test for #17742 day = datetime.date(2011, 9, 1) AllDayEvent.objects.create(day=day) # This is 2011-09-02T01:30:00+03:00 in EAT dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC) self.assertTrue(AllDayEvent.objects.filter(day__gte=dt).exists()) @override_settings(TIME_ZONE="Africa/Nairobi", USE_TZ=True) class NewDatabaseTests(TestCase): naive_warning = "DateTimeField Event.dt received a naive datetime" @skipIfDBFeature("supports_timezones") def test_aware_time_unsupported(self): t = datetime.time(13, 20, 30, tzinfo=EAT) msg = "backend does not support timezone-aware times." 
with self.assertRaisesMessage(ValueError, msg): DailyEvent.objects.create(time=t) @requires_tz_support def test_naive_datetime(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30) with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): Event.objects.create(dt=dt) event = Event.objects.get() # naive datetimes are interpreted in local time self.assertEqual(event.dt, dt.replace(tzinfo=EAT)) @requires_tz_support def test_datetime_from_date(self): dt = datetime.date(2011, 9, 1) with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): Event.objects.create(dt=dt) event = Event.objects.get() self.assertEqual(event.dt, datetime.datetime(2011, 9, 1, tzinfo=EAT)) @requires_tz_support def test_naive_datetime_with_microsecond(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060) with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): Event.objects.create(dt=dt) event = Event.objects.get() # naive datetimes are interpreted in local time self.assertEqual(event.dt, dt.replace(tzinfo=EAT)) def test_aware_datetime_in_local_timezone(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) Event.objects.create(dt=dt) event = Event.objects.get() self.assertEqual(event.dt, dt) def test_aware_datetime_in_local_timezone_with_microsecond(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT) Event.objects.create(dt=dt) event = Event.objects.get() self.assertEqual(event.dt, dt) def test_aware_datetime_in_utc(self): dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) Event.objects.create(dt=dt) event = Event.objects.get() self.assertEqual(event.dt, dt) def test_aware_datetime_in_other_timezone(self): dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) Event.objects.create(dt=dt) event = Event.objects.get() self.assertEqual(event.dt, dt) def test_auto_now_and_auto_now_add(self): now = timezone.now() past = now - datetime.timedelta(seconds=2) future = now + datetime.timedelta(seconds=2) Timestamp.objects.create() ts = Timestamp.objects.get() self.assertLess(past, ts.created) self.assertLess(past, ts.updated) self.assertGreater(future, ts.updated) self.assertGreater(future, ts.updated) def test_query_filter(self): dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT) dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30, tzinfo=EAT) Event.objects.create(dt=dt1) Event.objects.create(dt=dt2) self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2) self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1) self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1) self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0) def test_query_filter_with_timezones(self): tz = zoneinfo.ZoneInfo("Europe/Paris") dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=tz) Event.objects.create(dt=dt) next = dt + datetime.timedelta(seconds=3) prev = dt - datetime.timedelta(seconds=3) self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1) self.assertEqual(Event.objects.filter(dt__exact=next).count(), 0) self.assertEqual(Event.objects.filter(dt__in=(prev, next)).count(), 0) self.assertEqual(Event.objects.filter(dt__in=(prev, dt, next)).count(), 1) self.assertEqual(Event.objects.filter(dt__range=(prev, next)).count(), 1) def test_query_convert_timezones(self): # Connection timezone is equal to the current timezone, datetime # shouldn't be converted. 
with override_database_connection_timezone("Africa/Nairobi"): event_datetime = datetime.datetime(2016, 1, 2, 23, 10, 11, 123, tzinfo=EAT) event = Event.objects.create(dt=event_datetime) self.assertEqual( Event.objects.filter(dt__date=event_datetime.date()).first(), event ) # Connection timezone is not equal to the current timezone, datetime # should be converted (-4h). with override_database_connection_timezone("Asia/Bangkok"): event_datetime = datetime.datetime(2016, 1, 2, 3, 10, 11, tzinfo=ICT) event = Event.objects.create(dt=event_datetime) self.assertEqual( Event.objects.filter(dt__date=datetime.date(2016, 1, 1)).first(), event ) @requires_tz_support def test_query_filter_with_naive_datetime(self): dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT) Event.objects.create(dt=dt) dt = dt.replace(tzinfo=None) # naive datetimes are interpreted in local time with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1) with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): self.assertEqual(Event.objects.filter(dt__lte=dt).count(), 1) with self.assertWarnsMessage(RuntimeWarning, self.naive_warning): self.assertEqual(Event.objects.filter(dt__gt=dt).count(), 0) @skipUnlessDBFeature("has_zoneinfo_database") def test_query_datetime_lookups(self): Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT)) Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)) self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2) self.assertEqual(Event.objects.filter(dt__month=1).count(), 2) self.assertEqual(Event.objects.filter(dt__day=1).count(), 2) self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2) self.assertEqual(Event.objects.filter(dt__iso_week_day=6).count(), 2) self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1) self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2) self.assertEqual(Event.objects.filter(dt__second=0).count(), 2) @skipUnlessDBFeature("has_zoneinfo_database") def test_query_datetime_lookups_in_other_timezone(self): Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT)) Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)) with timezone.override(UTC): # These two dates fall in the same day in EAT, but in different days, # years and months in UTC. self.assertEqual(Event.objects.filter(dt__year=2011).count(), 1) self.assertEqual(Event.objects.filter(dt__month=1).count(), 1) self.assertEqual(Event.objects.filter(dt__day=1).count(), 1) self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 1) self.assertEqual(Event.objects.filter(dt__iso_week_day=6).count(), 1) self.assertEqual(Event.objects.filter(dt__hour=22).count(), 1) self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2) self.assertEqual(Event.objects.filter(dt__second=0).count(), 2) def test_query_aggregation(self): # Only min and max make sense for datetimes. Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT)) Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)) Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)) result = Event.objects.aggregate(Min("dt"), Max("dt")) self.assertEqual( result, { "dt__min": datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), "dt__max": datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), }, ) def test_query_annotation(self): # Only min and max make sense for datetimes. 
morning = Session.objects.create(name="morning") afternoon = Session.objects.create(name="afternoon") SessionEvent.objects.create( dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), session=afternoon ) SessionEvent.objects.create( dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), session=afternoon ) SessionEvent.objects.create( dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), session=morning ) morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT) afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) self.assertQuerySetEqual( Session.objects.annotate(dt=Min("events__dt")).order_by("dt"), [morning_min_dt, afternoon_min_dt], transform=lambda d: d.dt, ) self.assertQuerySetEqual( Session.objects.annotate(dt=Min("events__dt")).filter( dt__lt=afternoon_min_dt ), [morning_min_dt], transform=lambda d: d.dt, ) self.assertQuerySetEqual( Session.objects.annotate(dt=Min("events__dt")).filter( dt__gte=afternoon_min_dt ), [afternoon_min_dt], transform=lambda d: d.dt, ) @skipUnlessDBFeature("has_zoneinfo_database") def test_query_datetimes(self): Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT)) Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)) self.assertSequenceEqual( Event.objects.datetimes("dt", "year"), [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "month"), [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "day"), [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "hour"), [ datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=EAT), datetime.datetime(2011, 1, 1, 4, 0, 0, tzinfo=EAT), ], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "minute"), [ datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT), datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT), ], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "second"), [ datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT), datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT), ], ) @skipUnlessDBFeature("has_zoneinfo_database") def test_query_datetimes_in_other_timezone(self): Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT)) Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)) with timezone.override(UTC): self.assertSequenceEqual( Event.objects.datetimes("dt", "year"), [ datetime.datetime(2010, 1, 1, 0, 0, 0, tzinfo=UTC), datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC), ], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "month"), [ datetime.datetime(2010, 12, 1, 0, 0, 0, tzinfo=UTC), datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC), ], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "day"), [ datetime.datetime(2010, 12, 31, 0, 0, 0, tzinfo=UTC), datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC), ], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "hour"), [ datetime.datetime(2010, 12, 31, 22, 0, 0, tzinfo=UTC), datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=UTC), ], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "minute"), [ datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC), datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC), ], ) self.assertSequenceEqual( Event.objects.datetimes("dt", "second"), [ datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC), datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC), ], ) def test_raw_sql(self): # Regression test 
for #17755 dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) event = Event.objects.create(dt=dt) self.assertSequenceEqual( list( Event.objects.raw("SELECT * FROM timezones_event WHERE dt = %s", [dt]) ), [event], ) @skipUnlessDBFeature("supports_timezones") def test_cursor_execute_accepts_aware_datetime(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) with connection.cursor() as cursor: cursor.execute("INSERT INTO timezones_event (dt) VALUES (%s)", [dt]) event = Event.objects.get() self.assertEqual(event.dt, dt) @skipIfDBFeature("supports_timezones") def test_cursor_execute_accepts_naive_datetime(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) utc_naive_dt = timezone.make_naive(dt, datetime.timezone.utc) with connection.cursor() as cursor: cursor.execute( "INSERT INTO timezones_event (dt) VALUES (%s)", [utc_naive_dt] ) event = Event.objects.get() self.assertEqual(event.dt, dt) @skipUnlessDBFeature("supports_timezones") def test_cursor_execute_returns_aware_datetime(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) Event.objects.create(dt=dt) with connection.cursor() as cursor: cursor.execute("SELECT dt FROM timezones_event WHERE dt = %s", [dt]) self.assertEqual(cursor.fetchall()[0][0], dt) @skipIfDBFeature("supports_timezones") def test_cursor_execute_returns_naive_datetime(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) utc_naive_dt = timezone.make_naive(dt, datetime.timezone.utc) Event.objects.create(dt=dt) with connection.cursor() as cursor: cursor.execute( "SELECT dt FROM timezones_event WHERE dt = %s", [utc_naive_dt] ) self.assertEqual(cursor.fetchall()[0][0], utc_naive_dt) @skipUnlessDBFeature("supports_timezones") def test_cursor_explicit_time_zone(self): with override_database_connection_timezone("Europe/Paris"): with connection.cursor() as cursor: cursor.execute("SELECT CURRENT_TIMESTAMP") now = cursor.fetchone()[0] self.assertEqual(str(now.tzinfo), "Europe/Paris") @requires_tz_support def test_filter_date_field_with_aware_datetime(self): # Regression test for #17742 day = datetime.date(2011, 9, 1) AllDayEvent.objects.create(day=day) # This is 2011-09-02T01:30:00+03:00 in EAT dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC) self.assertFalse(AllDayEvent.objects.filter(day__gte=dt).exists()) def test_null_datetime(self): # Regression test for #17294 e = MaybeEvent.objects.create() self.assertIsNone(e.dt) def test_update_with_timedelta(self): initial_dt = timezone.now().replace(microsecond=0) event = Event.objects.create(dt=initial_dt) Event.objects.update(dt=F("dt") + timedelta(hours=2)) event.refresh_from_db() self.assertEqual(event.dt, initial_dt + timedelta(hours=2)) @override_settings(TIME_ZONE="Africa/Nairobi", USE_TZ=True) class ForcedTimeZoneDatabaseTests(TransactionTestCase): """ Test the TIME_ZONE database configuration parameter. Since this involves reading and writing to the same database through two connections, this is a TransactionTestCase. """ available_apps = ["timezones"] @classmethod def setUpClass(cls): # @skipIfDBFeature and @skipUnlessDBFeature cannot be chained. The # outermost takes precedence. Handle skipping manually instead. 
if connection.features.supports_timezones: raise SkipTest("Database has feature(s) supports_timezones") if not connection.features.test_db_allows_multiple_connections: raise SkipTest( "Database doesn't support feature(s): " "test_db_allows_multiple_connections" ) super().setUpClass() def test_read_datetime(self): fake_dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=UTC) Event.objects.create(dt=fake_dt) with override_database_connection_timezone("Asia/Bangkok"): event = Event.objects.get() dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) self.assertEqual(event.dt, dt) def test_write_datetime(self): dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) with override_database_connection_timezone("Asia/Bangkok"): Event.objects.create(dt=dt) event = Event.objects.get() fake_dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=UTC) self.assertEqual(event.dt, fake_dt) @override_settings(TIME_ZONE="Africa/Nairobi") class SerializationTests(SimpleTestCase): # Backend-specific notes: # - JSON supports only milliseconds, microseconds will be truncated. # - PyYAML dumps the UTC offset correctly for timezone-aware datetimes. # When PyYAML < 5.3 loads this representation, it subtracts the offset # and returns a naive datetime object in UTC. PyYAML 5.3+ loads timezones # correctly. # Tests are adapted to take these quirks into account. def assert_python_contains_datetime(self, objects, dt): self.assertEqual(objects[0]["fields"]["dt"], dt) def assert_json_contains_datetime(self, json, dt): self.assertIn('"fields": {"dt": "%s"}' % dt, json) def assert_xml_contains_datetime(self, xml, dt): field = parseString(xml).getElementsByTagName("field")[0] self.assertXMLEqual(field.childNodes[0].wholeText, dt) def assert_yaml_contains_datetime(self, yaml, dt): # Depending on the yaml dumper, '!timestamp' might be absent self.assertRegex(yaml, r"\n fields: {dt: !(!timestamp)? 
'%s'}" % re.escape(dt)) def test_naive_datetime(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30) data = serializers.serialize("python", [Event(dt=dt)]) self.assert_python_contains_datetime(data, dt) obj = next(serializers.deserialize("python", data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize("json", [Event(dt=dt)]) self.assert_json_contains_datetime(data, "2011-09-01T13:20:30") obj = next(serializers.deserialize("json", data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize("xml", [Event(dt=dt)]) self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30") obj = next(serializers.deserialize("xml", data)).object self.assertEqual(obj.dt, dt) if not isinstance( serializers.get_serializer("yaml"), serializers.BadSerializer ): data = serializers.serialize( "yaml", [Event(dt=dt)], default_flow_style=None ) self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30") obj = next(serializers.deserialize("yaml", data)).object self.assertEqual(obj.dt, dt) def test_naive_datetime_with_microsecond(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060) data = serializers.serialize("python", [Event(dt=dt)]) self.assert_python_contains_datetime(data, dt) obj = next(serializers.deserialize("python", data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize("json", [Event(dt=dt)]) self.assert_json_contains_datetime(data, "2011-09-01T13:20:30.405") obj = next(serializers.deserialize("json", data)).object self.assertEqual(obj.dt, dt.replace(microsecond=405000)) data = serializers.serialize("xml", [Event(dt=dt)]) self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30.405060") obj = next(serializers.deserialize("xml", data)).object self.assertEqual(obj.dt, dt) if not isinstance( serializers.get_serializer("yaml"), serializers.BadSerializer ): data = serializers.serialize( "yaml", [Event(dt=dt)], default_flow_style=None ) self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30.405060") obj = next(serializers.deserialize("yaml", data)).object self.assertEqual(obj.dt, dt) def test_aware_datetime_with_microsecond(self): dt = datetime.datetime(2011, 9, 1, 17, 20, 30, 405060, tzinfo=ICT) data = serializers.serialize("python", [Event(dt=dt)]) self.assert_python_contains_datetime(data, dt) obj = next(serializers.deserialize("python", data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize("json", [Event(dt=dt)]) self.assert_json_contains_datetime(data, "2011-09-01T17:20:30.405+07:00") obj = next(serializers.deserialize("json", data)).object self.assertEqual(obj.dt, dt.replace(microsecond=405000)) data = serializers.serialize("xml", [Event(dt=dt)]) self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30.405060+07:00") obj = next(serializers.deserialize("xml", data)).object self.assertEqual(obj.dt, dt) if not isinstance( serializers.get_serializer("yaml"), serializers.BadSerializer ): data = serializers.serialize( "yaml", [Event(dt=dt)], default_flow_style=None ) self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30.405060+07:00") obj = next(serializers.deserialize("yaml", data)).object if HAS_YAML and yaml.__version__ < "5.3": self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) else: self.assertEqual(obj.dt, dt) def test_aware_datetime_in_utc(self): dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) data = serializers.serialize("python", [Event(dt=dt)]) self.assert_python_contains_datetime(data, dt) obj = next(serializers.deserialize("python", data)).object self.assertEqual(obj.dt, dt) data = 
serializers.serialize("json", [Event(dt=dt)]) self.assert_json_contains_datetime(data, "2011-09-01T10:20:30Z") obj = next(serializers.deserialize("json", data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize("xml", [Event(dt=dt)]) self.assert_xml_contains_datetime(data, "2011-09-01T10:20:30+00:00") obj = next(serializers.deserialize("xml", data)).object self.assertEqual(obj.dt, dt) if not isinstance( serializers.get_serializer("yaml"), serializers.BadSerializer ): data = serializers.serialize( "yaml", [Event(dt=dt)], default_flow_style=None ) self.assert_yaml_contains_datetime(data, "2011-09-01 10:20:30+00:00") obj = next(serializers.deserialize("yaml", data)).object self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) def test_aware_datetime_in_local_timezone(self): dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT) data = serializers.serialize("python", [Event(dt=dt)]) self.assert_python_contains_datetime(data, dt) obj = next(serializers.deserialize("python", data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize("json", [Event(dt=dt)]) self.assert_json_contains_datetime(data, "2011-09-01T13:20:30+03:00") obj = next(serializers.deserialize("json", data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize("xml", [Event(dt=dt)]) self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30+03:00") obj = next(serializers.deserialize("xml", data)).object self.assertEqual(obj.dt, dt) if not isinstance( serializers.get_serializer("yaml"), serializers.BadSerializer ): data = serializers.serialize( "yaml", [Event(dt=dt)], default_flow_style=None ) self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30+03:00") obj = next(serializers.deserialize("yaml", data)).object if HAS_YAML and yaml.__version__ < "5.3": self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) else: self.assertEqual(obj.dt, dt) def test_aware_datetime_in_other_timezone(self): dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT) data = serializers.serialize("python", [Event(dt=dt)]) self.assert_python_contains_datetime(data, dt) obj = next(serializers.deserialize("python", data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize("json", [Event(dt=dt)]) self.assert_json_contains_datetime(data, "2011-09-01T17:20:30+07:00") obj = next(serializers.deserialize("json", data)).object self.assertEqual(obj.dt, dt) data = serializers.serialize("xml", [Event(dt=dt)]) self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30+07:00") obj = next(serializers.deserialize("xml", data)).object self.assertEqual(obj.dt, dt) if not isinstance( serializers.get_serializer("yaml"), serializers.BadSerializer ): data = serializers.serialize( "yaml", [Event(dt=dt)], default_flow_style=None ) self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30+07:00") obj = next(serializers.deserialize("yaml", data)).object if HAS_YAML and yaml.__version__ < "5.3": self.assertEqual(obj.dt.replace(tzinfo=UTC), dt) else: self.assertEqual(obj.dt, dt) @translation.override(None) @override_settings(DATETIME_FORMAT="c", TIME_ZONE="Africa/Nairobi", USE_TZ=True) class TemplateTests(SimpleTestCase): @requires_tz_support def test_localtime_templatetag_and_filters(self): """ Test the {% localtime %} templatetag and related filters. 
""" datetimes = { "utc": datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), "eat": datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), "ict": datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT), "naive": datetime.datetime(2011, 9, 1, 13, 20, 30), } templates = { "notag": Template( "{% load tz %}" "{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}" ), "noarg": Template( "{% load tz %}{% localtime %}{{ dt }}|{{ dt|localtime }}|" "{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}" ), "on": Template( "{% load tz %}{% localtime on %}{{ dt }}|{{ dt|localtime }}|" "{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}" ), "off": Template( "{% load tz %}{% localtime off %}{{ dt }}|{{ dt|localtime }}|" "{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}" ), } # Transform a list of keys in 'datetimes' to the expected template # output. This makes the definition of 'results' more readable. def t(*result): return "|".join(datetimes[key].isoformat() for key in result) # Results for USE_TZ = True results = { "utc": { "notag": t("eat", "eat", "utc", "ict"), "noarg": t("eat", "eat", "utc", "ict"), "on": t("eat", "eat", "utc", "ict"), "off": t("utc", "eat", "utc", "ict"), }, "eat": { "notag": t("eat", "eat", "utc", "ict"), "noarg": t("eat", "eat", "utc", "ict"), "on": t("eat", "eat", "utc", "ict"), "off": t("eat", "eat", "utc", "ict"), }, "ict": { "notag": t("eat", "eat", "utc", "ict"), "noarg": t("eat", "eat", "utc", "ict"), "on": t("eat", "eat", "utc", "ict"), "off": t("ict", "eat", "utc", "ict"), }, "naive": { "notag": t("naive", "eat", "utc", "ict"), "noarg": t("naive", "eat", "utc", "ict"), "on": t("naive", "eat", "utc", "ict"), "off": t("naive", "eat", "utc", "ict"), }, } for k1, dt in datetimes.items(): for k2, tpl in templates.items(): ctx = Context({"dt": dt, "ICT": ICT}) actual = tpl.render(ctx) expected = results[k1][k2] self.assertEqual( actual, expected, "%s / %s: %r != %r" % (k1, k2, actual, expected) ) # Changes for USE_TZ = False results["utc"]["notag"] = t("utc", "eat", "utc", "ict") results["ict"]["notag"] = t("ict", "eat", "utc", "ict") with self.settings(USE_TZ=False): for k1, dt in datetimes.items(): for k2, tpl in templates.items(): ctx = Context({"dt": dt, "ICT": ICT}) actual = tpl.render(ctx) expected = results[k1][k2] self.assertEqual( actual, expected, "%s / %s: %r != %r" % (k1, k2, actual, expected), ) def test_localtime_filters_with_iana(self): """ Test the |localtime, |utc, and |timezone filters with iana zones. """ # Use an IANA timezone as local time tpl = Template("{% load tz %}{{ dt|localtime }}|{{ dt|utc }}") ctx = Context({"dt": datetime.datetime(2011, 9, 1, 12, 20, 30)}) with self.settings(TIME_ZONE="Europe/Paris"): self.assertEqual( tpl.render(ctx), "2011-09-01T12:20:30+02:00|2011-09-01T10:20:30+00:00" ) # Use an IANA timezone as argument tz = zoneinfo.ZoneInfo("Europe/Paris") tpl = Template("{% load tz %}{{ dt|timezone:tz }}") ctx = Context( { "dt": datetime.datetime(2011, 9, 1, 13, 20, 30), "tz": tz, } ) self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") def test_localtime_templatetag_invalid_argument(self): with self.assertRaises(TemplateSyntaxError): Template("{% load tz %}{% localtime foo %}{% endlocaltime %}").render() def test_localtime_filters_do_not_raise_exceptions(self): """ Test the |localtime, |utc, and |timezone filters on bad inputs. 
""" tpl = Template( "{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:tz }}" ) with self.settings(USE_TZ=True): # bad datetime value ctx = Context({"dt": None, "tz": ICT}) self.assertEqual(tpl.render(ctx), "None|||") ctx = Context({"dt": "not a date", "tz": ICT}) self.assertEqual(tpl.render(ctx), "not a date|||") # bad timezone value tpl = Template("{% load tz %}{{ dt|timezone:tz }}") ctx = Context({"dt": datetime.datetime(2011, 9, 1, 13, 20, 30), "tz": None}) self.assertEqual(tpl.render(ctx), "") ctx = Context( {"dt": datetime.datetime(2011, 9, 1, 13, 20, 30), "tz": "not a tz"} ) self.assertEqual(tpl.render(ctx), "") @requires_tz_support def test_timezone_templatetag(self): """ Test the {% timezone %} templatetag. """ tpl = Template( "{% load tz %}" "{{ dt }}|" "{% timezone tz1 %}" "{{ dt }}|" "{% timezone tz2 %}" "{{ dt }}" "{% endtimezone %}" "{% endtimezone %}" ) ctx = Context( { "dt": datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), "tz1": ICT, "tz2": None, } ) self.assertEqual( tpl.render(ctx), "2011-09-01T13:20:30+03:00|2011-09-01T17:20:30+07:00|" "2011-09-01T13:20:30+03:00", ) def test_timezone_templatetag_with_iana(self): """ Test the {% timezone %} templatetag with IANA time zone providers. """ tpl = Template("{% load tz %}{% timezone tz %}{{ dt }}{% endtimezone %}") # Use a IANA timezone as argument tz = zoneinfo.ZoneInfo("Europe/Paris") ctx = Context( { "dt": datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), "tz": tz, } ) self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") # Use a IANA timezone name as argument ctx = Context( { "dt": datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), "tz": "Europe/Paris", } ) self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00") @skipIf(sys.platform == "win32", "Windows uses non-standard time zone names") def test_get_current_timezone_templatetag(self): """ Test the {% get_current_timezone %} templatetag. """ tpl = Template( "{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}" ) self.assertEqual(tpl.render(Context()), "Africa/Nairobi") with timezone.override(UTC): self.assertEqual(tpl.render(Context()), "UTC") tpl = Template( "{% load tz %}{% timezone tz %}{% get_current_timezone as time_zone %}" "{% endtimezone %}{{ time_zone }}" ) self.assertEqual(tpl.render(Context({"tz": ICT})), "+0700") with timezone.override(UTC): self.assertEqual(tpl.render(Context({"tz": ICT})), "+0700") def test_get_current_timezone_templatetag_with_iana(self): tpl = Template( "{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}" ) tz = zoneinfo.ZoneInfo("Europe/Paris") with timezone.override(tz): self.assertEqual(tpl.render(Context()), "Europe/Paris") tpl = Template( "{% load tz %}{% timezone 'Europe/Paris' %}" "{% get_current_timezone as time_zone %}{% endtimezone %}" "{{ time_zone }}" ) self.assertEqual(tpl.render(Context()), "Europe/Paris") def test_get_current_timezone_templatetag_invalid_argument(self): msg = ( "'get_current_timezone' requires 'as variable' (got " "['get_current_timezone'])" ) with self.assertRaisesMessage(TemplateSyntaxError, msg): Template("{% load tz %}{% get_current_timezone %}").render() @skipIf(sys.platform == "win32", "Windows uses non-standard time zone names") def test_tz_template_context_processor(self): """ Test the django.template.context_processors.tz template context processor. 
""" tpl = Template("{{ TIME_ZONE }}") context = Context() self.assertEqual(tpl.render(context), "") request_context = RequestContext( HttpRequest(), processors=[context_processors.tz] ) self.assertEqual(tpl.render(request_context), "Africa/Nairobi") @requires_tz_support def test_date_and_time_template_filters(self): tpl = Template("{{ dt|date:'Y-m-d' }} at {{ dt|time:'H:i:s' }}") ctx = Context({"dt": datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)}) self.assertEqual(tpl.render(ctx), "2011-09-01 at 23:20:20") with timezone.override(ICT): self.assertEqual(tpl.render(ctx), "2011-09-02 at 03:20:20") def test_date_and_time_template_filters_honor_localtime(self): tpl = Template( "{% load tz %}{% localtime off %}{{ dt|date:'Y-m-d' }} at " "{{ dt|time:'H:i:s' }}{% endlocaltime %}" ) ctx = Context({"dt": datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)}) self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20") with timezone.override(ICT): self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20") @requires_tz_support def test_now_template_tag_uses_current_time_zone(self): # Regression for #17343 tpl = Template('{% now "O" %}') self.assertEqual(tpl.render(Context({})), "+0300") with timezone.override(ICT): self.assertEqual(tpl.render(Context({})), "+0700") @override_settings(DATETIME_FORMAT="c", TIME_ZONE="Africa/Nairobi", USE_TZ=False) class LegacyFormsTests(TestCase): def test_form(self): form = EventForm({"dt": "2011-09-01 13:20:30"}) self.assertTrue(form.is_valid()) self.assertEqual( form.cleaned_data["dt"], datetime.datetime(2011, 9, 1, 13, 20, 30) ) def test_form_with_non_existent_time(self): form = EventForm({"dt": "2011-03-27 02:30:00"}) tz = zoneinfo.ZoneInfo("Europe/Paris") with timezone.override(tz): # This is a bug. self.assertTrue(form.is_valid()) self.assertEqual( form.cleaned_data["dt"], datetime.datetime(2011, 3, 27, 2, 30, 0), ) def test_form_with_ambiguous_time(self): form = EventForm({"dt": "2011-10-30 02:30:00"}) tz = zoneinfo.ZoneInfo("Europe/Paris") with timezone.override(tz): # This is a bug. self.assertTrue(form.is_valid()) self.assertEqual( form.cleaned_data["dt"], datetime.datetime(2011, 10, 30, 2, 30, 0), ) def test_split_form(self): form = EventSplitForm({"dt_0": "2011-09-01", "dt_1": "13:20:30"}) self.assertTrue(form.is_valid()) self.assertEqual( form.cleaned_data["dt"], datetime.datetime(2011, 9, 1, 13, 20, 30) ) def test_model_form(self): EventModelForm({"dt": "2011-09-01 13:20:30"}).save() e = Event.objects.get() self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 13, 20, 30)) @override_settings(DATETIME_FORMAT="c", TIME_ZONE="Africa/Nairobi", USE_TZ=True) class NewFormsTests(TestCase): @requires_tz_support def test_form(self): form = EventForm({"dt": "2011-09-01 13:20:30"}) self.assertTrue(form.is_valid()) self.assertEqual( form.cleaned_data["dt"], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), ) def test_form_with_other_timezone(self): form = EventForm({"dt": "2011-09-01 17:20:30"}) with timezone.override(ICT): self.assertTrue(form.is_valid()) self.assertEqual( form.cleaned_data["dt"], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), ) def test_form_with_non_existent_time(self): tz = zoneinfo.ZoneInfo("Europe/Paris") with timezone.override(tz): form = EventForm({"dt": "2011-03-27 02:30:00"}) self.assertFalse(form.is_valid()) self.assertEqual( form.errors["dt"], [ "2011-03-27 02:30:00 couldn’t be interpreted in time zone " "Europe/Paris; it may be ambiguous or it may not exist." 
], ) def test_form_with_ambiguous_time(self): tz = zoneinfo.ZoneInfo("Europe/Paris") with timezone.override(tz): form = EventForm({"dt": "2011-10-30 02:30:00"}) self.assertFalse(form.is_valid()) self.assertEqual( form.errors["dt"], [ "2011-10-30 02:30:00 couldn’t be interpreted in time zone " "Europe/Paris; it may be ambiguous or it may not exist." ], ) @requires_tz_support def test_split_form(self): form = EventSplitForm({"dt_0": "2011-09-01", "dt_1": "13:20:30"}) self.assertTrue(form.is_valid()) self.assertEqual( form.cleaned_data["dt"], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), ) @requires_tz_support def test_localized_form(self): form = EventLocalizedForm( initial={"dt": datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)} ) with timezone.override(ICT): self.assertIn("2011-09-01 17:20:30", str(form)) @requires_tz_support def test_model_form(self): EventModelForm({"dt": "2011-09-01 13:20:30"}).save() e = Event.objects.get() self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)) @requires_tz_support def test_localized_model_form(self): form = EventLocalizedModelForm( instance=Event(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)) ) with timezone.override(ICT): self.assertIn("2011-09-01 17:20:30", str(form)) @translation.override(None) @override_settings( DATETIME_FORMAT="c", TIME_ZONE="Africa/Nairobi", USE_TZ=True, ROOT_URLCONF="timezones.urls", ) class AdminTests(TestCase): @classmethod def setUpTestData(cls): cls.u1 = User.objects.create_user( password="secret", last_login=datetime.datetime(2007, 5, 30, 13, 20, 10, tzinfo=UTC), is_superuser=True, username="super", first_name="Super", last_name="User", email="[email protected]", is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10, tzinfo=UTC), ) def setUp(self): self.client.force_login(self.u1) @requires_tz_support def test_changelist(self): e = Event.objects.create( dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) ) response = self.client.get(reverse("admin_tz:timezones_event_changelist")) self.assertContains(response, e.dt.astimezone(EAT).isoformat()) def test_changelist_in_other_timezone(self): e = Event.objects.create( dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) ) with timezone.override(ICT): response = self.client.get(reverse("admin_tz:timezones_event_changelist")) self.assertContains(response, e.dt.astimezone(ICT).isoformat()) @requires_tz_support def test_change_editable(self): e = Event.objects.create( dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) ) response = self.client.get( reverse("admin_tz:timezones_event_change", args=(e.pk,)) ) self.assertContains(response, e.dt.astimezone(EAT).date().isoformat()) self.assertContains(response, e.dt.astimezone(EAT).time().isoformat()) def test_change_editable_in_other_timezone(self): e = Event.objects.create( dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC) ) with timezone.override(ICT): response = self.client.get( reverse("admin_tz:timezones_event_change", args=(e.pk,)) ) self.assertContains(response, e.dt.astimezone(ICT).date().isoformat()) self.assertContains(response, e.dt.astimezone(ICT).time().isoformat()) @requires_tz_support def test_change_readonly(self): t = Timestamp.objects.create() response = self.client.get( reverse("admin_tz:timezones_timestamp_change", args=(t.pk,)) ) self.assertContains(response, t.created.astimezone(EAT).isoformat()) def test_change_readonly_in_other_timezone(self): t = Timestamp.objects.create() with timezone.override(ICT): response = 
self.client.get( reverse("admin_tz:timezones_timestamp_change", args=(t.pk,)) ) self.assertContains(response, t.created.astimezone(ICT).isoformat())
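

# Illustrative sketch, not part of the original suite: a minimal example of the
# |timezone filter that the tests above exercise, reusing the module-level UTC
# and ICT fixtures. The helper name is hypothetical. The expected string
# assumes DATETIME_FORMAT="c" (ISO 8601), as the test classes above override it;
# with the default DATETIME_FORMAT the rendered text would differ.
def _example_timezone_filter_usage():
    tpl = Template("{% load tz %}{{ dt|timezone:tz }}")
    ctx = Context(
        {
            "dt": datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC),
            "tz": ICT,
        }
    )
    # 10:20:30 UTC shifted to UTC+07:00 is 17:20:30, rendered as
    # "2011-09-01T17:20:30+07:00" under DATETIME_FORMAT="c".
    return tpl.render(ctx)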
from unittest import mock from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist from django.db import NotSupportedError, connection from django.db.models import Prefetch, QuerySet, prefetch_related_objects from django.db.models.query import get_prefetcher from django.db.models.sql import Query from django.test import ( TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature, ) from django.test.utils import CaptureQueriesContext from .models import ( Article, Author, Author2, AuthorAddress, AuthorWithAge, Bio, Book, Bookmark, BookReview, BookWithYear, Comment, Department, Employee, FavoriteAuthors, House, LessonEntry, ModelIterableSubclass, Person, Qualification, Reader, Room, TaggedItem, Teacher, WordEntry, ) class TestDataMixin: @classmethod def setUpTestData(cls): cls.book1 = Book.objects.create(title="Poems") cls.book2 = Book.objects.create(title="Jane Eyre") cls.book3 = Book.objects.create(title="Wuthering Heights") cls.book4 = Book.objects.create(title="Sense and Sensibility") cls.author1 = Author.objects.create(name="Charlotte", first_book=cls.book1) cls.author2 = Author.objects.create(name="Anne", first_book=cls.book1) cls.author3 = Author.objects.create(name="Emily", first_book=cls.book1) cls.author4 = Author.objects.create(name="Jane", first_book=cls.book4) cls.book1.authors.add(cls.author1, cls.author2, cls.author3) cls.book2.authors.add(cls.author1) cls.book3.authors.add(cls.author3) cls.book4.authors.add(cls.author4) cls.reader1 = Reader.objects.create(name="Amy") cls.reader2 = Reader.objects.create(name="Belinda") cls.reader1.books_read.add(cls.book1, cls.book4) cls.reader2.books_read.add(cls.book2, cls.book4) class PrefetchRelatedTests(TestDataMixin, TestCase): def assertWhereContains(self, sql, needle): where_idx = sql.index("WHERE") self.assertEqual( sql.count(str(needle), where_idx), 1, msg="WHERE clause doesn't contain %s, actual SQL: %s" % (needle, sql[where_idx:]), ) def test_m2m_forward(self): with self.assertNumQueries(2): lists = [ list(b.authors.all()) for b in Book.objects.prefetch_related("authors") ] normal_lists = [list(b.authors.all()) for b in Book.objects.all()] self.assertEqual(lists, normal_lists) def test_m2m_reverse(self): with self.assertNumQueries(2): lists = [ list(a.books.all()) for a in Author.objects.prefetch_related("books") ] normal_lists = [list(a.books.all()) for a in Author.objects.all()] self.assertEqual(lists, normal_lists) def test_foreignkey_forward(self): with self.assertNumQueries(2): books = [ a.first_book for a in Author.objects.prefetch_related("first_book") ] normal_books = [a.first_book for a in Author.objects.all()] self.assertEqual(books, normal_books) def test_foreignkey_reverse(self): with self.assertNumQueries(2): [ list(b.first_time_authors.all()) for b in Book.objects.prefetch_related("first_time_authors") ] self.assertSequenceEqual(self.book2.authors.all(), [self.author1]) def test_onetoone_reverse_no_match(self): # Regression for #17439 with self.assertNumQueries(2): book = Book.objects.prefetch_related("bookwithyear").all()[0] with self.assertNumQueries(0): with self.assertRaises(BookWithYear.DoesNotExist): book.bookwithyear def test_onetoone_reverse_with_to_field_pk(self): """ A model (Bio) with a OneToOneField primary key (author) that references a non-pk field (name) on the related model (Author) is prefetchable. 
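
        A rough sketch of the relationship under test (the real definitions
        live in .models; the exact field arguments below are an assumption,
        shown only for illustration):

            from django.db import models

            class Bio(models.Model):
                # OneToOneField used as the primary key, targeting
                # Author.name (to_field) rather than Author.pk.
                author = models.OneToOneField(
                    Author, models.CASCADE, primary_key=True, to_field="name"
                )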
""" Bio.objects.bulk_create( [ Bio(author=self.author1), Bio(author=self.author2), Bio(author=self.author3), ] ) authors = Author.objects.filter( name__in=[self.author1, self.author2, self.author3], ).prefetch_related("bio") with self.assertNumQueries(2): for author in authors: self.assertEqual(author.name, author.bio.author.name) def test_survives_clone(self): with self.assertNumQueries(2): [ list(b.first_time_authors.all()) for b in Book.objects.prefetch_related("first_time_authors").exclude( id=1000 ) ] def test_len(self): with self.assertNumQueries(2): qs = Book.objects.prefetch_related("first_time_authors") len(qs) [list(b.first_time_authors.all()) for b in qs] def test_bool(self): with self.assertNumQueries(2): qs = Book.objects.prefetch_related("first_time_authors") bool(qs) [list(b.first_time_authors.all()) for b in qs] def test_count(self): with self.assertNumQueries(2): qs = Book.objects.prefetch_related("first_time_authors") [b.first_time_authors.count() for b in qs] def test_exists(self): with self.assertNumQueries(2): qs = Book.objects.prefetch_related("first_time_authors") [b.first_time_authors.exists() for b in qs] def test_in_and_prefetch_related(self): """ Regression test for #20242 - QuerySet "in" didn't work the first time when using prefetch_related. This was fixed by the removal of chunked reads from QuerySet iteration in 70679243d1786e03557c28929f9762a119e3ac14. """ qs = Book.objects.prefetch_related("first_time_authors") self.assertIn(qs[0], qs) def test_clear(self): with self.assertNumQueries(5): with_prefetch = Author.objects.prefetch_related("books") without_prefetch = with_prefetch.prefetch_related(None) [list(a.books.all()) for a in without_prefetch] def test_m2m_then_m2m(self): """A m2m can be followed through another m2m.""" with self.assertNumQueries(3): qs = Author.objects.prefetch_related("books__read_by") lists = [ [[str(r) for r in b.read_by.all()] for b in a.books.all()] for a in qs ] self.assertEqual( lists, [ [["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre [["Amy"]], # Anne - Poems [["Amy"], []], # Emily - Poems, Wuthering Heights [["Amy", "Belinda"]], # Jane - Sense and Sense ], ) def test_overriding_prefetch(self): with self.assertNumQueries(3): qs = Author.objects.prefetch_related("books", "books__read_by") lists = [ [[str(r) for r in b.read_by.all()] for b in a.books.all()] for a in qs ] self.assertEqual( lists, [ [["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre [["Amy"]], # Anne - Poems [["Amy"], []], # Emily - Poems, Wuthering Heights [["Amy", "Belinda"]], # Jane - Sense and Sense ], ) with self.assertNumQueries(3): qs = Author.objects.prefetch_related("books__read_by", "books") lists = [ [[str(r) for r in b.read_by.all()] for b in a.books.all()] for a in qs ] self.assertEqual( lists, [ [["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre [["Amy"]], # Anne - Poems [["Amy"], []], # Emily - Poems, Wuthering Heights [["Amy", "Belinda"]], # Jane - Sense and Sense ], ) def test_get(self): """ Objects retrieved with .get() get the prefetch behavior. """ # Need a double with self.assertNumQueries(3): author = Author.objects.prefetch_related("books__read_by").get( name="Charlotte" ) lists = [[str(r) for r in b.read_by.all()] for b in author.books.all()] self.assertEqual(lists, [["Amy"], ["Belinda"]]) # Poems, Jane Eyre def test_foreign_key_then_m2m(self): """ A m2m relation can be followed after a relation like ForeignKey that doesn't have many objects. 
""" with self.assertNumQueries(2): qs = Author.objects.select_related("first_book").prefetch_related( "first_book__read_by" ) lists = [[str(r) for r in a.first_book.read_by.all()] for a in qs] self.assertEqual(lists, [["Amy"], ["Amy"], ["Amy"], ["Amy", "Belinda"]]) def test_reverse_one_to_one_then_m2m(self): """ A m2m relation can be followed after going through the select_related reverse of an o2o. """ qs = Author.objects.prefetch_related("bio__books").select_related("bio") with self.assertNumQueries(1): list(qs.all()) Bio.objects.create(author=self.author1) with self.assertNumQueries(2): list(qs.all()) def test_attribute_error(self): qs = Reader.objects.prefetch_related("books_read__xyz") msg = ( "Cannot find 'xyz' on Book object, 'books_read__xyz' " "is an invalid parameter to prefetch_related()" ) with self.assertRaisesMessage(AttributeError, msg) as cm: list(qs) self.assertIn("prefetch_related", str(cm.exception)) def test_invalid_final_lookup(self): qs = Book.objects.prefetch_related("authors__name") msg = ( "'authors__name' does not resolve to an item that supports " "prefetching - this is an invalid parameter to prefetch_related()." ) with self.assertRaisesMessage(ValueError, msg) as cm: list(qs) self.assertIn("prefetch_related", str(cm.exception)) self.assertIn("name", str(cm.exception)) def test_prefetch_eq(self): prefetch_1 = Prefetch("authors", queryset=Author.objects.all()) prefetch_2 = Prefetch("books", queryset=Book.objects.all()) self.assertEqual(prefetch_1, prefetch_1) self.assertEqual(prefetch_1, mock.ANY) self.assertNotEqual(prefetch_1, prefetch_2) def test_forward_m2m_to_attr_conflict(self): msg = "to_attr=authors conflicts with a field on the Book model." authors = Author.objects.all() with self.assertRaisesMessage(ValueError, msg): list( Book.objects.prefetch_related( Prefetch("authors", queryset=authors, to_attr="authors"), ) ) # Without the ValueError, an author was deleted due to the implicit # save of the relation assignment. self.assertEqual(self.book1.authors.count(), 3) def test_reverse_m2m_to_attr_conflict(self): msg = "to_attr=books conflicts with a field on the Author model." poems = Book.objects.filter(title="Poems") with self.assertRaisesMessage(ValueError, msg): list( Author.objects.prefetch_related( Prefetch("books", queryset=poems, to_attr="books"), ) ) # Without the ValueError, a book was deleted due to the implicit # save of reverse relation assignment. self.assertEqual(self.author1.books.count(), 2) def test_m2m_then_reverse_fk_object_ids(self): with CaptureQueriesContext(connection) as queries: list(Book.objects.prefetch_related("authors__addresses")) sql = queries[-1]["sql"] self.assertWhereContains(sql, self.author1.name) def test_m2m_then_m2m_object_ids(self): with CaptureQueriesContext(connection) as queries: list(Book.objects.prefetch_related("authors__favorite_authors")) sql = queries[-1]["sql"] self.assertWhereContains(sql, self.author1.name) def test_m2m_then_reverse_one_to_one_object_ids(self): with CaptureQueriesContext(connection) as queries: list(Book.objects.prefetch_related("authors__authorwithage")) sql = queries[-1]["sql"] self.assertWhereContains(sql, self.author1.id) def test_filter_deferred(self): """ Related filtering of prefetched querysets is deferred on m2m and reverse m2o relations until necessary. 
""" add_q = Query.add_q for relation in ["authors", "first_time_authors"]: with self.subTest(relation=relation): with mock.patch.object( Query, "add_q", autospec=True, side_effect=lambda self, q: add_q(self, q), ) as add_q_mock: list(Book.objects.prefetch_related(relation)) self.assertEqual(add_q_mock.call_count, 1) def test_named_values_list(self): qs = Author.objects.prefetch_related("books") self.assertCountEqual( [value.name for value in qs.values_list("name", named=True)], ["Anne", "Charlotte", "Emily", "Jane"], ) def test_m2m_prefetching_iterator_with_chunks(self): with self.assertNumQueries(3): authors = [ b.authors.first() for b in Book.objects.prefetch_related("authors").iterator(chunk_size=2) ] self.assertEqual( authors, [self.author1, self.author1, self.author3, self.author4], ) def test_m2m_prefetching_iterator_without_chunks_error(self): msg = ( "chunk_size must be provided when using QuerySet.iterator() after " "prefetch_related()." ) with self.assertRaisesMessage(ValueError, msg): Book.objects.prefetch_related("authors").iterator() class RawQuerySetTests(TestDataMixin, TestCase): def test_basic(self): with self.assertNumQueries(2): books = Book.objects.raw( "SELECT * FROM prefetch_related_book WHERE id = %s", (self.book1.id,) ).prefetch_related("authors") book1 = list(books)[0] with self.assertNumQueries(0): self.assertCountEqual( book1.authors.all(), [self.author1, self.author2, self.author3] ) def test_prefetch_before_raw(self): with self.assertNumQueries(2): books = Book.objects.prefetch_related("authors").raw( "SELECT * FROM prefetch_related_book WHERE id = %s", (self.book1.id,) ) book1 = list(books)[0] with self.assertNumQueries(0): self.assertCountEqual( book1.authors.all(), [self.author1, self.author2, self.author3] ) def test_clear(self): with self.assertNumQueries(5): with_prefetch = Author.objects.raw( "SELECT * FROM prefetch_related_author" ).prefetch_related("books") without_prefetch = with_prefetch.prefetch_related(None) [list(a.books.all()) for a in without_prefetch] class CustomPrefetchTests(TestCase): @classmethod def traverse_qs(cls, obj_iter, path): """ Helper method that returns a list containing a list of the objects in the obj_iter. Then for each object in the obj_iter, the path will be recursively travelled and the found objects are added to the return value. """ ret_val = [] if hasattr(obj_iter, "all"): obj_iter = obj_iter.all() try: iter(obj_iter) except TypeError: obj_iter = [obj_iter] for obj in obj_iter: rel_objs = [] for part in path: if not part: continue try: related = getattr(obj, part[0]) except ObjectDoesNotExist: continue if related is not None: rel_objs.extend(cls.traverse_qs(related, [part[1:]])) ret_val.append((obj, rel_objs)) return ret_val @classmethod def setUpTestData(cls): cls.person1 = Person.objects.create(name="Joe") cls.person2 = Person.objects.create(name="Mary") # Set main_room for each house before creating the next one for # databases where supports_nullable_unique_constraints is False. 
cls.house1 = House.objects.create( name="House 1", address="123 Main St", owner=cls.person1 ) cls.room1_1 = Room.objects.create(name="Dining room", house=cls.house1) cls.room1_2 = Room.objects.create(name="Lounge", house=cls.house1) cls.room1_3 = Room.objects.create(name="Kitchen", house=cls.house1) cls.house1.main_room = cls.room1_1 cls.house1.save() cls.person1.houses.add(cls.house1) cls.house2 = House.objects.create( name="House 2", address="45 Side St", owner=cls.person1 ) cls.room2_1 = Room.objects.create(name="Dining room", house=cls.house2) cls.room2_2 = Room.objects.create(name="Lounge", house=cls.house2) cls.room2_3 = Room.objects.create(name="Kitchen", house=cls.house2) cls.house2.main_room = cls.room2_1 cls.house2.save() cls.person1.houses.add(cls.house2) cls.house3 = House.objects.create( name="House 3", address="6 Downing St", owner=cls.person2 ) cls.room3_1 = Room.objects.create(name="Dining room", house=cls.house3) cls.room3_2 = Room.objects.create(name="Lounge", house=cls.house3) cls.room3_3 = Room.objects.create(name="Kitchen", house=cls.house3) cls.house3.main_room = cls.room3_1 cls.house3.save() cls.person2.houses.add(cls.house3) cls.house4 = House.objects.create( name="house 4", address="7 Regents St", owner=cls.person2 ) cls.room4_1 = Room.objects.create(name="Dining room", house=cls.house4) cls.room4_2 = Room.objects.create(name="Lounge", house=cls.house4) cls.room4_3 = Room.objects.create(name="Kitchen", house=cls.house4) cls.house4.main_room = cls.room4_1 cls.house4.save() cls.person2.houses.add(cls.house4) def test_traverse_qs(self): qs = Person.objects.prefetch_related("houses") related_objs_normal = ([list(p.houses.all()) for p in qs],) related_objs_from_traverse = [ [inner[0] for inner in o[1]] for o in self.traverse_qs(qs, [["houses"]]) ] self.assertEqual(related_objs_normal, (related_objs_from_traverse,)) def test_ambiguous(self): # Ambiguous: Lookup was already seen with a different queryset. msg = ( "'houses' lookup was already seen with a different queryset. You " "may need to adjust the ordering of your lookups." ) # lookup.queryset shouldn't be evaluated. with self.assertNumQueries(3): with self.assertRaisesMessage(ValueError, msg): self.traverse_qs( Person.objects.prefetch_related( "houses__rooms", Prefetch("houses", queryset=House.objects.all()), ), [["houses", "rooms"]], ) # Ambiguous: Lookup houses_lst doesn't yet exist when performing # houses_lst__rooms. msg = ( "Cannot find 'houses_lst' on Person object, 'houses_lst__rooms' is " "an invalid parameter to prefetch_related()" ) with self.assertRaisesMessage(AttributeError, msg): self.traverse_qs( Person.objects.prefetch_related( "houses_lst__rooms", Prefetch( "houses", queryset=House.objects.all(), to_attr="houses_lst" ), ), [["houses", "rooms"]], ) # Not ambiguous. self.traverse_qs( Person.objects.prefetch_related("houses__rooms", "houses"), [["houses", "rooms"]], ) self.traverse_qs( Person.objects.prefetch_related( "houses__rooms", Prefetch("houses", queryset=House.objects.all(), to_attr="houses_lst"), ), [["houses", "rooms"]], ) def test_m2m(self): # Control lookups. with self.assertNumQueries(2): lst1 = self.traverse_qs( Person.objects.prefetch_related("houses"), [["houses"]] ) # Test lookups. 
with self.assertNumQueries(2): lst2 = self.traverse_qs( Person.objects.prefetch_related(Prefetch("houses")), [["houses"]] ) self.assertEqual(lst1, lst2) with self.assertNumQueries(2): lst2 = self.traverse_qs( Person.objects.prefetch_related( Prefetch("houses", to_attr="houses_lst") ), [["houses_lst"]], ) self.assertEqual(lst1, lst2) def test_reverse_m2m(self): # Control lookups. with self.assertNumQueries(2): lst1 = self.traverse_qs( House.objects.prefetch_related("occupants"), [["occupants"]] ) # Test lookups. with self.assertNumQueries(2): lst2 = self.traverse_qs( House.objects.prefetch_related(Prefetch("occupants")), [["occupants"]] ) self.assertEqual(lst1, lst2) with self.assertNumQueries(2): lst2 = self.traverse_qs( House.objects.prefetch_related( Prefetch("occupants", to_attr="occupants_lst") ), [["occupants_lst"]], ) self.assertEqual(lst1, lst2) def test_m2m_through_fk(self): # Control lookups. with self.assertNumQueries(3): lst1 = self.traverse_qs( Room.objects.prefetch_related("house__occupants"), [["house", "occupants"]], ) # Test lookups. with self.assertNumQueries(3): lst2 = self.traverse_qs( Room.objects.prefetch_related(Prefetch("house__occupants")), [["house", "occupants"]], ) self.assertEqual(lst1, lst2) with self.assertNumQueries(3): lst2 = self.traverse_qs( Room.objects.prefetch_related( Prefetch("house__occupants", to_attr="occupants_lst") ), [["house", "occupants_lst"]], ) self.assertEqual(lst1, lst2) def test_m2m_through_gfk(self): TaggedItem.objects.create(tag="houses", content_object=self.house1) TaggedItem.objects.create(tag="houses", content_object=self.house2) # Control lookups. with self.assertNumQueries(3): lst1 = self.traverse_qs( TaggedItem.objects.filter(tag="houses").prefetch_related( "content_object__rooms" ), [["content_object", "rooms"]], ) # Test lookups. with self.assertNumQueries(3): lst2 = self.traverse_qs( TaggedItem.objects.prefetch_related( Prefetch("content_object"), Prefetch("content_object__rooms", to_attr="rooms_lst"), ), [["content_object", "rooms_lst"]], ) self.assertEqual(lst1, lst2) def test_o2m_through_m2m(self): # Control lookups. with self.assertNumQueries(3): lst1 = self.traverse_qs( Person.objects.prefetch_related("houses", "houses__rooms"), [["houses", "rooms"]], ) # Test lookups. with self.assertNumQueries(3): lst2 = self.traverse_qs( Person.objects.prefetch_related(Prefetch("houses"), "houses__rooms"), [["houses", "rooms"]], ) self.assertEqual(lst1, lst2) with self.assertNumQueries(3): lst2 = self.traverse_qs( Person.objects.prefetch_related( Prefetch("houses"), Prefetch("houses__rooms") ), [["houses", "rooms"]], ) self.assertEqual(lst1, lst2) with self.assertNumQueries(3): lst2 = self.traverse_qs( Person.objects.prefetch_related( Prefetch("houses", to_attr="houses_lst"), "houses_lst__rooms" ), [["houses_lst", "rooms"]], ) self.assertEqual(lst1, lst2) with self.assertNumQueries(3): lst2 = self.traverse_qs( Person.objects.prefetch_related( Prefetch("houses", to_attr="houses_lst"), Prefetch("houses_lst__rooms", to_attr="rooms_lst"), ), [["houses_lst", "rooms_lst"]], ) self.assertEqual(lst1, lst2) def test_generic_rel(self): bookmark = Bookmark.objects.create(url="http://www.djangoproject.com/") TaggedItem.objects.create(content_object=bookmark, tag="django") TaggedItem.objects.create( content_object=bookmark, favorite=bookmark, tag="python" ) # Control lookups. 
with self.assertNumQueries(4): lst1 = self.traverse_qs( Bookmark.objects.prefetch_related( "tags", "tags__content_object", "favorite_tags" ), [["tags", "content_object"], ["favorite_tags"]], ) # Test lookups. with self.assertNumQueries(4): lst2 = self.traverse_qs( Bookmark.objects.prefetch_related( Prefetch("tags", to_attr="tags_lst"), Prefetch("tags_lst__content_object"), Prefetch("favorite_tags"), ), [["tags_lst", "content_object"], ["favorite_tags"]], ) self.assertEqual(lst1, lst2) def test_traverse_single_item_property(self): # Control lookups. with self.assertNumQueries(5): lst1 = self.traverse_qs( Person.objects.prefetch_related( "houses__rooms", "primary_house__occupants__houses", ), [["primary_house", "occupants", "houses"]], ) # Test lookups. with self.assertNumQueries(5): lst2 = self.traverse_qs( Person.objects.prefetch_related( "houses__rooms", Prefetch("primary_house__occupants", to_attr="occupants_lst"), "primary_house__occupants_lst__houses", ), [["primary_house", "occupants_lst", "houses"]], ) self.assertEqual(lst1, lst2) def test_traverse_multiple_items_property(self): # Control lookups. with self.assertNumQueries(4): lst1 = self.traverse_qs( Person.objects.prefetch_related( "houses", "all_houses__occupants__houses", ), [["all_houses", "occupants", "houses"]], ) # Test lookups. with self.assertNumQueries(4): lst2 = self.traverse_qs( Person.objects.prefetch_related( "houses", Prefetch("all_houses__occupants", to_attr="occupants_lst"), "all_houses__occupants_lst__houses", ), [["all_houses", "occupants_lst", "houses"]], ) self.assertEqual(lst1, lst2) def test_custom_qs(self): # Test basic. with self.assertNumQueries(2): lst1 = list(Person.objects.prefetch_related("houses")) with self.assertNumQueries(2): lst2 = list( Person.objects.prefetch_related( Prefetch( "houses", queryset=House.objects.all(), to_attr="houses_lst" ) ) ) self.assertEqual( self.traverse_qs(lst1, [["houses"]]), self.traverse_qs(lst2, [["houses_lst"]]), ) # Test queryset filtering. with self.assertNumQueries(2): lst2 = list( Person.objects.prefetch_related( Prefetch( "houses", queryset=House.objects.filter( pk__in=[self.house1.pk, self.house3.pk] ), to_attr="houses_lst", ) ) ) self.assertEqual(len(lst2[0].houses_lst), 1) self.assertEqual(lst2[0].houses_lst[0], self.house1) self.assertEqual(len(lst2[1].houses_lst), 1) self.assertEqual(lst2[1].houses_lst[0], self.house3) # Test flattened. with self.assertNumQueries(3): lst1 = list(Person.objects.prefetch_related("houses__rooms")) with self.assertNumQueries(3): lst2 = list( Person.objects.prefetch_related( Prefetch( "houses__rooms", queryset=Room.objects.all(), to_attr="rooms_lst", ) ) ) self.assertEqual( self.traverse_qs(lst1, [["houses", "rooms"]]), self.traverse_qs(lst2, [["houses", "rooms_lst"]]), ) # Test inner select_related. with self.assertNumQueries(3): lst1 = list(Person.objects.prefetch_related("houses__owner")) with self.assertNumQueries(2): lst2 = list( Person.objects.prefetch_related( Prefetch("houses", queryset=House.objects.select_related("owner")) ) ) self.assertEqual( self.traverse_qs(lst1, [["houses", "owner"]]), self.traverse_qs(lst2, [["houses", "owner"]]), ) # Test inner prefetch. 
inner_rooms_qs = Room.objects.filter(pk__in=[self.room1_1.pk, self.room1_2.pk]) houses_qs_prf = House.objects.prefetch_related( Prefetch("rooms", queryset=inner_rooms_qs, to_attr="rooms_lst") ) with self.assertNumQueries(4): lst2 = list( Person.objects.prefetch_related( Prefetch( "houses", queryset=houses_qs_prf.filter(pk=self.house1.pk), to_attr="houses_lst", ), Prefetch("houses_lst__rooms_lst__main_room_of"), ) ) self.assertEqual(len(lst2[0].houses_lst[0].rooms_lst), 2) self.assertEqual(lst2[0].houses_lst[0].rooms_lst[0], self.room1_1) self.assertEqual(lst2[0].houses_lst[0].rooms_lst[1], self.room1_2) self.assertEqual(lst2[0].houses_lst[0].rooms_lst[0].main_room_of, self.house1) self.assertEqual(len(lst2[1].houses_lst), 0) # Test ForwardManyToOneDescriptor. houses = House.objects.select_related("owner") with self.assertNumQueries(6): rooms = Room.objects.prefetch_related("house") lst1 = self.traverse_qs(rooms, [["house", "owner"]]) with self.assertNumQueries(2): rooms = Room.objects.prefetch_related(Prefetch("house", queryset=houses)) lst2 = self.traverse_qs(rooms, [["house", "owner"]]) self.assertEqual(lst1, lst2) with self.assertNumQueries(2): houses = House.objects.select_related("owner") rooms = Room.objects.prefetch_related( Prefetch("house", queryset=houses, to_attr="house_attr") ) lst2 = self.traverse_qs(rooms, [["house_attr", "owner"]]) self.assertEqual(lst1, lst2) room = Room.objects.prefetch_related( Prefetch("house", queryset=houses.filter(address="DoesNotExist")) ).first() with self.assertRaises(ObjectDoesNotExist): getattr(room, "house") room = Room.objects.prefetch_related( Prefetch( "house", queryset=houses.filter(address="DoesNotExist"), to_attr="house_attr", ) ).first() self.assertIsNone(room.house_attr) rooms = Room.objects.prefetch_related( Prefetch("house", queryset=House.objects.only("name")) ) with self.assertNumQueries(2): getattr(rooms.first().house, "name") with self.assertNumQueries(3): getattr(rooms.first().house, "address") # Test ReverseOneToOneDescriptor. houses = House.objects.select_related("owner") with self.assertNumQueries(6): rooms = Room.objects.prefetch_related("main_room_of") lst1 = self.traverse_qs(rooms, [["main_room_of", "owner"]]) with self.assertNumQueries(2): rooms = Room.objects.prefetch_related( Prefetch("main_room_of", queryset=houses) ) lst2 = self.traverse_qs(rooms, [["main_room_of", "owner"]]) self.assertEqual(lst1, lst2) with self.assertNumQueries(2): rooms = list( Room.objects.prefetch_related( Prefetch( "main_room_of", queryset=houses, to_attr="main_room_of_attr", ) ) ) lst2 = self.traverse_qs(rooms, [["main_room_of_attr", "owner"]]) self.assertEqual(lst1, lst2) room = ( Room.objects.filter(main_room_of__isnull=False) .prefetch_related( Prefetch("main_room_of", queryset=houses.filter(address="DoesNotExist")) ) .first() ) with self.assertRaises(ObjectDoesNotExist): getattr(room, "main_room_of") room = ( Room.objects.filter(main_room_of__isnull=False) .prefetch_related( Prefetch( "main_room_of", queryset=houses.filter(address="DoesNotExist"), to_attr="main_room_of_attr", ) ) .first() ) self.assertIsNone(room.main_room_of_attr) # The custom queryset filters should be applied to the queryset # instance returned by the manager. 
person = Person.objects.prefetch_related( Prefetch("houses", queryset=House.objects.filter(name="House 1")), ).get(pk=self.person1.pk) self.assertEqual( list(person.houses.all()), list(person.houses.all().all()), ) def test_nested_prefetch_related_are_not_overwritten(self): # Regression test for #24873 houses_2 = House.objects.prefetch_related(Prefetch("rooms")) persons = Person.objects.prefetch_related(Prefetch("houses", queryset=houses_2)) houses = House.objects.prefetch_related(Prefetch("occupants", queryset=persons)) list(houses) # queryset must be evaluated once to reproduce the bug. self.assertEqual( houses.all()[0].occupants.all()[0].houses.all()[1].rooms.all()[0], self.room2_1, ) def test_nested_prefetch_related_with_duplicate_prefetcher(self): """ Nested prefetches whose name clashes with descriptor names (Person.houses here) are allowed. """ occupants = Person.objects.prefetch_related( Prefetch("houses", to_attr="some_attr_name"), Prefetch("houses", queryset=House.objects.prefetch_related("main_room")), ) houses = House.objects.prefetch_related( Prefetch("occupants", queryset=occupants) ) with self.assertNumQueries(5): self.traverse_qs(list(houses), [["occupants", "houses", "main_room"]]) def test_values_queryset(self): msg = "Prefetch querysets cannot use raw(), values(), and values_list()." with self.assertRaisesMessage(ValueError, msg): Prefetch("houses", House.objects.values("pk")) with self.assertRaisesMessage(ValueError, msg): Prefetch("houses", House.objects.values_list("pk")) # That error doesn't affect managers with custom ModelIterable subclasses self.assertIs( Teacher.objects_custom.all()._iterable_class, ModelIterableSubclass ) Prefetch("teachers", Teacher.objects_custom.all()) def test_raw_queryset(self): msg = "Prefetch querysets cannot use raw(), values(), and values_list()." with self.assertRaisesMessage(ValueError, msg): Prefetch("houses", House.objects.raw("select pk from house")) def test_to_attr_doesnt_cache_through_attr_as_list(self): house = House.objects.prefetch_related( Prefetch("rooms", queryset=Room.objects.all(), to_attr="to_rooms"), ).get(pk=self.house3.pk) self.assertIsInstance(house.rooms.all(), QuerySet) def test_to_attr_cached_property(self): persons = Person.objects.prefetch_related( Prefetch("houses", House.objects.all(), to_attr="cached_all_houses"), ) for person in persons: # To bypass caching at the related descriptor level, don't use # person.houses.all() here. all_houses = list(House.objects.filter(occupants=person)) with self.assertNumQueries(0): self.assertEqual(person.cached_all_houses, all_houses) def test_filter_deferred(self): """ Related filtering of prefetched querysets is deferred until necessary. 
""" add_q = Query.add_q with mock.patch.object( Query, "add_q", autospec=True, side_effect=lambda self, q: add_q(self, q), ) as add_q_mock: list( House.objects.prefetch_related( Prefetch("occupants", queryset=Person.objects.all()) ) ) self.assertEqual(add_q_mock.call_count, 1) class DefaultManagerTests(TestCase): @classmethod def setUpTestData(cls): cls.qual1 = Qualification.objects.create(name="BA") cls.qual2 = Qualification.objects.create(name="BSci") cls.qual3 = Qualification.objects.create(name="MA") cls.qual4 = Qualification.objects.create(name="PhD") cls.teacher1 = Teacher.objects.create(name="Mr Cleese") cls.teacher2 = Teacher.objects.create(name="Mr Idle") cls.teacher3 = Teacher.objects.create(name="Mr Chapman") cls.teacher1.qualifications.add(cls.qual1, cls.qual2, cls.qual3, cls.qual4) cls.teacher2.qualifications.add(cls.qual1) cls.teacher3.qualifications.add(cls.qual2) cls.dept1 = Department.objects.create(name="English") cls.dept2 = Department.objects.create(name="Physics") cls.dept1.teachers.add(cls.teacher1, cls.teacher2) cls.dept2.teachers.add(cls.teacher1, cls.teacher3) def test_m2m_then_m2m(self): with self.assertNumQueries(3): # When we prefetch the teachers, and force the query, we don't want # the default manager on teachers to immediately get all the related # qualifications, since this will do one query per teacher. qs = Department.objects.prefetch_related("teachers") depts = "".join( "%s department: %s\n" % (dept.name, ", ".join(str(t) for t in dept.teachers.all())) for dept in qs ) self.assertEqual( depts, "English department: Mr Cleese (BA, BSci, MA, PhD), Mr Idle (BA)\n" "Physics department: Mr Cleese (BA, BSci, MA, PhD), Mr Chapman " "(BSci)\n", ) class GenericRelationTests(TestCase): @classmethod def setUpTestData(cls): book1 = Book.objects.create(title="Winnie the Pooh") book2 = Book.objects.create(title="Do you like green eggs and spam?") book3 = Book.objects.create(title="Three Men In A Boat") reader1 = Reader.objects.create(name="me") reader2 = Reader.objects.create(name="you") reader3 = Reader.objects.create(name="someone") book1.read_by.add(reader1, reader2) book2.read_by.add(reader2) book3.read_by.add(reader3) cls.book1, cls.book2, cls.book3 = book1, book2, book3 cls.reader1, cls.reader2, cls.reader3 = reader1, reader2, reader3 def test_prefetch_GFK(self): TaggedItem.objects.create(tag="awesome", content_object=self.book1) TaggedItem.objects.create(tag="great", content_object=self.reader1) TaggedItem.objects.create(tag="outstanding", content_object=self.book2) TaggedItem.objects.create(tag="amazing", content_object=self.reader3) # 1 for TaggedItem table, 1 for Book table, 1 for Reader table with self.assertNumQueries(3): qs = TaggedItem.objects.prefetch_related("content_object") list(qs) def test_prefetch_GFK_nonint_pk(self): Comment.objects.create(comment="awesome", content_object=self.book1) # 1 for Comment table, 1 for Book table with self.assertNumQueries(2): qs = Comment.objects.prefetch_related("content_object") [c.content_object for c in qs] def test_prefetch_GFK_uuid_pk(self): article = Article.objects.create(name="Django") Comment.objects.create(comment="awesome", content_object_uuid=article) qs = Comment.objects.prefetch_related("content_object_uuid") self.assertEqual([c.content_object_uuid for c in qs], [article]) def test_prefetch_GFK_fk_pk(self): book = Book.objects.create(title="Poems") book_with_year = BookWithYear.objects.create(book=book, published_year=2019) Comment.objects.create(comment="awesome", content_object=book_with_year) qs = 
Comment.objects.prefetch_related("content_object") self.assertEqual([c.content_object for c in qs], [book_with_year]) def test_traverse_GFK(self): """ A 'content_object' can be traversed with prefetch_related() and get to related objects on the other side (assuming it is suitably filtered) """ TaggedItem.objects.create(tag="awesome", content_object=self.book1) TaggedItem.objects.create(tag="awesome", content_object=self.book2) TaggedItem.objects.create(tag="awesome", content_object=self.book3) TaggedItem.objects.create(tag="awesome", content_object=self.reader1) TaggedItem.objects.create(tag="awesome", content_object=self.reader2) ct = ContentType.objects.get_for_model(Book) # We get 3 queries - 1 for main query, 1 for content_objects since they # all use the same table, and 1 for the 'read_by' relation. with self.assertNumQueries(3): # If we limit to books, we know that they will have 'read_by' # attributes, so the following makes sense: qs = TaggedItem.objects.filter( content_type=ct, tag="awesome" ).prefetch_related("content_object__read_by") readers_of_awesome_books = { r.name for tag in qs for r in tag.content_object.read_by.all() } self.assertEqual(readers_of_awesome_books, {"me", "you", "someone"}) def test_nullable_GFK(self): TaggedItem.objects.create( tag="awesome", content_object=self.book1, created_by=self.reader1 ) TaggedItem.objects.create(tag="great", content_object=self.book2) TaggedItem.objects.create(tag="rubbish", content_object=self.book3) with self.assertNumQueries(2): result = [ t.created_by for t in TaggedItem.objects.prefetch_related("created_by") ] self.assertEqual(result, [t.created_by for t in TaggedItem.objects.all()]) def test_generic_relation(self): bookmark = Bookmark.objects.create(url="http://www.djangoproject.com/") TaggedItem.objects.create(content_object=bookmark, tag="django") TaggedItem.objects.create(content_object=bookmark, tag="python") with self.assertNumQueries(2): tags = [ t.tag for b in Bookmark.objects.prefetch_related("tags") for t in b.tags.all() ] self.assertEqual(sorted(tags), ["django", "python"]) def test_charfield_GFK(self): b = Bookmark.objects.create(url="http://www.djangoproject.com/") TaggedItem.objects.create(content_object=b, tag="django") TaggedItem.objects.create(content_object=b, favorite=b, tag="python") with self.assertNumQueries(3): bookmark = Bookmark.objects.filter(pk=b.pk).prefetch_related( "tags", "favorite_tags" )[0] self.assertEqual( sorted(i.tag for i in bookmark.tags.all()), ["django", "python"] ) self.assertEqual([i.tag for i in bookmark.favorite_tags.all()], ["python"]) def test_custom_queryset(self): bookmark = Bookmark.objects.create(url="http://www.djangoproject.com/") django_tag = TaggedItem.objects.create(content_object=bookmark, tag="django") TaggedItem.objects.create(content_object=bookmark, tag="python") with self.assertNumQueries(2): bookmark = Bookmark.objects.prefetch_related( Prefetch("tags", TaggedItem.objects.filter(tag="django")), ).get() with self.assertNumQueries(0): self.assertEqual(list(bookmark.tags.all()), [django_tag]) # The custom queryset filters should be applied to the queryset # instance returned by the manager. 
self.assertEqual(list(bookmark.tags.all()), list(bookmark.tags.all().all())) def test_deleted_GFK(self): TaggedItem.objects.create(tag="awesome", content_object=self.book1) TaggedItem.objects.create(tag="awesome", content_object=self.book2) ct = ContentType.objects.get_for_model(Book) book1_pk = self.book1.pk self.book1.delete() with self.assertNumQueries(2): qs = TaggedItem.objects.filter(tag="awesome").prefetch_related( "content_object" ) result = [ (tag.object_id, tag.content_type_id, tag.content_object) for tag in qs ] self.assertEqual( result, [ (book1_pk, ct.pk, None), (self.book2.pk, ct.pk, self.book2), ], ) class MultiTableInheritanceTest(TestCase): @classmethod def setUpTestData(cls): cls.book1 = BookWithYear.objects.create(title="Poems", published_year=2010) cls.book2 = BookWithYear.objects.create(title="More poems", published_year=2011) cls.author1 = AuthorWithAge.objects.create( name="Jane", first_book=cls.book1, age=50 ) cls.author2 = AuthorWithAge.objects.create( name="Tom", first_book=cls.book1, age=49 ) cls.author3 = AuthorWithAge.objects.create( name="Robert", first_book=cls.book2, age=48 ) cls.author_address = AuthorAddress.objects.create( author=cls.author1, address="SomeStreet 1" ) cls.book2.aged_authors.add(cls.author2, cls.author3) cls.br1 = BookReview.objects.create(book=cls.book1, notes="review book1") cls.br2 = BookReview.objects.create(book=cls.book2, notes="review book2") def test_foreignkey(self): with self.assertNumQueries(2): qs = AuthorWithAge.objects.prefetch_related("addresses") addresses = [ [str(address) for address in obj.addresses.all()] for obj in qs ] self.assertEqual(addresses, [[str(self.author_address)], [], []]) def test_foreignkey_to_inherited(self): with self.assertNumQueries(2): qs = BookReview.objects.prefetch_related("book") titles = [obj.book.title for obj in qs] self.assertCountEqual(titles, ["Poems", "More poems"]) def test_m2m_to_inheriting_model(self): qs = AuthorWithAge.objects.prefetch_related("books_with_year") with self.assertNumQueries(2): lst = [ [str(book) for book in author.books_with_year.all()] for author in qs ] qs = AuthorWithAge.objects.all() lst2 = [[str(book) for book in author.books_with_year.all()] for author in qs] self.assertEqual(lst, lst2) qs = BookWithYear.objects.prefetch_related("aged_authors") with self.assertNumQueries(2): lst = [[str(author) for author in book.aged_authors.all()] for book in qs] qs = BookWithYear.objects.all() lst2 = [[str(author) for author in book.aged_authors.all()] for book in qs] self.assertEqual(lst, lst2) def test_parent_link_prefetch(self): with self.assertNumQueries(2): [a.author for a in AuthorWithAge.objects.prefetch_related("author")] @override_settings(DEBUG=True) def test_child_link_prefetch(self): with self.assertNumQueries(2): authors = [ a.authorwithage for a in Author.objects.prefetch_related("authorwithage") ] # Regression for #18090: the prefetching query must include an IN clause. # Note that on Oracle the table name is upper case in the generated SQL, # thus the .lower() call. 
self.assertIn("authorwithage", connection.queries[-1]["sql"].lower()) self.assertIn(" IN ", connection.queries[-1]["sql"]) self.assertEqual(authors, [a.authorwithage for a in Author.objects.all()]) class ForeignKeyToFieldTest(TestCase): @classmethod def setUpTestData(cls): cls.book = Book.objects.create(title="Poems") cls.author1 = Author.objects.create(name="Jane", first_book=cls.book) cls.author2 = Author.objects.create(name="Tom", first_book=cls.book) cls.author3 = Author.objects.create(name="Robert", first_book=cls.book) cls.author_address = AuthorAddress.objects.create( author=cls.author1, address="SomeStreet 1" ) FavoriteAuthors.objects.create(author=cls.author1, likes_author=cls.author2) FavoriteAuthors.objects.create(author=cls.author2, likes_author=cls.author3) FavoriteAuthors.objects.create(author=cls.author3, likes_author=cls.author1) def test_foreignkey(self): with self.assertNumQueries(2): qs = Author.objects.prefetch_related("addresses") addresses = [ [str(address) for address in obj.addresses.all()] for obj in qs ] self.assertEqual(addresses, [[str(self.author_address)], [], []]) def test_m2m(self): with self.assertNumQueries(3): qs = Author.objects.prefetch_related("favorite_authors", "favors_me") favorites = [ ( [str(i_like) for i_like in author.favorite_authors.all()], [str(likes_me) for likes_me in author.favors_me.all()], ) for author in qs ] self.assertEqual( favorites, [ ([str(self.author2)], [str(self.author3)]), ([str(self.author3)], [str(self.author1)]), ([str(self.author1)], [str(self.author2)]), ], ) class LookupOrderingTest(TestCase): """ Test cases that demonstrate that ordering of lookups is important, and ensure it is preserved. """ @classmethod def setUpTestData(cls): person1 = Person.objects.create(name="Joe") person2 = Person.objects.create(name="Mary") # Set main_room for each house before creating the next one for # databases where supports_nullable_unique_constraints is False. 
house1 = House.objects.create(address="123 Main St") room1_1 = Room.objects.create(name="Dining room", house=house1) Room.objects.create(name="Lounge", house=house1) Room.objects.create(name="Kitchen", house=house1) house1.main_room = room1_1 house1.save() person1.houses.add(house1) house2 = House.objects.create(address="45 Side St") room2_1 = Room.objects.create(name="Dining room", house=house2) Room.objects.create(name="Lounge", house=house2) house2.main_room = room2_1 house2.save() person1.houses.add(house2) house3 = House.objects.create(address="6 Downing St") room3_1 = Room.objects.create(name="Dining room", house=house3) Room.objects.create(name="Lounge", house=house3) Room.objects.create(name="Kitchen", house=house3) house3.main_room = room3_1 house3.save() person2.houses.add(house3) house4 = House.objects.create(address="7 Regents St") room4_1 = Room.objects.create(name="Dining room", house=house4) Room.objects.create(name="Lounge", house=house4) house4.main_room = room4_1 house4.save() person2.houses.add(house4) def test_order(self): with self.assertNumQueries(4): # The following two queries must be done in the same order as written, # otherwise 'primary_house' will cause non-prefetched lookups qs = Person.objects.prefetch_related( "houses__rooms", "primary_house__occupants" ) [list(p.primary_house.occupants.all()) for p in qs] class NullableTest(TestCase): @classmethod def setUpTestData(cls): boss = Employee.objects.create(name="Peter") Employee.objects.create(name="Joe", boss=boss) Employee.objects.create(name="Angela", boss=boss) def test_traverse_nullable(self): # Because we use select_related() for 'boss', it doesn't need to be # prefetched, but we can still traverse it although it contains some nulls with self.assertNumQueries(2): qs = Employee.objects.select_related("boss").prefetch_related("boss__serfs") co_serfs = [ list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs ] qs2 = Employee.objects.select_related("boss") co_serfs2 = [ list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2 ] self.assertEqual(co_serfs, co_serfs2) def test_prefetch_nullable(self): # One for main employee, one for boss, one for serfs with self.assertNumQueries(3): qs = Employee.objects.prefetch_related("boss__serfs") co_serfs = [ list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs ] qs2 = Employee.objects.all() co_serfs2 = [ list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2 ] self.assertEqual(co_serfs, co_serfs2) def test_in_bulk(self): """ In-bulk does correctly prefetch objects by not using .iterator() directly. """ boss1 = Employee.objects.create(name="Peter") boss2 = Employee.objects.create(name="Jack") with self.assertNumQueries(2): # Prefetch is done and it does not cause any errors. 
bulk = Employee.objects.prefetch_related("serfs").in_bulk( [boss1.pk, boss2.pk] ) for b in bulk.values(): list(b.serfs.all()) class MultiDbTests(TestCase): databases = {"default", "other"} def test_using_is_honored_m2m(self): B = Book.objects.using("other") A = Author.objects.using("other") book1 = B.create(title="Poems") book2 = B.create(title="Jane Eyre") book3 = B.create(title="Wuthering Heights") book4 = B.create(title="Sense and Sensibility") author1 = A.create(name="Charlotte", first_book=book1) author2 = A.create(name="Anne", first_book=book1) author3 = A.create(name="Emily", first_book=book1) author4 = A.create(name="Jane", first_book=book4) book1.authors.add(author1, author2, author3) book2.authors.add(author1) book3.authors.add(author3) book4.authors.add(author4) # Forward qs1 = B.prefetch_related("authors") with self.assertNumQueries(2, using="other"): books = "".join( "%s (%s)\n" % (book.title, ", ".join(a.name for a in book.authors.all())) for book in qs1 ) self.assertEqual( books, "Poems (Charlotte, Anne, Emily)\n" "Jane Eyre (Charlotte)\n" "Wuthering Heights (Emily)\n" "Sense and Sensibility (Jane)\n", ) # Reverse qs2 = A.prefetch_related("books") with self.assertNumQueries(2, using="other"): authors = "".join( "%s: %s\n" % (author.name, ", ".join(b.title for b in author.books.all())) for author in qs2 ) self.assertEqual( authors, "Charlotte: Poems, Jane Eyre\n" "Anne: Poems\n" "Emily: Poems, Wuthering Heights\n" "Jane: Sense and Sensibility\n", ) def test_using_is_honored_fkey(self): B = Book.objects.using("other") A = Author.objects.using("other") book1 = B.create(title="Poems") book2 = B.create(title="Sense and Sensibility") A.create(name="Charlotte Bronte", first_book=book1) A.create(name="Jane Austen", first_book=book2) # Forward with self.assertNumQueries(2, using="other"): books = ", ".join( a.first_book.title for a in A.prefetch_related("first_book") ) self.assertEqual("Poems, Sense and Sensibility", books) # Reverse with self.assertNumQueries(2, using="other"): books = "".join( "%s (%s)\n" % (b.title, ", ".join(a.name for a in b.first_time_authors.all())) for b in B.prefetch_related("first_time_authors") ) self.assertEqual( books, "Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n", ) def test_using_is_honored_inheritance(self): B = BookWithYear.objects.using("other") A = AuthorWithAge.objects.using("other") book1 = B.create(title="Poems", published_year=2010) B.create(title="More poems", published_year=2011) A.create(name="Jane", first_book=book1, age=50) A.create(name="Tom", first_book=book1, age=49) # parent link with self.assertNumQueries(2, using="other"): authors = ", ".join(a.author.name for a in A.prefetch_related("author")) self.assertEqual(authors, "Jane, Tom") # child link with self.assertNumQueries(2, using="other"): ages = ", ".join( str(a.authorwithage.age) for a in A.prefetch_related("authorwithage") ) self.assertEqual(ages, "50, 49") def test_using_is_honored_custom_qs(self): B = Book.objects.using("other") A = Author.objects.using("other") book1 = B.create(title="Poems") book2 = B.create(title="Sense and Sensibility") A.create(name="Charlotte Bronte", first_book=book1) A.create(name="Jane Austen", first_book=book2) # Implicit hinting with self.assertNumQueries(2, using="other"): prefetch = Prefetch("first_time_authors", queryset=Author.objects.all()) books = "".join( "%s (%s)\n" % (b.title, ", ".join(a.name for a in b.first_time_authors.all())) for b in B.prefetch_related(prefetch) ) self.assertEqual( books, "Poems (Charlotte 
Bronte)\nSense and Sensibility (Jane Austen)\n", ) # Explicit using on the same db. with self.assertNumQueries(2, using="other"): prefetch = Prefetch( "first_time_authors", queryset=Author.objects.using("other") ) books = "".join( "%s (%s)\n" % (b.title, ", ".join(a.name for a in b.first_time_authors.all())) for b in B.prefetch_related(prefetch) ) self.assertEqual( books, "Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n", ) # Explicit using on a different db. with self.assertNumQueries(1, using="default"), self.assertNumQueries( 1, using="other" ): prefetch = Prefetch( "first_time_authors", queryset=Author.objects.using("default") ) books = "".join( "%s (%s)\n" % (b.title, ", ".join(a.name for a in b.first_time_authors.all())) for b in B.prefetch_related(prefetch) ) self.assertEqual(books, "Poems ()\n" "Sense and Sensibility ()\n") class Ticket19607Tests(TestCase): @classmethod def setUpTestData(cls): LessonEntry.objects.bulk_create( LessonEntry(id=id_, name1=name1, name2=name2) for id_, name1, name2 in [ (1, "einfach", "simple"), (2, "schwierig", "difficult"), ] ) WordEntry.objects.bulk_create( WordEntry(id=id_, lesson_entry_id=lesson_entry_id, name=name) for id_, lesson_entry_id, name in [ (1, 1, "einfach"), (2, 1, "simple"), (3, 2, "schwierig"), (4, 2, "difficult"), ] ) def test_bug(self): list( WordEntry.objects.prefetch_related( "lesson_entry", "lesson_entry__wordentry_set" ) ) class Ticket21410Tests(TestCase): @classmethod def setUpTestData(cls): book1 = Book.objects.create(title="Poems") book2 = Book.objects.create(title="Jane Eyre") book3 = Book.objects.create(title="Wuthering Heights") book4 = Book.objects.create(title="Sense and Sensibility") author1 = Author2.objects.create(name="Charlotte", first_book=book1) author2 = Author2.objects.create(name="Anne", first_book=book1) author3 = Author2.objects.create(name="Emily", first_book=book1) author4 = Author2.objects.create(name="Jane", first_book=book4) author1.favorite_books.add(book1, book2, book3) author2.favorite_books.add(book1) author3.favorite_books.add(book2) author4.favorite_books.add(book3) def test_bug(self): list(Author2.objects.prefetch_related("first_book", "favorite_books")) class Ticket21760Tests(TestCase): @classmethod def setUpTestData(cls): cls.rooms = [] for _ in range(3): house = House.objects.create() for _ in range(3): cls.rooms.append(Room.objects.create(house=house)) # Set main_room for each house before creating the next one for # databases where supports_nullable_unique_constraints is False. house.main_room = cls.rooms[-3] house.save() def test_bug(self): prefetcher = get_prefetcher(self.rooms[0], "house", "house")[0] queryset = prefetcher.get_prefetch_queryset(list(Room.objects.all()))[0] self.assertNotIn(" JOIN ", str(queryset.query)) class DirectPrefetchedObjectCacheReuseTests(TestCase): """ prefetch_related() reuses objects fetched in _prefetched_objects_cache. When objects are prefetched and not stored as an instance attribute (often intermediary relationships), they are saved to the _prefetched_objects_cache attribute. prefetch_related() takes _prefetched_objects_cache into account when determining whether an object has been fetched[1] and retrieves results from it when it is populated [2]. 
[1]: #25546 (duplicate queries on nested Prefetch) [2]: #27554 (queryset evaluation fails with a mix of nested and flattened prefetches) """ @classmethod def setUpTestData(cls): cls.book1, cls.book2 = [ Book.objects.create(title="book1"), Book.objects.create(title="book2"), ] cls.author11, cls.author12, cls.author21 = [ Author.objects.create(first_book=cls.book1, name="Author11"), Author.objects.create(first_book=cls.book1, name="Author12"), Author.objects.create(first_book=cls.book2, name="Author21"), ] cls.author1_address1, cls.author1_address2, cls.author2_address1 = [ AuthorAddress.objects.create(author=cls.author11, address="Happy place"), AuthorAddress.objects.create(author=cls.author12, address="Haunted house"), AuthorAddress.objects.create(author=cls.author21, address="Happy place"), ] cls.bookwithyear1 = BookWithYear.objects.create( title="Poems", published_year=2010 ) cls.bookreview1 = BookReview.objects.create(book=cls.bookwithyear1) def test_detect_is_fetched(self): """ Nested prefetch_related() shouldn't trigger duplicate queries for the same lookup. """ with self.assertNumQueries(3): books = Book.objects.filter(title__in=["book1", "book2"]).prefetch_related( Prefetch( "first_time_authors", Author.objects.prefetch_related( Prefetch( "addresses", AuthorAddress.objects.filter(address="Happy place"), ) ), ), ) book1, book2 = list(books) with self.assertNumQueries(0): self.assertSequenceEqual( book1.first_time_authors.all(), [self.author11, self.author12] ) self.assertSequenceEqual(book2.first_time_authors.all(), [self.author21]) self.assertSequenceEqual( book1.first_time_authors.all()[0].addresses.all(), [self.author1_address1], ) self.assertSequenceEqual( book1.first_time_authors.all()[1].addresses.all(), [] ) self.assertSequenceEqual( book2.first_time_authors.all()[0].addresses.all(), [self.author2_address1], ) self.assertEqual( list(book1.first_time_authors.all()), list(book1.first_time_authors.all().all()), ) self.assertEqual( list(book2.first_time_authors.all()), list(book2.first_time_authors.all().all()), ) self.assertEqual( list(book1.first_time_authors.all()[0].addresses.all()), list(book1.first_time_authors.all()[0].addresses.all().all()), ) self.assertEqual( list(book1.first_time_authors.all()[1].addresses.all()), list(book1.first_time_authors.all()[1].addresses.all().all()), ) self.assertEqual( list(book2.first_time_authors.all()[0].addresses.all()), list(book2.first_time_authors.all()[0].addresses.all().all()), ) def test_detect_is_fetched_with_to_attr(self): with self.assertNumQueries(3): books = Book.objects.filter(title__in=["book1", "book2"]).prefetch_related( Prefetch( "first_time_authors", Author.objects.prefetch_related( Prefetch( "addresses", AuthorAddress.objects.filter(address="Happy place"), to_attr="happy_place", ) ), to_attr="first_authors", ), ) book1, book2 = list(books) with self.assertNumQueries(0): self.assertEqual(book1.first_authors, [self.author11, self.author12]) self.assertEqual(book2.first_authors, [self.author21]) self.assertEqual( book1.first_authors[0].happy_place, [self.author1_address1] ) self.assertEqual(book1.first_authors[1].happy_place, []) self.assertEqual( book2.first_authors[0].happy_place, [self.author2_address1] ) def test_prefetch_reverse_foreign_key(self): with self.assertNumQueries(2): (bookwithyear1,) = BookWithYear.objects.prefetch_related("bookreview_set") with self.assertNumQueries(0): self.assertCountEqual( bookwithyear1.bookreview_set.all(), [self.bookreview1] ) with self.assertNumQueries(0): 
prefetch_related_objects([bookwithyear1], "bookreview_set") def test_add_clears_prefetched_objects(self): bookwithyear = BookWithYear.objects.get(pk=self.bookwithyear1.pk) prefetch_related_objects([bookwithyear], "bookreview_set") self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1]) new_review = BookReview.objects.create() bookwithyear.bookreview_set.add(new_review) self.assertCountEqual( bookwithyear.bookreview_set.all(), [self.bookreview1, new_review] ) def test_remove_clears_prefetched_objects(self): bookwithyear = BookWithYear.objects.get(pk=self.bookwithyear1.pk) prefetch_related_objects([bookwithyear], "bookreview_set") self.assertCountEqual(bookwithyear.bookreview_set.all(), [self.bookreview1]) bookwithyear.bookreview_set.remove(self.bookreview1) self.assertCountEqual(bookwithyear.bookreview_set.all(), []) class ReadPrefetchedObjectsCacheTests(TestCase): @classmethod def setUpTestData(cls): cls.book1 = Book.objects.create(title="Les confessions Volume I") cls.book2 = Book.objects.create(title="Candide") cls.author1 = AuthorWithAge.objects.create( name="Rousseau", first_book=cls.book1, age=70 ) cls.author2 = AuthorWithAge.objects.create( name="Voltaire", first_book=cls.book2, age=65 ) cls.book1.authors.add(cls.author1) cls.book2.authors.add(cls.author2) FavoriteAuthors.objects.create(author=cls.author1, likes_author=cls.author2) def test_retrieves_results_from_prefetched_objects_cache(self): """ When intermediary results are prefetched without a destination attribute, they are saved in the RelatedManager's cache (_prefetched_objects_cache). prefetch_related() uses this cache (#27554). """ authors = AuthorWithAge.objects.prefetch_related( Prefetch( "author", queryset=Author.objects.prefetch_related( # Results are saved in the RelatedManager's cache # (_prefetched_objects_cache) and do not replace the # RelatedManager on Author instances (favorite_authors) Prefetch("favorite_authors__first_book"), ), ), ) with self.assertNumQueries(4): # AuthorWithAge -> Author -> FavoriteAuthors, Book self.assertSequenceEqual(authors, [self.author1, self.author2]) class NestedPrefetchTests(TestCase): @classmethod def setUpTestData(cls): house = House.objects.create(name="Big house", address="123 Main St") cls.room = Room.objects.create(name="Kitchen", house=house) def test_nested_prefetch_is_not_overwritten_by_related_object(self): """ The prefetched relationship is used rather than populating the reverse relationship from the parent, when prefetching a set of child objects related to a set of parent objects and the child queryset itself specifies a prefetch back to the parent. 
""" queryset = House.objects.only("name").prefetch_related( Prefetch( "rooms", queryset=Room.objects.prefetch_related( Prefetch("house", queryset=House.objects.only("address")), ), ), ) with self.assertNumQueries(3): house = queryset.first() self.assertIs(Room.house.is_cached(self.room), True) with self.assertNumQueries(0): house.rooms.first().house.address class PrefetchLimitTests(TestDataMixin, TestCase): @skipUnlessDBFeature("supports_over_clause") def test_m2m_forward(self): authors = Author.objects.all() # Meta.ordering with self.assertNumQueries(3): books = list( Book.objects.prefetch_related( Prefetch("authors", authors), Prefetch("authors", authors[1:], to_attr="authors_sliced"), ) ) for book in books: with self.subTest(book=book): self.assertEqual(book.authors_sliced, list(book.authors.all())[1:]) @skipUnlessDBFeature("supports_over_clause") def test_m2m_reverse(self): books = Book.objects.order_by("title") with self.assertNumQueries(3): authors = list( Author.objects.prefetch_related( Prefetch("books", books), Prefetch("books", books[1:2], to_attr="books_sliced"), ) ) for author in authors: with self.subTest(author=author): self.assertEqual(author.books_sliced, list(author.books.all())[1:2]) @skipUnlessDBFeature("supports_over_clause") def test_foreignkey_reverse(self): authors = Author.objects.order_by("-name") with self.assertNumQueries(3): books = list( Book.objects.prefetch_related( Prefetch( "first_time_authors", authors, ), Prefetch( "first_time_authors", authors[1:], to_attr="first_time_authors_sliced", ), ) ) for book in books: with self.subTest(book=book): self.assertEqual( book.first_time_authors_sliced, list(book.first_time_authors.all())[1:], ) @skipUnlessDBFeature("supports_over_clause") def test_reverse_ordering(self): authors = Author.objects.reverse() # Reverse Meta.ordering with self.assertNumQueries(3): books = list( Book.objects.prefetch_related( Prefetch("authors", authors), Prefetch("authors", authors[1:], to_attr="authors_sliced"), ) ) for book in books: with self.subTest(book=book): self.assertEqual(book.authors_sliced, list(book.authors.all())[1:]) @skipIfDBFeature("supports_over_clause") def test_window_not_supported(self): authors = Author.objects.all() msg = ( "Prefetching from a limited queryset is only supported on backends that " "support window functions." ) with self.assertRaisesMessage(NotSupportedError, msg): list(Book.objects.prefetch_related(Prefetch("authors", authors[1:])))
cf90a3451ef4fc5c1e7e8b90627eac4046ac077acab41424928657e21dc509eb
from django.db import DatabaseError, connection from django.db.models import Index from django.test import TransactionTestCase, skipUnlessDBFeature from .models import ( Article, ArticleReporter, CheckConstraintModel, City, Comment, Country, DbCommentModel, District, Reporter, UniqueConstraintConditionModel, ) class IntrospectionTests(TransactionTestCase): available_apps = ["introspection"] def test_table_names(self): tl = connection.introspection.table_names() self.assertEqual(tl, sorted(tl)) self.assertIn( Reporter._meta.db_table, tl, "'%s' isn't in table_list()." % Reporter._meta.db_table, ) self.assertIn( Article._meta.db_table, tl, "'%s' isn't in table_list()." % Article._meta.db_table, ) def test_django_table_names(self): with connection.cursor() as cursor: cursor.execute("CREATE TABLE django_ixn_test_table (id INTEGER);") tl = connection.introspection.django_table_names() cursor.execute("DROP TABLE django_ixn_test_table;") self.assertNotIn( "django_ixn_test_table", tl, "django_table_names() returned a non-Django table", ) def test_django_table_names_retval_type(self): # Table name is a list #15216 tl = connection.introspection.django_table_names(only_existing=True) self.assertIs(type(tl), list) tl = connection.introspection.django_table_names(only_existing=False) self.assertIs(type(tl), list) def test_table_names_with_views(self): with connection.cursor() as cursor: try: cursor.execute( "CREATE VIEW introspection_article_view AS SELECT headline " "from introspection_article;" ) except DatabaseError as e: if "insufficient privileges" in str(e): self.fail("The test user has no CREATE VIEW privileges") else: raise try: self.assertIn( "introspection_article_view", connection.introspection.table_names(include_views=True), ) self.assertNotIn( "introspection_article_view", connection.introspection.table_names() ) finally: with connection.cursor() as cursor: cursor.execute("DROP VIEW introspection_article_view") def test_unmanaged_through_model(self): tables = connection.introspection.django_table_names() self.assertNotIn(ArticleReporter._meta.db_table, tables) def test_installed_models(self): tables = [Article._meta.db_table, Reporter._meta.db_table] models = connection.introspection.installed_models(tables) self.assertEqual(models, {Article, Reporter}) def test_sequence_list(self): sequences = connection.introspection.sequence_list() reporter_seqs = [ seq for seq in sequences if seq["table"] == Reporter._meta.db_table ] self.assertEqual( len(reporter_seqs), 1, "Reporter sequence not found in sequence_list()" ) self.assertEqual(reporter_seqs[0]["column"], "id") def test_get_table_description_names(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description( cursor, Reporter._meta.db_table ) self.assertEqual( [r[0] for r in desc], [f.column for f in Reporter._meta.fields] ) def test_get_table_description_types(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description( cursor, Reporter._meta.db_table ) self.assertEqual( [connection.introspection.get_field_type(r[1], r) for r in desc], [ connection.features.introspected_field_types[field] for field in ( "AutoField", "CharField", "CharField", "CharField", "BigIntegerField", "BinaryField", "SmallIntegerField", "DurationField", ) ], ) def test_get_table_description_col_lengths(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description( cursor, Reporter._meta.db_table ) self.assertEqual( [ r[2] for r in desc if 
connection.introspection.get_field_type(r[1], r) == "CharField" ], [30, 30, 254], ) def test_get_table_description_nullable(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description( cursor, Reporter._meta.db_table ) nullable_by_backend = connection.features.interprets_empty_strings_as_nulls self.assertEqual( [r[6] for r in desc], [ False, nullable_by_backend, nullable_by_backend, nullable_by_backend, True, True, False, False, ], ) def test_bigautofield(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description( cursor, City._meta.db_table ) self.assertIn( connection.features.introspected_field_types["BigAutoField"], [connection.introspection.get_field_type(r[1], r) for r in desc], ) def test_smallautofield(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description( cursor, Country._meta.db_table ) self.assertIn( connection.features.introspected_field_types["SmallAutoField"], [connection.introspection.get_field_type(r[1], r) for r in desc], ) @skipUnlessDBFeature("supports_comments") def test_db_comments(self): with connection.cursor() as cursor: desc = connection.introspection.get_table_description( cursor, DbCommentModel._meta.db_table ) table_list = connection.introspection.get_table_list(cursor) self.assertEqual( ["'Name' column comment"], [field.comment for field in desc if field.name == "name"], ) self.assertEqual( ["Custom table comment"], [ table.comment for table in table_list if table.name == "introspection_dbcommentmodel" ], ) # Regression test for #9991 - 'real' types in postgres @skipUnlessDBFeature("has_real_datatype") def test_postgresql_real_type(self): with connection.cursor() as cursor: cursor.execute("CREATE TABLE django_ixn_real_test_table (number REAL);") desc = connection.introspection.get_table_description( cursor, "django_ixn_real_test_table" ) cursor.execute("DROP TABLE django_ixn_real_test_table;") self.assertEqual( connection.introspection.get_field_type(desc[0][1], desc[0]), "FloatField" ) @skipUnlessDBFeature("can_introspect_foreign_keys") def test_get_relations(self): with connection.cursor() as cursor: relations = connection.introspection.get_relations( cursor, Article._meta.db_table ) # That's {field_name: (field_name_other_table, other_table)} expected_relations = { "reporter_id": ("id", Reporter._meta.db_table), "response_to_id": ("id", Article._meta.db_table), } self.assertEqual(relations, expected_relations) # Removing a field shouldn't disturb get_relations (#17785) body = Article._meta.get_field("body") with connection.schema_editor() as editor: editor.remove_field(Article, body) with connection.cursor() as cursor: relations = connection.introspection.get_relations( cursor, Article._meta.db_table ) with connection.schema_editor() as editor: editor.add_field(Article, body) self.assertEqual(relations, expected_relations) def test_get_primary_key_column(self): with connection.cursor() as cursor: primary_key_column = connection.introspection.get_primary_key_column( cursor, Article._meta.db_table ) pk_fk_column = connection.introspection.get_primary_key_column( cursor, District._meta.db_table ) self.assertEqual(primary_key_column, "id") self.assertEqual(pk_fk_column, "city_id") def test_get_constraints_index_types(self): with connection.cursor() as cursor: constraints = connection.introspection.get_constraints( cursor, Article._meta.db_table ) index = {} index2 = {} for val in constraints.values(): if val["columns"] == ["headline", "pub_date"]: 
index = val if val["columns"] == [ "headline", "response_to_id", "pub_date", "reporter_id", ]: index2 = val self.assertEqual(index["type"], Index.suffix) self.assertEqual(index2["type"], Index.suffix) @skipUnlessDBFeature("supports_index_column_ordering") def test_get_constraints_indexes_orders(self): """ Indexes have the 'orders' key with a list of 'ASC'/'DESC' values. """ with connection.cursor() as cursor: constraints = connection.introspection.get_constraints( cursor, Article._meta.db_table ) indexes_verified = 0 expected_columns = [ ["headline", "pub_date"], ["headline", "response_to_id", "pub_date", "reporter_id"], ] if connection.features.indexes_foreign_keys: expected_columns += [ ["reporter_id"], ["response_to_id"], ] for val in constraints.values(): if val["index"] and not (val["primary_key"] or val["unique"]): self.assertIn(val["columns"], expected_columns) self.assertEqual(val["orders"], ["ASC"] * len(val["columns"])) indexes_verified += 1 self.assertEqual(indexes_verified, len(expected_columns)) @skipUnlessDBFeature("supports_index_column_ordering", "supports_partial_indexes") def test_get_constraints_unique_indexes_orders(self): with connection.cursor() as cursor: constraints = connection.introspection.get_constraints( cursor, UniqueConstraintConditionModel._meta.db_table, ) self.assertIn("cond_name_without_color_uniq", constraints) constraint = constraints["cond_name_without_color_uniq"] self.assertIs(constraint["unique"], True) self.assertEqual(constraint["columns"], ["name"]) self.assertEqual(constraint["orders"], ["ASC"]) def test_get_constraints(self): def assertDetails( details, cols, primary_key=False, unique=False, index=False, check=False, foreign_key=None, ): # Different backends have different values for same constraints: # PRIMARY KEY UNIQUE CONSTRAINT UNIQUE INDEX # MySQL pk=1 uniq=1 idx=1 pk=0 uniq=1 idx=1 pk=0 uniq=1 idx=1 # PostgreSQL pk=1 uniq=1 idx=0 pk=0 uniq=1 idx=0 pk=0 uniq=1 idx=1 # SQLite pk=1 uniq=0 idx=0 pk=0 uniq=1 idx=0 pk=0 uniq=1 idx=1 if details["primary_key"]: details["unique"] = True if details["unique"]: details["index"] = False self.assertEqual(details["columns"], cols) self.assertEqual(details["primary_key"], primary_key) self.assertEqual(details["unique"], unique) self.assertEqual(details["index"], index) self.assertEqual(details["check"], check) self.assertEqual(details["foreign_key"], foreign_key) # Test custom constraints custom_constraints = { "article_email_pub_date_uniq", "email_pub_date_idx", } with connection.cursor() as cursor: constraints = connection.introspection.get_constraints( cursor, Comment._meta.db_table ) if ( connection.features.supports_column_check_constraints and connection.features.can_introspect_check_constraints ): constraints.update( connection.introspection.get_constraints( cursor, CheckConstraintModel._meta.db_table ) ) custom_constraints.add("up_votes_gte_0_check") assertDetails( constraints["up_votes_gte_0_check"], ["up_votes"], check=True ) assertDetails( constraints["article_email_pub_date_uniq"], ["article_id", "email", "pub_date"], unique=True, ) assertDetails( constraints["email_pub_date_idx"], ["email", "pub_date"], index=True ) # Test field constraints field_constraints = set() for name, details in constraints.items(): if name in custom_constraints: continue elif details["columns"] == ["up_votes"] and details["check"]: assertDetails(details, ["up_votes"], check=True) field_constraints.add(name) elif details["columns"] == ["voting_number"] and details["check"]: assertDetails(details, ["voting_number"], 
check=True) field_constraints.add(name) elif details["columns"] == ["ref"] and details["unique"]: assertDetails(details, ["ref"], unique=True) field_constraints.add(name) elif details["columns"] == ["voting_number"] and details["unique"]: assertDetails(details, ["voting_number"], unique=True) field_constraints.add(name) elif details["columns"] == ["article_id"] and details["index"]: assertDetails(details, ["article_id"], index=True) field_constraints.add(name) elif details["columns"] == ["id"] and details["primary_key"]: assertDetails(details, ["id"], primary_key=True, unique=True) field_constraints.add(name) elif details["columns"] == ["article_id"] and details["foreign_key"]: assertDetails( details, ["article_id"], foreign_key=("introspection_article", "id") ) field_constraints.add(name) elif details["check"]: # Some databases (e.g. Oracle) include additional check # constraints. field_constraints.add(name) # All constraints are accounted for. self.assertEqual( constraints.keys() ^ (custom_constraints | field_constraints), set() )
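# ---------------------------------------------------------------------------
# Editor's sketch (not part of the test module above): a small helper showing
# the introspection calls relied on by IntrospectionTests. It assumes a
# configured default database connection; `describe_table` and the table name
# passed to it are illustrative only.
# ---------------------------------------------------------------------------
from django.db import connection


def describe_table(table_name):
    """Print each column's Django field type and every constraint's columns."""
    with connection.cursor() as cursor:
        description = connection.introspection.get_table_description(
            cursor, table_name
        )
        constraints = connection.introspection.get_constraints(cursor, table_name)
    for column in description:
        # get_field_type() maps the backend's type code to a Django field name.
        field_type = connection.introspection.get_field_type(
            column.type_code, column
        )
        print(column.name, field_type, "NULL" if column.null_ok else "NOT NULL")
    for name, details in constraints.items():
        print(
            name,
            details["columns"],
            "unique" if details["unique"] else "",
            "index" if details["index"] else "",
        )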
7d1e54fb42c67e15d5b0e2a34672e599d16a26068ac77d03cb531827cbee8f54
import os import re import shutil import tempfile import time import warnings from io import StringIO from pathlib import Path from unittest import mock, skipIf, skipUnless from admin_scripts.tests import AdminScriptTestCase from django.core import management from django.core.management import execute_from_command_line from django.core.management.base import CommandError from django.core.management.commands.makemessages import Command as MakeMessagesCommand from django.core.management.commands.makemessages import write_pot_file from django.core.management.utils import find_command from django.test import SimpleTestCase, override_settings from django.test.utils import captured_stderr, captured_stdout from django.utils._os import symlinks_supported from django.utils.translation import TranslatorCommentWarning from .utils import POFileAssertionMixin, RunInTmpDirMixin, copytree LOCALE = "de" has_xgettext = find_command("xgettext") gettext_version = MakeMessagesCommand().gettext_version if has_xgettext else None requires_gettext_019 = skipIf( has_xgettext and gettext_version < (0, 19), "gettext 0.19 required" ) @skipUnless(has_xgettext, "xgettext is mandatory for extraction tests") class ExtractorTests(POFileAssertionMixin, RunInTmpDirMixin, SimpleTestCase): work_subdir = "commands" PO_FILE = "locale/%s/LC_MESSAGES/django.po" % LOCALE def _run_makemessages(self, **options): out = StringIO() management.call_command( "makemessages", locale=[LOCALE], verbosity=2, stdout=out, **options ) output = out.getvalue() self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() return output, po_contents def assertMsgIdPlural(self, msgid, haystack, use_quotes=True): return self._assertPoKeyword( "msgid_plural", msgid, haystack, use_quotes=use_quotes ) def assertMsgStr(self, msgstr, haystack, use_quotes=True): return self._assertPoKeyword("msgstr", msgstr, haystack, use_quotes=use_quotes) def assertNotMsgId(self, msgid, s, use_quotes=True): if use_quotes: msgid = '"%s"' % msgid msgid = re.escape(msgid) return self.assertTrue(not re.search("^msgid %s" % msgid, s, re.MULTILINE)) def _assertPoLocComment( self, assert_presence, po_filename, line_number, *comment_parts ): with open(po_filename) as fp: po_contents = fp.read() if os.name == "nt": # #: .\path\to\file.html:123 cwd_prefix = "%s%s" % (os.curdir, os.sep) else: # #: path/to/file.html:123 cwd_prefix = "" path = os.path.join(cwd_prefix, *comment_parts) parts = [path] if isinstance(line_number, str): line_number = self._get_token_line_number(path, line_number) if line_number is not None: parts.append(":%d" % line_number) needle = "".join(parts) pattern = re.compile(r"^\#\:.*" + re.escape(needle), re.MULTILINE) if assert_presence: return self.assertRegex( po_contents, pattern, '"%s" not found in final .po file.' % needle ) else: return self.assertNotRegex( po_contents, pattern, '"%s" shouldn\'t be in final .po file.' 
% needle ) def _get_token_line_number(self, path, token): with open(path) as f: for line, content in enumerate(f, 1): if token in content: return line self.fail( "The token '%s' could not be found in %s, please check the test config" % (token, path) ) def assertLocationCommentPresent(self, po_filename, line_number, *comment_parts): r""" self.assertLocationCommentPresent('django.po', 42, 'dirA', 'dirB', 'foo.py') verifies that the django.po file has a gettext-style location comment of the form `#: dirA/dirB/foo.py:42` (or `#: .\dirA\dirB\foo.py:42` on Windows) None can be passed for the line_number argument to skip checking of the :42 suffix part. A string token can also be passed as line_number, in which case it will be searched in the template, and its line number will be used. A msgid is a suitable candidate. """ return self._assertPoLocComment(True, po_filename, line_number, *comment_parts) def assertLocationCommentNotPresent(self, po_filename, line_number, *comment_parts): """Check the opposite of assertLocationComment()""" return self._assertPoLocComment(False, po_filename, line_number, *comment_parts) def assertRecentlyModified(self, path): """ Assert that file was recently modified (modification time was less than 10 seconds ago). """ delta = time.time() - os.stat(path).st_mtime self.assertLess(delta, 10, "%s was recently modified" % path) def assertNotRecentlyModified(self, path): """ Assert that file was not recently modified (modification time was more than 10 seconds ago). """ delta = time.time() - os.stat(path).st_mtime self.assertGreater(delta, 10, "%s wasn't recently modified" % path) class BasicExtractorTests(ExtractorTests): @override_settings(USE_I18N=False) def test_use_i18n_false(self): """ makemessages also runs successfully when USE_I18N is False. """ management.call_command("makemessages", locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE, encoding="utf-8") as fp: po_contents = fp.read() # Check two random strings self.assertIn("#. Translators: One-line translator comment #1", po_contents) self.assertIn('msgctxt "Special trans context #1"', po_contents) def test_no_option(self): # One of either the --locale, --exclude, or --all options is required. msg = "Type 'manage.py help makemessages' for usage information." 
with mock.patch( "django.core.management.commands.makemessages.sys.argv", ["manage.py", "makemessages"], ): with self.assertRaisesRegex(CommandError, msg): management.call_command("makemessages") def test_valid_locale(self): out = StringIO() management.call_command("makemessages", locale=["de"], stdout=out, verbosity=1) self.assertNotIn("invalid locale de", out.getvalue()) self.assertIn("processing locale de", out.getvalue()) self.assertIs(Path(self.PO_FILE).exists(), True) def test_valid_locale_with_country(self): out = StringIO() management.call_command( "makemessages", locale=["en_GB"], stdout=out, verbosity=1 ) self.assertNotIn("invalid locale en_GB", out.getvalue()) self.assertIn("processing locale en_GB", out.getvalue()) self.assertIs(Path("locale/en_GB/LC_MESSAGES/django.po").exists(), True) def test_valid_locale_tachelhit_latin_morocco(self): out = StringIO() management.call_command( "makemessages", locale=["shi_Latn_MA"], stdout=out, verbosity=1 ) self.assertNotIn("invalid locale shi_Latn_MA", out.getvalue()) self.assertIn("processing locale shi_Latn_MA", out.getvalue()) self.assertIs(Path("locale/shi_Latn_MA/LC_MESSAGES/django.po").exists(), True) def test_valid_locale_private_subtag(self): out = StringIO() management.call_command( "makemessages", locale=["nl_NL-x-informal"], stdout=out, verbosity=1 ) self.assertNotIn("invalid locale nl_NL-x-informal", out.getvalue()) self.assertIn("processing locale nl_NL-x-informal", out.getvalue()) self.assertIs( Path("locale/nl_NL-x-informal/LC_MESSAGES/django.po").exists(), True ) def test_invalid_locale_uppercase(self): out = StringIO() management.call_command("makemessages", locale=["PL"], stdout=out, verbosity=1) self.assertIn("invalid locale PL, did you mean pl?", out.getvalue()) self.assertNotIn("processing locale pl", out.getvalue()) self.assertIs(Path("locale/pl/LC_MESSAGES/django.po").exists(), False) def test_invalid_locale_hyphen(self): out = StringIO() management.call_command( "makemessages", locale=["pl-PL"], stdout=out, verbosity=1 ) self.assertIn("invalid locale pl-PL, did you mean pl_PL?", out.getvalue()) self.assertNotIn("processing locale pl-PL", out.getvalue()) self.assertIs(Path("locale/pl-PL/LC_MESSAGES/django.po").exists(), False) def test_invalid_locale_lower_country(self): out = StringIO() management.call_command( "makemessages", locale=["pl_pl"], stdout=out, verbosity=1 ) self.assertIn("invalid locale pl_pl, did you mean pl_PL?", out.getvalue()) self.assertNotIn("processing locale pl_pl", out.getvalue()) self.assertIs(Path("locale/pl_pl/LC_MESSAGES/django.po").exists(), False) def test_invalid_locale_private_subtag(self): out = StringIO() management.call_command( "makemessages", locale=["nl-nl-x-informal"], stdout=out, verbosity=1 ) self.assertIn( "invalid locale nl-nl-x-informal, did you mean nl_NL-x-informal?", out.getvalue(), ) self.assertNotIn("processing locale nl-nl-x-informal", out.getvalue()) self.assertIs( Path("locale/nl-nl-x-informal/LC_MESSAGES/django.po").exists(), False ) def test_invalid_locale_plus(self): out = StringIO() management.call_command( "makemessages", locale=["en+GB"], stdout=out, verbosity=1 ) self.assertIn("invalid locale en+GB, did you mean en_GB?", out.getvalue()) self.assertNotIn("processing locale en+GB", out.getvalue()) self.assertIs(Path("locale/en+GB/LC_MESSAGES/django.po").exists(), False) def test_invalid_locale_end_with_underscore(self): out = StringIO() management.call_command("makemessages", locale=["en_"], stdout=out, verbosity=1) self.assertIn("invalid locale en_", 
out.getvalue()) self.assertNotIn("processing locale en_", out.getvalue()) self.assertIs(Path("locale/en_/LC_MESSAGES/django.po").exists(), False) def test_invalid_locale_start_with_underscore(self): out = StringIO() management.call_command("makemessages", locale=["_en"], stdout=out, verbosity=1) self.assertIn("invalid locale _en", out.getvalue()) self.assertNotIn("processing locale _en", out.getvalue()) self.assertIs(Path("locale/_en/LC_MESSAGES/django.po").exists(), False) def test_comments_extractor(self): management.call_command("makemessages", locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE, encoding="utf-8") as fp: po_contents = fp.read() self.assertNotIn("This comment should not be extracted", po_contents) # Comments in templates self.assertIn( "#. Translators: This comment should be extracted", po_contents ) self.assertIn( "#. Translators: Django comment block for translators\n#. " "string's meaning unveiled", po_contents, ) self.assertIn("#. Translators: One-line translator comment #1", po_contents) self.assertIn( "#. Translators: Two-line translator comment #1\n#. continued here.", po_contents, ) self.assertIn("#. Translators: One-line translator comment #2", po_contents) self.assertIn( "#. Translators: Two-line translator comment #2\n#. continued here.", po_contents, ) self.assertIn("#. Translators: One-line translator comment #3", po_contents) self.assertIn( "#. Translators: Two-line translator comment #3\n#. continued here.", po_contents, ) self.assertIn("#. Translators: One-line translator comment #4", po_contents) self.assertIn( "#. Translators: Two-line translator comment #4\n#. continued here.", po_contents, ) self.assertIn( "#. Translators: One-line translator comment #5 -- with " "non ASCII characters: áéíóúö", po_contents, ) self.assertIn( "#. Translators: Two-line translator comment #5 -- with " "non ASCII characters: áéíóúö\n#. continued here.", po_contents, ) def test_special_char_extracted(self): management.call_command("makemessages", locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE, encoding="utf-8") as fp: po_contents = fp.read() self.assertMsgId("Non-breaking space\u00a0:", po_contents) def test_blocktranslate_trimmed(self): management.call_command("makemessages", locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() # should not be trimmed self.assertNotMsgId("Text with a few line breaks.", po_contents) # should be trimmed self.assertMsgId( "Again some text with a few line breaks, this time should be trimmed.", po_contents, ) # #21406 -- Should adjust for eaten line numbers self.assertMsgId("Get my line number", po_contents) self.assertLocationCommentPresent( self.PO_FILE, "Get my line number", "templates", "test.html" ) def test_extraction_error(self): msg = ( "Translation blocks must not include other block tags: blocktranslate " "(file %s, line 3)" % os.path.join("templates", "template_with_error.tpl") ) with self.assertRaisesMessage(SyntaxError, msg): management.call_command( "makemessages", locale=[LOCALE], extensions=["tpl"], verbosity=0 ) # The temporary files were cleaned up. 
self.assertFalse(os.path.exists("./templates/template_with_error.tpl.py")) self.assertFalse(os.path.exists("./templates/template_0_with_no_error.tpl.py")) def test_unicode_decode_error(self): shutil.copyfile("./not_utf8.sample", "./not_utf8.txt") out = StringIO() management.call_command("makemessages", locale=[LOCALE], stdout=out) self.assertIn( "UnicodeDecodeError: skipped file not_utf8.txt in .", out.getvalue() ) def test_unicode_file_name(self): open(os.path.join(self.test_dir, "vidéo.txt"), "a").close() management.call_command("makemessages", locale=[LOCALE], verbosity=0) def test_extraction_warning(self): """test xgettext warning about multiple bare interpolation placeholders""" shutil.copyfile("./code.sample", "./code_sample.py") out = StringIO() management.call_command("makemessages", locale=[LOCALE], stdout=out) self.assertIn("code_sample.py:4", out.getvalue()) def test_template_message_context_extractor(self): """ Message contexts are correctly extracted for the {% translate %} and {% blocktranslate %} template tags (#14806). """ management.call_command("makemessages", locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() # {% translate %} self.assertIn('msgctxt "Special trans context #1"', po_contents) self.assertMsgId("Translatable literal #7a", po_contents) self.assertIn('msgctxt "Special trans context #2"', po_contents) self.assertMsgId("Translatable literal #7b", po_contents) self.assertIn('msgctxt "Special trans context #3"', po_contents) self.assertMsgId("Translatable literal #7c", po_contents) # {% translate %} with a filter for ( minor_part ) in "abcdefgh": # Iterate from #7.1a to #7.1h template markers self.assertIn( 'msgctxt "context #7.1{}"'.format(minor_part), po_contents ) self.assertMsgId( "Translatable literal #7.1{}".format(minor_part), po_contents ) # {% blocktranslate %} self.assertIn('msgctxt "Special blocktranslate context #1"', po_contents) self.assertMsgId("Translatable literal #8a", po_contents) self.assertIn('msgctxt "Special blocktranslate context #2"', po_contents) self.assertMsgId("Translatable literal #8b-singular", po_contents) self.assertIn("Translatable literal #8b-plural", po_contents) self.assertIn('msgctxt "Special blocktranslate context #3"', po_contents) self.assertMsgId("Translatable literal #8c-singular", po_contents) self.assertIn("Translatable literal #8c-plural", po_contents) self.assertIn('msgctxt "Special blocktranslate context #4"', po_contents) self.assertMsgId("Translatable literal #8d %(a)s", po_contents) # {% trans %} and {% blocktrans %} self.assertMsgId("trans text", po_contents) self.assertMsgId("blocktrans text", po_contents) def test_context_in_single_quotes(self): management.call_command("makemessages", locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() # {% translate %} self.assertIn('msgctxt "Context wrapped in double quotes"', po_contents) self.assertIn('msgctxt "Context wrapped in single quotes"', po_contents) # {% blocktranslate %} self.assertIn( 'msgctxt "Special blocktranslate context wrapped in double quotes"', po_contents, ) self.assertIn( 'msgctxt "Special blocktranslate context wrapped in single quotes"', po_contents, ) def test_template_comments(self): """Template comment tags on the same line of other constructs (#19552)""" # Test detection/end user reporting of old, incorrect templates # translator comments syntax with warnings.catch_warnings(record=True) as 
ws: warnings.simplefilter("always") management.call_command( "makemessages", locale=[LOCALE], extensions=["thtml"], verbosity=0 ) self.assertEqual(len(ws), 3) for w in ws: self.assertTrue(issubclass(w.category, TranslatorCommentWarning)) self.assertRegex( str(ws[0].message), r"The translator-targeted comment 'Translators: ignored i18n " r"comment #1' \(file templates[/\\]comments.thtml, line 4\) " r"was ignored, because it wasn't the last item on the line\.", ) self.assertRegex( str(ws[1].message), r"The translator-targeted comment 'Translators: ignored i18n " r"comment #3' \(file templates[/\\]comments.thtml, line 6\) " r"was ignored, because it wasn't the last item on the line\.", ) self.assertRegex( str(ws[2].message), r"The translator-targeted comment 'Translators: ignored i18n " r"comment #4' \(file templates[/\\]comments.thtml, line 8\) " r"was ignored, because it wasn't the last item on the line\.", ) # Now test .po file contents self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId("Translatable literal #9a", po_contents) self.assertNotIn("ignored comment #1", po_contents) self.assertNotIn("Translators: ignored i18n comment #1", po_contents) self.assertMsgId("Translatable literal #9b", po_contents) self.assertNotIn("ignored i18n comment #2", po_contents) self.assertNotIn("ignored comment #2", po_contents) self.assertMsgId("Translatable literal #9c", po_contents) self.assertNotIn("ignored comment #3", po_contents) self.assertNotIn("ignored i18n comment #3", po_contents) self.assertMsgId("Translatable literal #9d", po_contents) self.assertNotIn("ignored comment #4", po_contents) self.assertMsgId("Translatable literal #9e", po_contents) self.assertNotIn("ignored comment #5", po_contents) self.assertNotIn("ignored i18n comment #4", po_contents) self.assertMsgId("Translatable literal #9f", po_contents) self.assertIn("#. Translators: valid i18n comment #5", po_contents) self.assertMsgId("Translatable literal #9g", po_contents) self.assertIn("#. Translators: valid i18n comment #6", po_contents) self.assertMsgId("Translatable literal #9h", po_contents) self.assertIn("#. Translators: valid i18n comment #7", po_contents) self.assertMsgId("Translatable literal #9i", po_contents) self.assertRegex(po_contents, r"#\..+Translators: valid i18n comment #8") self.assertRegex(po_contents, r"#\..+Translators: valid i18n comment #9") self.assertMsgId("Translatable literal #9j", po_contents) def test_makemessages_find_files(self): """ find_files only discover files having the proper extensions. 
""" cmd = MakeMessagesCommand() cmd.ignore_patterns = ["CVS", ".*", "*~", "*.pyc"] cmd.symlinks = False cmd.domain = "django" cmd.extensions = [".html", ".txt", ".py"] cmd.verbosity = 0 cmd.locale_paths = [] cmd.default_locale_path = os.path.join(self.test_dir, "locale") found_files = cmd.find_files(self.test_dir) self.assertGreater(len(found_files), 1) found_exts = {os.path.splitext(tfile.file)[1] for tfile in found_files} self.assertEqual(found_exts.difference({".py", ".html", ".txt"}), set()) cmd.extensions = [".js"] cmd.domain = "djangojs" found_files = cmd.find_files(self.test_dir) self.assertGreater(len(found_files), 1) found_exts = {os.path.splitext(tfile.file)[1] for tfile in found_files} self.assertEqual(found_exts.difference({".js"}), set()) @mock.patch("django.core.management.commands.makemessages.popen_wrapper") def test_makemessages_gettext_version(self, mocked_popen_wrapper): # "Normal" output: mocked_popen_wrapper.return_value = ( "xgettext (GNU gettext-tools) 0.18.1\n" "Copyright (C) 1995-1998, 2000-2010 Free Software Foundation, Inc.\n" "License GPLv3+: GNU GPL version 3 or later " "<http://gnu.org/licenses/gpl.html>\n" "This is free software: you are free to change and redistribute it.\n" "There is NO WARRANTY, to the extent permitted by law.\n" "Written by Ulrich Drepper.\n", "", 0, ) cmd = MakeMessagesCommand() self.assertEqual(cmd.gettext_version, (0, 18, 1)) # Version number with only 2 parts (#23788) mocked_popen_wrapper.return_value = ( "xgettext (GNU gettext-tools) 0.17\n", "", 0, ) cmd = MakeMessagesCommand() self.assertEqual(cmd.gettext_version, (0, 17)) # Bad version output mocked_popen_wrapper.return_value = ("any other return value\n", "", 0) cmd = MakeMessagesCommand() with self.assertRaisesMessage( CommandError, "Unable to get gettext version. Is it installed?" ): cmd.gettext_version def test_po_file_encoding_when_updating(self): """ Update of PO file doesn't corrupt it with non-UTF-8 encoding on Windows (#23271). """ BR_PO_BASE = "locale/pt_BR/LC_MESSAGES/django" shutil.copyfile(BR_PO_BASE + ".pristine", BR_PO_BASE + ".po") management.call_command("makemessages", locale=["pt_BR"], verbosity=0) self.assertTrue(os.path.exists(BR_PO_BASE + ".po")) with open(BR_PO_BASE + ".po", encoding="utf-8") as fp: po_contents = fp.read() self.assertMsgStr("Größe", po_contents) def test_pot_charset_header_is_utf8(self): """Content-Type: ... 
charset=CHARSET is replaced with charset=UTF-8""" msgs = ( "# SOME DESCRIPTIVE TITLE.\n" "# (some lines truncated as they are not relevant)\n" '"Content-Type: text/plain; charset=CHARSET\\n"\n' '"Content-Transfer-Encoding: 8bit\\n"\n' "\n" "#: somefile.py:8\n" 'msgid "mañana; charset=CHARSET"\n' 'msgstr ""\n' ) with tempfile.NamedTemporaryFile() as pot_file: pot_filename = pot_file.name write_pot_file(pot_filename, msgs) with open(pot_filename, encoding="utf-8") as fp: pot_contents = fp.read() self.assertIn("Content-Type: text/plain; charset=UTF-8", pot_contents) self.assertIn("mañana; charset=CHARSET", pot_contents) class JavaScriptExtractorTests(ExtractorTests): PO_FILE = "locale/%s/LC_MESSAGES/djangojs.po" % LOCALE def test_javascript_literals(self): _, po_contents = self._run_makemessages(domain="djangojs") self.assertMsgId("This literal should be included.", po_contents) self.assertMsgId("gettext_noop should, too.", po_contents) self.assertMsgId("This one as well.", po_contents) self.assertMsgId(r"He said, \"hello\".", po_contents) self.assertMsgId("okkkk", po_contents) self.assertMsgId("TEXT", po_contents) self.assertMsgId("It's at http://example.com", po_contents) self.assertMsgId("String", po_contents) self.assertMsgId( "/* but this one will be too */ 'cause there is no way of telling...", po_contents, ) self.assertMsgId("foo", po_contents) self.assertMsgId("bar", po_contents) self.assertMsgId("baz", po_contents) self.assertMsgId("quz", po_contents) self.assertMsgId("foobar", po_contents) def test_media_static_dirs_ignored(self): """ Regression test for #23583. """ with override_settings( STATIC_ROOT=os.path.join(self.test_dir, "static/"), MEDIA_ROOT=os.path.join(self.test_dir, "media_root/"), ): _, po_contents = self._run_makemessages(domain="djangojs") self.assertMsgId( "Static content inside app should be included.", po_contents ) self.assertNotMsgId( "Content from STATIC_ROOT should not be included", po_contents ) @override_settings(STATIC_ROOT=None, MEDIA_ROOT="") def test_default_root_settings(self): """ Regression test for #23717. 
""" _, po_contents = self._run_makemessages(domain="djangojs") self.assertMsgId("Static content inside app should be included.", po_contents) class IgnoredExtractorTests(ExtractorTests): def test_ignore_directory(self): out, po_contents = self._run_makemessages( ignore_patterns=[ os.path.join("ignore_dir", "*"), ] ) self.assertIn("ignoring directory ignore_dir", out) self.assertMsgId("This literal should be included.", po_contents) self.assertNotMsgId("This should be ignored.", po_contents) def test_ignore_subdirectory(self): out, po_contents = self._run_makemessages( ignore_patterns=[ "templates/*/ignore.html", "templates/subdir/*", ] ) self.assertIn("ignoring directory subdir", out) self.assertNotMsgId("This subdir should be ignored too.", po_contents) def test_ignore_file_patterns(self): out, po_contents = self._run_makemessages( ignore_patterns=[ "xxx_*", ] ) self.assertIn("ignoring file xxx_ignored.html", out) self.assertNotMsgId("This should be ignored too.", po_contents) def test_media_static_dirs_ignored(self): with override_settings( STATIC_ROOT=os.path.join(self.test_dir, "static/"), MEDIA_ROOT=os.path.join(self.test_dir, "media_root/"), ): out, _ = self._run_makemessages() self.assertIn("ignoring directory static", out) self.assertIn("ignoring directory media_root", out) class SymlinkExtractorTests(ExtractorTests): def setUp(self): super().setUp() self.symlinked_dir = os.path.join(self.test_dir, "templates_symlinked") def test_symlink(self): if symlinks_supported(): os.symlink(os.path.join(self.test_dir, "templates"), self.symlinked_dir) else: self.skipTest( "os.symlink() not available on this OS + Python version combination." ) management.call_command( "makemessages", locale=[LOCALE], verbosity=0, symlinks=True ) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId("This literal should be included.", po_contents) self.assertLocationCommentPresent( self.PO_FILE, None, "templates_symlinked", "test.html" ) class CopyPluralFormsExtractorTests(ExtractorTests): PO_FILE_ES = "locale/es/LC_MESSAGES/django.po" def test_copy_plural_forms(self): management.call_command("makemessages", locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertIn("Plural-Forms: nplurals=2; plural=(n != 1)", po_contents) def test_override_plural_forms(self): """Ticket #20311.""" management.call_command( "makemessages", locale=["es"], extensions=["djtpl"], verbosity=0 ) self.assertTrue(os.path.exists(self.PO_FILE_ES)) with open(self.PO_FILE_ES, encoding="utf-8") as fp: po_contents = fp.read() found = re.findall( r'^(?P<value>"Plural-Forms.+?\\n")\s*$', po_contents, re.MULTILINE | re.DOTALL, ) self.assertEqual(1, len(found)) def test_translate_and_plural_blocktranslate_collision(self): """ Ensures a correct workaround for the gettext bug when handling a literal found inside a {% translate %} tag and also in another file inside a {% blocktranslate %} with a plural (#17375). 
""" management.call_command( "makemessages", locale=[LOCALE], extensions=["html", "djtpl"], verbosity=0 ) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertNotIn( "#-#-#-#-# django.pot (PACKAGE VERSION) #-#-#-#-#\\n", po_contents ) self.assertMsgId( "First `translate`, then `blocktranslate` with a plural", po_contents ) self.assertMsgIdPlural( "Plural for a `translate` and `blocktranslate` collision case", po_contents, ) class NoWrapExtractorTests(ExtractorTests): def test_no_wrap_enabled(self): management.call_command( "makemessages", locale=[LOCALE], verbosity=0, no_wrap=True ) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId( "This literal should also be included wrapped or not wrapped " "depending on the use of the --no-wrap option.", po_contents, ) def test_no_wrap_disabled(self): management.call_command( "makemessages", locale=[LOCALE], verbosity=0, no_wrap=False ) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId( '""\n"This literal should also be included wrapped or not ' 'wrapped depending on the "\n"use of the --no-wrap option."', po_contents, use_quotes=False, ) class LocationCommentsTests(ExtractorTests): def test_no_location_enabled(self): """Behavior is correct if --no-location switch is specified. See #16903.""" management.call_command( "makemessages", locale=[LOCALE], verbosity=0, no_location=True ) self.assertTrue(os.path.exists(self.PO_FILE)) self.assertLocationCommentNotPresent(self.PO_FILE, None, "test.html") def test_no_location_disabled(self): """Behavior is correct if --no-location switch isn't specified.""" management.call_command( "makemessages", locale=[LOCALE], verbosity=0, no_location=False ) self.assertTrue(os.path.exists(self.PO_FILE)) # #16903 -- Standard comment with source file relative path should be present self.assertLocationCommentPresent( self.PO_FILE, "Translatable literal #6b", "templates", "test.html" ) def test_location_comments_for_templatized_files(self): """ Ensure no leaky paths in comments, e.g. #: path\to\file.html.py:123 Refs #21209/#26341. """ management.call_command("makemessages", locale=[LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE)) with open(self.PO_FILE) as fp: po_contents = fp.read() self.assertMsgId("#: templates/test.html.py", po_contents) self.assertLocationCommentNotPresent(self.PO_FILE, None, ".html.py") self.assertLocationCommentPresent(self.PO_FILE, 5, "templates", "test.html") @requires_gettext_019 def test_add_location_full(self): """makemessages --add-location=full""" management.call_command( "makemessages", locale=[LOCALE], verbosity=0, add_location="full" ) self.assertTrue(os.path.exists(self.PO_FILE)) # Comment with source file relative path and line number is present. self.assertLocationCommentPresent( self.PO_FILE, "Translatable literal #6b", "templates", "test.html" ) @requires_gettext_019 def test_add_location_file(self): """makemessages --add-location=file""" management.call_command( "makemessages", locale=[LOCALE], verbosity=0, add_location="file" ) self.assertTrue(os.path.exists(self.PO_FILE)) # Comment with source file relative path is present. self.assertLocationCommentPresent(self.PO_FILE, None, "templates", "test.html") # But it should not contain the line number. 
self.assertLocationCommentNotPresent( self.PO_FILE, "Translatable literal #6b", "templates", "test.html" ) @requires_gettext_019 def test_add_location_never(self): """makemessages --add-location=never""" management.call_command( "makemessages", locale=[LOCALE], verbosity=0, add_location="never" ) self.assertTrue(os.path.exists(self.PO_FILE)) self.assertLocationCommentNotPresent(self.PO_FILE, None, "test.html") @mock.patch( "django.core.management.commands.makemessages.Command.gettext_version", new=(0, 18, 99), ) def test_add_location_gettext_version_check(self): """ CommandError is raised when using makemessages --add-location with gettext < 0.19. """ msg = ( "The --add-location option requires gettext 0.19 or later. You have " "0.18.99." ) with self.assertRaisesMessage(CommandError, msg): management.call_command( "makemessages", locale=[LOCALE], verbosity=0, add_location="full" ) class KeepPotFileExtractorTests(ExtractorTests): POT_FILE = "locale/django.pot" def test_keep_pot_disabled_by_default(self): management.call_command("makemessages", locale=[LOCALE], verbosity=0) self.assertFalse(os.path.exists(self.POT_FILE)) def test_keep_pot_explicitly_disabled(self): management.call_command( "makemessages", locale=[LOCALE], verbosity=0, keep_pot=False ) self.assertFalse(os.path.exists(self.POT_FILE)) def test_keep_pot_enabled(self): management.call_command( "makemessages", locale=[LOCALE], verbosity=0, keep_pot=True ) self.assertTrue(os.path.exists(self.POT_FILE)) class MultipleLocaleExtractionTests(ExtractorTests): PO_FILE_PT = "locale/pt/LC_MESSAGES/django.po" PO_FILE_DE = "locale/de/LC_MESSAGES/django.po" PO_FILE_KO = "locale/ko/LC_MESSAGES/django.po" LOCALES = ["pt", "de", "ch"] def test_multiple_locales(self): management.call_command("makemessages", locale=["pt", "de"], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE_PT)) self.assertTrue(os.path.exists(self.PO_FILE_DE)) def test_all_locales(self): """ When the `locale` flag is absent, all dirs from the parent locale dir are considered as language directories, except if the directory doesn't start with two letters (which excludes __pycache__, .gitignore, etc.). """ os.mkdir(os.path.join("locale", "_do_not_pick")) # Excluding locales that do not compile management.call_command("makemessages", exclude=["ja", "es_AR"], verbosity=0) self.assertTrue(os.path.exists(self.PO_FILE_KO)) self.assertFalse(os.path.exists("locale/_do_not_pick/LC_MESSAGES/django.po")) class ExcludedLocaleExtractionTests(ExtractorTests): work_subdir = "exclude" LOCALES = ["en", "fr", "it"] PO_FILE = "locale/%s/LC_MESSAGES/django.po" def _set_times_for_all_po_files(self): """ Set access and modification times to the Unix epoch time for all the .po files. """ for locale in self.LOCALES: os.utime(self.PO_FILE % locale, (0, 0)) def setUp(self): super().setUp() copytree("canned_locale", "locale") self._set_times_for_all_po_files() def test_command_help(self): with captured_stdout(), captured_stderr(): # `call_command` bypasses the parser; by calling # `execute_from_command_line` with the help subcommand we # ensure that there are no issues with the parser itself. 
execute_from_command_line(["django-admin", "help", "makemessages"]) def test_one_locale_excluded(self): management.call_command("makemessages", exclude=["it"], verbosity=0) self.assertRecentlyModified(self.PO_FILE % "en") self.assertRecentlyModified(self.PO_FILE % "fr") self.assertNotRecentlyModified(self.PO_FILE % "it") def test_multiple_locales_excluded(self): management.call_command("makemessages", exclude=["it", "fr"], verbosity=0) self.assertRecentlyModified(self.PO_FILE % "en") self.assertNotRecentlyModified(self.PO_FILE % "fr") self.assertNotRecentlyModified(self.PO_FILE % "it") def test_one_locale_excluded_with_locale(self): management.call_command( "makemessages", locale=["en", "fr"], exclude=["fr"], verbosity=0 ) self.assertRecentlyModified(self.PO_FILE % "en") self.assertNotRecentlyModified(self.PO_FILE % "fr") self.assertNotRecentlyModified(self.PO_FILE % "it") def test_multiple_locales_excluded_with_locale(self): management.call_command( "makemessages", locale=["en", "fr", "it"], exclude=["fr", "it"], verbosity=0 ) self.assertRecentlyModified(self.PO_FILE % "en") self.assertNotRecentlyModified(self.PO_FILE % "fr") self.assertNotRecentlyModified(self.PO_FILE % "it") class CustomLayoutExtractionTests(ExtractorTests): work_subdir = "project_dir" def test_no_locale_raises(self): msg = ( "Unable to find a locale path to store translations for file " "__init__.py. Make sure the 'locale' directory exists in an app " "or LOCALE_PATHS setting is set." ) with self.assertRaisesMessage(management.CommandError, msg): management.call_command("makemessages", locale=[LOCALE], verbosity=0) # Working files are cleaned up on an error. self.assertFalse(os.path.exists("./app_no_locale/test.html.py")) def test_project_locale_paths(self): self._test_project_locale_paths(os.path.join(self.test_dir, "project_locale")) def test_project_locale_paths_pathlib(self): self._test_project_locale_paths(Path(self.test_dir) / "project_locale") def _test_project_locale_paths(self, locale_path): """ * translations for an app containing a locale folder are stored in that folder * translations outside of that app are in LOCALE_PATHS[0] """ with override_settings(LOCALE_PATHS=[locale_path]): management.call_command("makemessages", locale=[LOCALE], verbosity=0) project_de_locale = os.path.join( self.test_dir, "project_locale", "de", "LC_MESSAGES", "django.po" ) app_de_locale = os.path.join( self.test_dir, "app_with_locale", "locale", "de", "LC_MESSAGES", "django.po", ) self.assertTrue(os.path.exists(project_de_locale)) self.assertTrue(os.path.exists(app_de_locale)) with open(project_de_locale) as fp: po_contents = fp.read() self.assertMsgId("This app has no locale directory", po_contents) self.assertMsgId("This is a project-level string", po_contents) with open(app_de_locale) as fp: po_contents = fp.read() self.assertMsgId("This app has a locale directory", po_contents) @skipUnless(has_xgettext, "xgettext is mandatory for extraction tests") class NoSettingsExtractionTests(AdminScriptTestCase): def test_makemessages_no_settings(self): out, err = self.run_django_admin(["makemessages", "-l", "en", "-v", "0"]) self.assertNoOutput(err) self.assertNoOutput(out) class UnchangedPoExtractionTests(ExtractorTests): work_subdir = "unchanged" def setUp(self): super().setUp() po_file = Path(self.PO_FILE) po_file_tmp = Path(self.PO_FILE + ".tmp") if os.name == "nt": # msgmerge outputs Windows style paths on Windows. 
            po_contents = po_file_tmp.read_text().replace(
                "#: __init__.py",
                "#: .\\__init__.py",
            )
            po_file.write_text(po_contents)
        else:
            po_file_tmp.rename(po_file)
        self.original_po_contents = po_file.read_text()

    def test_po_remains_unchanged(self):
        """PO files are unchanged unless there are new changes."""
        _, po_contents = self._run_makemessages()
        self.assertEqual(po_contents, self.original_po_contents)

    def test_po_changed_with_new_strings(self):
        """PO files are updated when new changes are detected."""
        Path("models.py.tmp").rename("models.py")
        _, po_contents = self._run_makemessages()
        self.assertNotEqual(po_contents, self.original_po_contents)
        self.assertMsgId(
            "This is a hitherto undiscovered translatable string.",
            po_contents,
        )
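# ---------------------------------------------------------------------------
# Editor's sketch (not part of the test module above): driving makemessages
# programmatically the way these tests do, via call_command(). It assumes it
# runs from a directory that contains a "locale" folder and that xgettext is
# installed; `extract_messages` is an invented helper name.
# ---------------------------------------------------------------------------
from io import StringIO

from django.core import management


def extract_messages(locales, keep_pot=False):
    out = StringIO()
    management.call_command(
        "makemessages",
        locale=list(locales),  # e.g. ["de", "pt_BR"], as in the tests above
        keep_pot=keep_pot,  # keep the intermediate locale/django.pot file
        verbosity=1,
        stdout=out,
    )
    return out.getvalue()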
d352df90d1b723db6622cfda4886823c3e5f3732c6891e23faf03a931c756ea2
import datetime
import decimal
import gettext as gettext_module
import os
import pickle
import re
import tempfile
from contextlib import contextmanager
from importlib import import_module
from pathlib import Path
from unittest import mock

from asgiref.local import Local

from django import forms
from django.apps import AppConfig
from django.conf import settings
from django.conf.locale import LANG_INFO
from django.conf.urls.i18n import i18n_patterns
from django.template import Context, Template
from django.test import RequestFactory, SimpleTestCase, TestCase, override_settings
from django.utils import translation
from django.utils.formats import (
    date_format,
    get_format,
    iter_format_modules,
    localize,
    localize_input,
    reset_format_cache,
    sanitize_separators,
    sanitize_strftime_format,
    time_format,
)
from django.utils.numberformat import format as nformat
from django.utils.safestring import SafeString, mark_safe
from django.utils.translation import (
    activate,
    check_for_language,
    deactivate,
    get_language,
    get_language_bidi,
    get_language_from_request,
    get_language_info,
    gettext,
    gettext_lazy,
    ngettext,
    ngettext_lazy,
    npgettext,
    npgettext_lazy,
    pgettext,
    round_away_from_one,
    to_language,
    to_locale,
    trans_null,
    trans_real,
)
from django.utils.translation.reloader import (
    translation_file_changed,
    watch_for_translation_changes,
)

from .forms import CompanyForm, I18nForm, SelectDateForm
from .models import Company, TestModel

here = os.path.dirname(os.path.abspath(__file__))
extended_locale_paths = settings.LOCALE_PATHS + [
    os.path.join(here, "other", "locale"),
]


class AppModuleStub:
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)


@contextmanager
def patch_formats(lang, **settings):
    from django.utils.formats import _format_cache

    # Populate _format_cache with temporary values
    for key, value in settings.items():
        _format_cache[(key, lang)] = value
    try:
        yield
    finally:
        reset_format_cache()


class TranslationTests(SimpleTestCase):
    @translation.override("fr")
    def test_plural(self):
        """
        Test plurals with ngettext. French differs from English in that 0
        is singular.
        """
        self.assertEqual(
            ngettext("%(num)d year", "%(num)d years", 0) % {"num": 0},
            "0 année",
        )
        self.assertEqual(
            ngettext("%(num)d year", "%(num)d years", 2) % {"num": 2},
            "2 années",
        )
        self.assertEqual(
            ngettext("%(size)d byte", "%(size)d bytes", 0) % {"size": 0}, "0 octet"
        )
        self.assertEqual(
            ngettext("%(size)d byte", "%(size)d bytes", 2) % {"size": 2}, "2 octets"
        )

    def test_plural_null(self):
        g = trans_null.ngettext
        self.assertEqual(g("%(num)d year", "%(num)d years", 0) % {"num": 0}, "0 years")
        self.assertEqual(g("%(num)d year", "%(num)d years", 1) % {"num": 1}, "1 year")
        self.assertEqual(g("%(num)d year", "%(num)d years", 2) % {"num": 2}, "2 years")

    @override_settings(LOCALE_PATHS=extended_locale_paths)
    @translation.override("fr")
    def test_multiple_plurals_per_language(self):
        """
        Normally, French has 2 plurals. As other/locale/fr/LC_MESSAGES/django.po
        has a different plural equation with 3 plurals, this tests if those
        plurals are honored.
        """
        self.assertEqual(ngettext("%d singular", "%d plural", 0) % 0, "0 pluriel1")
        self.assertEqual(ngettext("%d singular", "%d plural", 1) % 1, "1 singulier")
        self.assertEqual(ngettext("%d singular", "%d plural", 2) % 2, "2 pluriel2")
        french = trans_real.catalog()
        # Internal _catalog can query subcatalogs (from different po files).
self.assertEqual(french._catalog[("%d singular", 0)], "%d singulier") self.assertEqual(french._catalog[("%(num)d hour", 0)], "%(num)d heure") def test_override(self): activate("de") try: with translation.override("pl"): self.assertEqual(get_language(), "pl") self.assertEqual(get_language(), "de") with translation.override(None): self.assertIsNone(get_language()) with translation.override("pl"): pass self.assertIsNone(get_language()) self.assertEqual(get_language(), "de") finally: deactivate() def test_override_decorator(self): @translation.override("pl") def func_pl(): self.assertEqual(get_language(), "pl") @translation.override(None) def func_none(): self.assertIsNone(get_language()) try: activate("de") func_pl() self.assertEqual(get_language(), "de") func_none() self.assertEqual(get_language(), "de") finally: deactivate() def test_override_exit(self): """ The language restored is the one used when the function was called, not the one used when the decorator was initialized (#23381). """ activate("fr") @translation.override("pl") def func_pl(): pass deactivate() try: activate("en") func_pl() self.assertEqual(get_language(), "en") finally: deactivate() def test_lazy_objects(self): """ Format string interpolation should work with *_lazy objects. """ s = gettext_lazy("Add %(name)s") d = {"name": "Ringo"} self.assertEqual("Add Ringo", s % d) with translation.override("de", deactivate=True): self.assertEqual("Ringo hinzuf\xfcgen", s % d) with translation.override("pl"): self.assertEqual("Dodaj Ringo", s % d) # It should be possible to compare *_lazy objects. s1 = gettext_lazy("Add %(name)s") self.assertEqual(s, s1) s2 = gettext_lazy("Add %(name)s") s3 = gettext_lazy("Add %(name)s") self.assertEqual(s2, s3) self.assertEqual(s, s2) s4 = gettext_lazy("Some other string") self.assertNotEqual(s, s4) def test_lazy_pickle(self): s1 = gettext_lazy("test") self.assertEqual(str(s1), "test") s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(str(s2), "test") @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy(self): simple_with_format = ngettext_lazy("%d good result", "%d good results") simple_context_with_format = npgettext_lazy( "Exclamation", "%d good result", "%d good results" ) simple_without_format = ngettext_lazy("good result", "good results") with translation.override("de"): self.assertEqual(simple_with_format % 1, "1 gutes Resultat") self.assertEqual(simple_with_format % 4, "4 guten Resultate") self.assertEqual(simple_context_with_format % 1, "1 gutes Resultat!") self.assertEqual(simple_context_with_format % 4, "4 guten Resultate!") self.assertEqual(simple_without_format % 1, "gutes Resultat") self.assertEqual(simple_without_format % 4, "guten Resultate") complex_nonlazy = ngettext_lazy( "Hi %(name)s, %(num)d good result", "Hi %(name)s, %(num)d good results", 4 ) complex_deferred = ngettext_lazy( "Hi %(name)s, %(num)d good result", "Hi %(name)s, %(num)d good results", "num", ) complex_context_nonlazy = npgettext_lazy( "Greeting", "Hi %(name)s, %(num)d good result", "Hi %(name)s, %(num)d good results", 4, ) complex_context_deferred = npgettext_lazy( "Greeting", "Hi %(name)s, %(num)d good result", "Hi %(name)s, %(num)d good results", "num", ) with translation.override("de"): self.assertEqual( complex_nonlazy % {"num": 4, "name": "Jim"}, "Hallo Jim, 4 guten Resultate", ) self.assertEqual( complex_deferred % {"name": "Jim", "num": 1}, "Hallo Jim, 1 gutes Resultat", ) self.assertEqual( complex_deferred % {"name": "Jim", "num": 5}, "Hallo Jim, 5 guten Resultate", ) with 
self.assertRaisesMessage(KeyError, "Your dictionary lacks key"): complex_deferred % {"name": "Jim"} self.assertEqual( complex_context_nonlazy % {"num": 4, "name": "Jim"}, "Willkommen Jim, 4 guten Resultate", ) self.assertEqual( complex_context_deferred % {"name": "Jim", "num": 1}, "Willkommen Jim, 1 gutes Resultat", ) self.assertEqual( complex_context_deferred % {"name": "Jim", "num": 5}, "Willkommen Jim, 5 guten Resultate", ) with self.assertRaisesMessage(KeyError, "Your dictionary lacks key"): complex_context_deferred % {"name": "Jim"} @override_settings(LOCALE_PATHS=extended_locale_paths) def test_ngettext_lazy_format_style(self): simple_with_format = ngettext_lazy("{} good result", "{} good results") simple_context_with_format = npgettext_lazy( "Exclamation", "{} good result", "{} good results" ) with translation.override("de"): self.assertEqual(simple_with_format.format(1), "1 gutes Resultat") self.assertEqual(simple_with_format.format(4), "4 guten Resultate") self.assertEqual(simple_context_with_format.format(1), "1 gutes Resultat!") self.assertEqual(simple_context_with_format.format(4), "4 guten Resultate!") complex_nonlazy = ngettext_lazy( "Hi {name}, {num} good result", "Hi {name}, {num} good results", 4 ) complex_deferred = ngettext_lazy( "Hi {name}, {num} good result", "Hi {name}, {num} good results", "num" ) complex_context_nonlazy = npgettext_lazy( "Greeting", "Hi {name}, {num} good result", "Hi {name}, {num} good results", 4, ) complex_context_deferred = npgettext_lazy( "Greeting", "Hi {name}, {num} good result", "Hi {name}, {num} good results", "num", ) with translation.override("de"): self.assertEqual( complex_nonlazy.format(num=4, name="Jim"), "Hallo Jim, 4 guten Resultate", ) self.assertEqual( complex_deferred.format(name="Jim", num=1), "Hallo Jim, 1 gutes Resultat", ) self.assertEqual( complex_deferred.format(name="Jim", num=5), "Hallo Jim, 5 guten Resultate", ) with self.assertRaisesMessage(KeyError, "Your dictionary lacks key"): complex_deferred.format(name="Jim") self.assertEqual( complex_context_nonlazy.format(num=4, name="Jim"), "Willkommen Jim, 4 guten Resultate", ) self.assertEqual( complex_context_deferred.format(name="Jim", num=1), "Willkommen Jim, 1 gutes Resultat", ) self.assertEqual( complex_context_deferred.format(name="Jim", num=5), "Willkommen Jim, 5 guten Resultate", ) with self.assertRaisesMessage(KeyError, "Your dictionary lacks key"): complex_context_deferred.format(name="Jim") def test_ngettext_lazy_bool(self): self.assertTrue(ngettext_lazy("%d good result", "%d good results")) self.assertFalse(ngettext_lazy("", "")) def test_ngettext_lazy_pickle(self): s1 = ngettext_lazy("%d good result", "%d good results") self.assertEqual(s1 % 1, "1 good result") self.assertEqual(s1 % 8, "8 good results") s2 = pickle.loads(pickle.dumps(s1)) self.assertEqual(s2 % 1, "1 good result") self.assertEqual(s2 % 8, "8 good results") @override_settings(LOCALE_PATHS=extended_locale_paths) def test_pgettext(self): trans_real._active = Local() trans_real._translations = {} with translation.override("de"): self.assertEqual(pgettext("unexisting", "May"), "May") self.assertEqual(pgettext("month name", "May"), "Mai") self.assertEqual(pgettext("verb", "May"), "Kann") self.assertEqual( npgettext("search", "%d result", "%d results", 4) % 4, "4 Resultate" ) def test_empty_value(self): """Empty value must stay empty after being translated (#23196).""" with translation.override("de"): self.assertEqual("", gettext("")) s = mark_safe("") self.assertEqual(s, gettext(s)) 
@override_settings(LOCALE_PATHS=extended_locale_paths) def test_safe_status(self): """ Translating a string requiring no auto-escaping with gettext or pgettext shouldn't change the "safe" status. """ trans_real._active = Local() trans_real._translations = {} s1 = mark_safe("Password") s2 = mark_safe("May") with translation.override("de", deactivate=True): self.assertIs(type(gettext(s1)), SafeString) self.assertIs(type(pgettext("month name", s2)), SafeString) self.assertEqual("aPassword", SafeString("a") + s1) self.assertEqual("Passworda", s1 + SafeString("a")) self.assertEqual("Passworda", s1 + mark_safe("a")) self.assertEqual("aPassword", mark_safe("a") + s1) self.assertEqual("as", mark_safe("a") + mark_safe("s")) def test_maclines(self): """ Translations on files with Mac or DOS end of lines will be converted to unix EOF in .po catalogs. """ ca_translation = trans_real.translation("ca") ca_translation._catalog["Mac\nEOF\n"] = "Catalan Mac\nEOF\n" ca_translation._catalog["Win\nEOF\n"] = "Catalan Win\nEOF\n" with translation.override("ca", deactivate=True): self.assertEqual("Catalan Mac\nEOF\n", gettext("Mac\rEOF\r")) self.assertEqual("Catalan Win\nEOF\n", gettext("Win\r\nEOF\r\n")) def test_to_locale(self): tests = ( ("en", "en"), ("EN", "en"), ("en-us", "en_US"), ("EN-US", "en_US"), ("en_US", "en_US"), # With > 2 characters after the dash. ("sr-latn", "sr_Latn"), ("sr-LATN", "sr_Latn"), ("sr_Latn", "sr_Latn"), # 3-char language codes. ("ber-MA", "ber_MA"), ("BER-MA", "ber_MA"), ("BER_MA", "ber_MA"), ("ber_MA", "ber_MA"), # With private use subtag (x-informal). ("nl-nl-x-informal", "nl_NL-x-informal"), ("NL-NL-X-INFORMAL", "nl_NL-x-informal"), ("sr-latn-x-informal", "sr_Latn-x-informal"), ("SR-LATN-X-INFORMAL", "sr_Latn-x-informal"), ) for lang, locale in tests: with self.subTest(lang=lang): self.assertEqual(to_locale(lang), locale) def test_to_language(self): self.assertEqual(to_language("en_US"), "en-us") self.assertEqual(to_language("sr_Lat"), "sr-lat") def test_language_bidi(self): self.assertIs(get_language_bidi(), False) with translation.override(None): self.assertIs(get_language_bidi(), False) def test_language_bidi_null(self): self.assertIs(trans_null.get_language_bidi(), False) with override_settings(LANGUAGE_CODE="he"): self.assertIs(get_language_bidi(), True) class TranslationLoadingTests(SimpleTestCase): def setUp(self): """Clear translation state.""" self._old_language = get_language() self._old_translations = trans_real._translations deactivate() trans_real._translations = {} def tearDown(self): trans_real._translations = self._old_translations activate(self._old_language) @override_settings( USE_I18N=True, LANGUAGE_CODE="en", LANGUAGES=[ ("en", "English"), ("en-ca", "English (Canada)"), ("en-nz", "English (New Zealand)"), ("en-au", "English (Australia)"), ], LOCALE_PATHS=[os.path.join(here, "loading")], INSTALLED_APPS=["i18n.loading_app"], ) def test_translation_loading(self): """ "loading_app" does not have translations for all languages provided by "loading". Catalogs are merged correctly. """ tests = [ ("en", "local country person"), ("en_AU", "aussie"), ("en_NZ", "kiwi"), ("en_CA", "canuck"), ] # Load all relevant translations. for language, _ in tests: activate(language) # Catalogs are merged correctly. 
for language, nickname in tests: with self.subTest(language=language): activate(language) self.assertEqual(gettext("local country person"), nickname) class TranslationThreadSafetyTests(SimpleTestCase): def setUp(self): self._old_language = get_language() self._translations = trans_real._translations # here we rely on .split() being called inside the _fetch() # in trans_real.translation() class sideeffect_str(str): def split(self, *args, **kwargs): res = str.split(self, *args, **kwargs) trans_real._translations["en-YY"] = None return res trans_real._translations = {sideeffect_str("en-XX"): None} def tearDown(self): trans_real._translations = self._translations activate(self._old_language) def test_bug14894_translation_activate_thread_safety(self): translation_count = len(trans_real._translations) # May raise RuntimeError if translation.activate() isn't thread-safe. translation.activate("pl") # make sure sideeffect_str actually added a new translation self.assertLess(translation_count, len(trans_real._translations)) class FormattingTests(SimpleTestCase): def setUp(self): super().setUp() self.n = decimal.Decimal("66666.666") self.f = 99999.999 self.d = datetime.date(2009, 12, 31) self.dt = datetime.datetime(2009, 12, 31, 20, 50) self.t = datetime.time(10, 15, 48) self.long = 10000 self.ctxt = Context( { "n": self.n, "t": self.t, "d": self.d, "dt": self.dt, "f": self.f, "l": self.long, } ) def test_all_format_strings(self): all_locales = LANG_INFO.keys() some_date = datetime.date(2017, 10, 14) some_datetime = datetime.datetime(2017, 10, 14, 10, 23) for locale in all_locales: with self.subTest(locale=locale), translation.override(locale): self.assertIn( "2017", date_format(some_date) ) # Uses DATE_FORMAT by default self.assertIn( "23", time_format(some_datetime) ) # Uses TIME_FORMAT by default self.assertIn("2017", date_format(some_datetime, "DATETIME_FORMAT")) self.assertIn("2017", date_format(some_date, "YEAR_MONTH_FORMAT")) self.assertIn("14", date_format(some_date, "MONTH_DAY_FORMAT")) self.assertIn("2017", date_format(some_date, "SHORT_DATE_FORMAT")) self.assertIn( "2017", date_format(some_datetime, "SHORT_DATETIME_FORMAT"), ) def test_locale_independent(self): """ Localization of numbers """ with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual( "66666.66", nformat( self.n, decimal_sep=".", decimal_pos=2, grouping=3, thousand_sep="," ), ) self.assertEqual( "66666A6", nformat( self.n, decimal_sep="A", decimal_pos=1, grouping=1, thousand_sep="B" ), ) self.assertEqual( "66666", nformat( self.n, decimal_sep="X", decimal_pos=0, grouping=1, thousand_sep="Y" ), ) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual( "66,666.66", nformat( self.n, decimal_sep=".", decimal_pos=2, grouping=3, thousand_sep="," ), ) self.assertEqual( "6B6B6B6B6A6", nformat( self.n, decimal_sep="A", decimal_pos=1, grouping=1, thousand_sep="B" ), ) self.assertEqual( "-66666.6", nformat(-66666.666, decimal_sep=".", decimal_pos=1) ) self.assertEqual( "-66666.0", nformat(int("-66666"), decimal_sep=".", decimal_pos=1) ) self.assertEqual( "10000.0", nformat(self.long, decimal_sep=".", decimal_pos=1) ) self.assertEqual( "10,00,00,000.00", nformat( 100000000.00, decimal_sep=".", decimal_pos=2, grouping=(3, 2, 0), thousand_sep=",", ), ) self.assertEqual( "1,0,00,000,0000.00", nformat( 10000000000.00, decimal_sep=".", decimal_pos=2, grouping=(4, 3, 2, 1, 0), thousand_sep=",", ), ) self.assertEqual( "10000,00,000.00", nformat( 1000000000.00, decimal_sep=".", decimal_pos=2, grouping=(3, 2, -1), 
thousand_sep=",", ), ) # This unusual grouping/force_grouping combination may be triggered # by the intcomma filter. self.assertEqual( "10000", nformat( self.long, decimal_sep=".", decimal_pos=0, grouping=0, force_grouping=True, ), ) # date filter self.assertEqual( "31.12.2009 в 20:50", Template('{{ dt|date:"d.m.Y в H:i" }}').render(self.ctxt), ) self.assertEqual( "⌚ 10:15", Template('{{ t|time:"⌚ H:i" }}').render(self.ctxt) ) def test_false_like_locale_formats(self): """ The active locale's formats take precedence over the default settings even if they would be interpreted as False in a conditional test (e.g. 0 or empty string) (#16938). """ with translation.override("fr"): with self.settings(USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR="!"): self.assertEqual("\xa0", get_format("THOUSAND_SEPARATOR")) # Even a second time (after the format has been cached)... self.assertEqual("\xa0", get_format("THOUSAND_SEPARATOR")) with self.settings(FIRST_DAY_OF_WEEK=0): self.assertEqual(1, get_format("FIRST_DAY_OF_WEEK")) # Even a second time (after the format has been cached)... self.assertEqual(1, get_format("FIRST_DAY_OF_WEEK")) def test_l10n_enabled(self): self.maxDiff = 3000 # Catalan locale with translation.override("ca", deactivate=True): self.assertEqual(r"j E \d\e Y", get_format("DATE_FORMAT")) self.assertEqual(1, get_format("FIRST_DAY_OF_WEEK")) self.assertEqual(",", get_format("DECIMAL_SEPARATOR")) self.assertEqual("10:15", time_format(self.t)) self.assertEqual("31 desembre de 2009", date_format(self.d)) self.assertEqual("1 abril de 2009", date_format(datetime.date(2009, 4, 1))) self.assertEqual( "desembre del 2009", date_format(self.d, "YEAR_MONTH_FORMAT") ) self.assertEqual( "31/12/2009 20:50", date_format(self.dt, "SHORT_DATETIME_FORMAT") ) self.assertEqual("No localizable", localize("No localizable")) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual("66.666,666", localize(self.n)) self.assertEqual("99.999,999", localize(self.f)) self.assertEqual("10.000", localize(self.long)) self.assertEqual("True", localize(True)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual("66666,666", localize(self.n)) self.assertEqual("99999,999", localize(self.f)) self.assertEqual("10000", localize(self.long)) self.assertEqual("31 desembre de 2009", localize(self.d)) self.assertEqual("31 desembre de 2009 a les 20:50", localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual("66.666,666", Template("{{ n }}").render(self.ctxt)) self.assertEqual("99.999,999", Template("{{ f }}").render(self.ctxt)) self.assertEqual("10.000", Template("{{ l }}").render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=True): form3 = I18nForm( { "decimal_field": "66.666,666", "float_field": "99.999,999", "date_field": "31/12/2009", "datetime_field": "31/12/2009 20:50", "time_field": "20:50", "integer_field": "1.234", } ) self.assertTrue(form3.is_valid()) self.assertEqual( decimal.Decimal("66666.666"), form3.cleaned_data["decimal_field"] ) self.assertEqual(99999.999, form3.cleaned_data["float_field"]) self.assertEqual( datetime.date(2009, 12, 31), form3.cleaned_data["date_field"] ) self.assertEqual( datetime.datetime(2009, 12, 31, 20, 50), form3.cleaned_data["datetime_field"], ) self.assertEqual( datetime.time(20, 50), form3.cleaned_data["time_field"] ) self.assertEqual(1234, form3.cleaned_data["integer_field"]) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual("66666,666", Template("{{ n }}").render(self.ctxt)) self.assertEqual("99999,999", 
Template("{{ f }}").render(self.ctxt)) self.assertEqual( "31 desembre de 2009", Template("{{ d }}").render(self.ctxt) ) self.assertEqual( "31 desembre de 2009 a les 20:50", Template("{{ dt }}").render(self.ctxt), ) self.assertEqual( "66666,67", Template("{{ n|floatformat:2 }}").render(self.ctxt) ) self.assertEqual( "100000,0", Template("{{ f|floatformat }}").render(self.ctxt) ) self.assertEqual( "66.666,67", Template('{{ n|floatformat:"2g" }}').render(self.ctxt), ) self.assertEqual( "100.000,0", Template('{{ f|floatformat:"g" }}').render(self.ctxt), ) self.assertEqual( "10:15", Template('{{ t|time:"TIME_FORMAT" }}').render(self.ctxt) ) self.assertEqual( "31/12/2009", Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt), ) self.assertEqual( "31/12/2009 20:50", Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt), ) self.assertEqual( date_format(datetime.datetime.now()), Template('{% now "DATE_FORMAT" %}').render(self.ctxt), ) with self.settings(USE_THOUSAND_SEPARATOR=False): form4 = I18nForm( { "decimal_field": "66666,666", "float_field": "99999,999", "date_field": "31/12/2009", "datetime_field": "31/12/2009 20:50", "time_field": "20:50", "integer_field": "1234", } ) self.assertTrue(form4.is_valid()) self.assertEqual( decimal.Decimal("66666.666"), form4.cleaned_data["decimal_field"] ) self.assertEqual(99999.999, form4.cleaned_data["float_field"]) self.assertEqual( datetime.date(2009, 12, 31), form4.cleaned_data["date_field"] ) self.assertEqual( datetime.datetime(2009, 12, 31, 20, 50), form4.cleaned_data["datetime_field"], ) self.assertEqual( datetime.time(20, 50), form4.cleaned_data["time_field"] ) self.assertEqual(1234, form4.cleaned_data["integer_field"]) form5 = SelectDateForm( { "date_field_month": "12", "date_field_day": "31", "date_field_year": "2009", } ) self.assertTrue(form5.is_valid()) self.assertEqual( datetime.date(2009, 12, 31), form5.cleaned_data["date_field"] ) self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' "</select>" '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">gener</option>' '<option value="2">febrer</option>' '<option value="3">mar\xe7</option>' '<option value="4">abril</option>' '<option value="5">maig</option>' '<option value="6">juny</option>' '<option value="7">juliol</option>' '<option value="8">agost</option>' '<option value="9">setembre</option>' '<option value="10">octubre</option>' '<option value="11">novembre</option>' '<option 
value="12" selected>desembre</option>' "</select>" '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' "</select>", forms.SelectDateWidget(years=range(2009, 2019)).render( "mydate", datetime.date(2009, 12, 31) ), ) # Russian locale (with E as month) with translation.override("ru", deactivate=True): self.assertHTMLEqual( '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' "</select>" '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">\u042f\u043d\u0432\u0430\u0440\u044c</option>' '<option value="2">\u0424\u0435\u0432\u0440\u0430\u043b\u044c</option>' '<option value="3">\u041c\u0430\u0440\u0442</option>' '<option value="4">\u0410\u043f\u0440\u0435\u043b\u044c</option>' '<option value="5">\u041c\u0430\u0439</option>' '<option value="6">\u0418\u044e\u043d\u044c</option>' '<option value="7">\u0418\u044e\u043b\u044c</option>' '<option value="8">\u0410\u0432\u0433\u0443\u0441\u0442</option>' '<option value="9">\u0421\u0435\u043d\u0442\u044f\u0431\u0440\u044c' "</option>" '<option value="10">\u041e\u043a\u0442\u044f\u0431\u0440\u044c</option>' '<option value="11">\u041d\u043e\u044f\u0431\u0440\u044c</option>' '<option value="12" selected>\u0414\u0435\u043a\u0430\u0431\u0440\u044c' "</option>" "</select>" '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' "</select>", forms.SelectDateWidget(years=range(2009, 2019)).render( "mydate", datetime.date(2009, 12, 31) ), ) # English locale with translation.override("en", deactivate=True): self.assertEqual("N j, Y", get_format("DATE_FORMAT")) self.assertEqual(0, get_format("FIRST_DAY_OF_WEEK")) self.assertEqual(".", get_format("DECIMAL_SEPARATOR")) self.assertEqual("Dec. 
31, 2009", date_format(self.d)) self.assertEqual("December 2009", date_format(self.d, "YEAR_MONTH_FORMAT")) self.assertEqual( "12/31/2009 8:50 p.m.", date_format(self.dt, "SHORT_DATETIME_FORMAT") ) self.assertEqual("No localizable", localize("No localizable")) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual("66,666.666", localize(self.n)) self.assertEqual("99,999.999", localize(self.f)) self.assertEqual("10,000", localize(self.long)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual("66666.666", localize(self.n)) self.assertEqual("99999.999", localize(self.f)) self.assertEqual("10000", localize(self.long)) self.assertEqual("Dec. 31, 2009", localize(self.d)) self.assertEqual("Dec. 31, 2009, 8:50 p.m.", localize(self.dt)) with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual("66,666.666", Template("{{ n }}").render(self.ctxt)) self.assertEqual("99,999.999", Template("{{ f }}").render(self.ctxt)) self.assertEqual("10,000", Template("{{ l }}").render(self.ctxt)) with self.settings(USE_THOUSAND_SEPARATOR=False): self.assertEqual("66666.666", Template("{{ n }}").render(self.ctxt)) self.assertEqual("99999.999", Template("{{ f }}").render(self.ctxt)) self.assertEqual("Dec. 31, 2009", Template("{{ d }}").render(self.ctxt)) self.assertEqual( "Dec. 31, 2009, 8:50 p.m.", Template("{{ dt }}").render(self.ctxt) ) self.assertEqual( "66666.67", Template("{{ n|floatformat:2 }}").render(self.ctxt) ) self.assertEqual( "100000.0", Template("{{ f|floatformat }}").render(self.ctxt) ) self.assertEqual( "66,666.67", Template('{{ n|floatformat:"2g" }}').render(self.ctxt), ) self.assertEqual( "100,000.0", Template('{{ f|floatformat:"g" }}').render(self.ctxt), ) self.assertEqual( "12/31/2009", Template('{{ d|date:"SHORT_DATE_FORMAT" }}').render(self.ctxt), ) self.assertEqual( "12/31/2009 8:50 p.m.", Template('{{ dt|date:"SHORT_DATETIME_FORMAT" }}').render(self.ctxt), ) form5 = I18nForm( { "decimal_field": "66666.666", "float_field": "99999.999", "date_field": "12/31/2009", "datetime_field": "12/31/2009 20:50", "time_field": "20:50", "integer_field": "1234", } ) self.assertTrue(form5.is_valid()) self.assertEqual( decimal.Decimal("66666.666"), form5.cleaned_data["decimal_field"] ) self.assertEqual(99999.999, form5.cleaned_data["float_field"]) self.assertEqual( datetime.date(2009, 12, 31), form5.cleaned_data["date_field"] ) self.assertEqual( datetime.datetime(2009, 12, 31, 20, 50), form5.cleaned_data["datetime_field"], ) self.assertEqual(datetime.time(20, 50), form5.cleaned_data["time_field"]) self.assertEqual(1234, form5.cleaned_data["integer_field"]) form6 = SelectDateForm( { "date_field_month": "12", "date_field_day": "31", "date_field_year": "2009", } ) self.assertTrue(form6.is_valid()) self.assertEqual( datetime.date(2009, 12, 31), form6.cleaned_data["date_field"] ) self.assertHTMLEqual( '<select name="mydate_month" id="id_mydate_month">' '<option value="">---</option>' '<option value="1">January</option>' '<option value="2">February</option>' '<option value="3">March</option>' '<option value="4">April</option>' '<option value="5">May</option>' '<option value="6">June</option>' '<option value="7">July</option>' '<option value="8">August</option>' '<option value="9">September</option>' '<option value="10">October</option>' '<option value="11">November</option>' '<option value="12" selected>December</option>' "</select>" '<select name="mydate_day" id="id_mydate_day">' '<option value="">---</option>' '<option value="1">1</option>' '<option value="2">2</option>' '<option 
value="3">3</option>' '<option value="4">4</option>' '<option value="5">5</option>' '<option value="6">6</option>' '<option value="7">7</option>' '<option value="8">8</option>' '<option value="9">9</option>' '<option value="10">10</option>' '<option value="11">11</option>' '<option value="12">12</option>' '<option value="13">13</option>' '<option value="14">14</option>' '<option value="15">15</option>' '<option value="16">16</option>' '<option value="17">17</option>' '<option value="18">18</option>' '<option value="19">19</option>' '<option value="20">20</option>' '<option value="21">21</option>' '<option value="22">22</option>' '<option value="23">23</option>' '<option value="24">24</option>' '<option value="25">25</option>' '<option value="26">26</option>' '<option value="27">27</option>' '<option value="28">28</option>' '<option value="29">29</option>' '<option value="30">30</option>' '<option value="31" selected>31</option>' "</select>" '<select name="mydate_year" id="id_mydate_year">' '<option value="">---</option>' '<option value="2009" selected>2009</option>' '<option value="2010">2010</option>' '<option value="2011">2011</option>' '<option value="2012">2012</option>' '<option value="2013">2013</option>' '<option value="2014">2014</option>' '<option value="2015">2015</option>' '<option value="2016">2016</option>' '<option value="2017">2017</option>' '<option value="2018">2018</option>' "</select>", forms.SelectDateWidget(years=range(2009, 2019)).render( "mydate", datetime.date(2009, 12, 31) ), ) def test_sub_locales(self): """ Check if sublocales fall back to the main locale """ with self.settings(USE_THOUSAND_SEPARATOR=True): with translation.override("de-at", deactivate=True): self.assertEqual("66.666,666", Template("{{ n }}").render(self.ctxt)) with translation.override("es-us", deactivate=True): self.assertEqual("31 de diciembre de 2009", date_format(self.d)) def test_localized_input(self): """ Tests if form input is correctly localized """ self.maxDiff = 1200 with translation.override("de-at", deactivate=True): form6 = CompanyForm( { "name": "acme", "date_added": datetime.datetime(2009, 12, 31, 6, 0, 0), "cents_paid": decimal.Decimal("59.47"), "products_delivered": 12000, } ) self.assertTrue(form6.is_valid()) self.assertHTMLEqual( form6.as_ul(), '<li><label for="id_name">Name:</label>' '<input id="id_name" type="text" name="name" value="acme" ' ' maxlength="50" required></li>' '<li><label for="id_date_added">Date added:</label>' '<input type="text" name="date_added" value="31.12.2009 06:00:00" ' ' id="id_date_added" required></li>' '<li><label for="id_cents_paid">Cents paid:</label>' '<input type="text" name="cents_paid" value="59,47" id="id_cents_paid" ' " required></li>" '<li><label for="id_products_delivered">Products delivered:</label>' '<input type="text" name="products_delivered" value="12000" ' ' id="id_products_delivered" required>' "</li>", ) self.assertEqual( localize_input(datetime.datetime(2009, 12, 31, 6, 0, 0)), "31.12.2009 06:00:00", ) self.assertEqual( datetime.datetime(2009, 12, 31, 6, 0, 0), form6.cleaned_data["date_added"], ) with self.settings(USE_THOUSAND_SEPARATOR=True): # Checking for the localized "products_delivered" field self.assertInHTML( '<input type="text" name="products_delivered" ' 'value="12.000" id="id_products_delivered" required>', form6.as_ul(), ) def test_localized_input_func(self): tests = ( (True, "True"), (datetime.date(1, 1, 1), "0001-01-01"), (datetime.datetime(1, 1, 1), "0001-01-01 00:00:00"), ) with 
self.settings(USE_THOUSAND_SEPARATOR=True): for value, expected in tests: with self.subTest(value=value): self.assertEqual(localize_input(value), expected) def test_sanitize_strftime_format(self): for year in (1, 99, 999, 1000): dt = datetime.date(year, 1, 1) for fmt, expected in [ ("%C", "%02d" % (year // 100)), ("%F", "%04d-01-01" % year), ("%G", "%04d" % year), ("%Y", "%04d" % year), ]: with self.subTest(year=year, fmt=fmt): fmt = sanitize_strftime_format(fmt) self.assertEqual(dt.strftime(fmt), expected) def test_sanitize_strftime_format_with_escaped_percent(self): dt = datetime.date(1, 1, 1) for fmt, expected in [ ("%%C", "%C"), ("%%F", "%F"), ("%%G", "%G"), ("%%Y", "%Y"), ("%%%%C", "%%C"), ("%%%%F", "%%F"), ("%%%%G", "%%G"), ("%%%%Y", "%%Y"), ]: with self.subTest(fmt=fmt): fmt = sanitize_strftime_format(fmt) self.assertEqual(dt.strftime(fmt), expected) for year in (1, 99, 999, 1000): dt = datetime.date(year, 1, 1) for fmt, expected in [ ("%%%C", "%%%02d" % (year // 100)), ("%%%F", "%%%04d-01-01" % year), ("%%%G", "%%%04d" % year), ("%%%Y", "%%%04d" % year), ("%%%%%C", "%%%%%02d" % (year // 100)), ("%%%%%F", "%%%%%04d-01-01" % year), ("%%%%%G", "%%%%%04d" % year), ("%%%%%Y", "%%%%%04d" % year), ]: with self.subTest(year=year, fmt=fmt): fmt = sanitize_strftime_format(fmt) self.assertEqual(dt.strftime(fmt), expected) def test_sanitize_separators(self): """ Tests django.utils.formats.sanitize_separators. """ # Non-strings are untouched self.assertEqual(sanitize_separators(123), 123) with translation.override("ru", deactivate=True): # Russian locale has non-breaking space (\xa0) as thousand separator # Usual space is accepted too when sanitizing inputs with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual(sanitize_separators("1\xa0234\xa0567"), "1234567") self.assertEqual(sanitize_separators("77\xa0777,777"), "77777.777") self.assertEqual(sanitize_separators("12 345"), "12345") self.assertEqual(sanitize_separators("77 777,777"), "77777.777") with translation.override(None): with self.settings(USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR="."): self.assertEqual(sanitize_separators("12\xa0345"), "12\xa0345") with self.settings(USE_THOUSAND_SEPARATOR=True): with patch_formats( get_language(), THOUSAND_SEPARATOR=".", DECIMAL_SEPARATOR="," ): self.assertEqual(sanitize_separators("10.234"), "10234") # Suspicion that user entered dot as decimal separator (#22171) self.assertEqual(sanitize_separators("10.10"), "10.10") with translation.override(None): with self.settings(DECIMAL_SEPARATOR=","): self.assertEqual(sanitize_separators("1001,10"), "1001.10") self.assertEqual(sanitize_separators("1001.10"), "1001.10") with self.settings( DECIMAL_SEPARATOR=",", THOUSAND_SEPARATOR=".", USE_THOUSAND_SEPARATOR=True, ): self.assertEqual(sanitize_separators("1.001,10"), "1001.10") self.assertEqual(sanitize_separators("1001,10"), "1001.10") self.assertEqual(sanitize_separators("1001.10"), "1001.10") # Invalid output. self.assertEqual(sanitize_separators("1,001.10"), "1.001.10") def test_iter_format_modules(self): """ Tests the iter_format_modules function. 
""" # Importing some format modules so that we can compare the returned # modules with these expected modules default_mod = import_module("django.conf.locale.de.formats") test_mod = import_module("i18n.other.locale.de.formats") test_mod2 = import_module("i18n.other2.locale.de.formats") with translation.override("de-at", deactivate=True): # Should return the correct default module when no setting is set self.assertEqual(list(iter_format_modules("de")), [default_mod]) # When the setting is a string, should return the given module and # the default module self.assertEqual( list(iter_format_modules("de", "i18n.other.locale")), [test_mod, default_mod], ) # When setting is a list of strings, should return the given # modules and the default module self.assertEqual( list( iter_format_modules( "de", ["i18n.other.locale", "i18n.other2.locale"] ) ), [test_mod, test_mod2, default_mod], ) def test_iter_format_modules_stability(self): """ Tests the iter_format_modules function always yields format modules in a stable and correct order in presence of both base ll and ll_CC formats. """ en_format_mod = import_module("django.conf.locale.en.formats") en_gb_format_mod = import_module("django.conf.locale.en_GB.formats") self.assertEqual( list(iter_format_modules("en-gb")), [en_gb_format_mod, en_format_mod] ) def test_get_format_modules_lang(self): with translation.override("de", deactivate=True): self.assertEqual(".", get_format("DECIMAL_SEPARATOR", lang="en")) def test_get_format_lazy_format(self): self.assertEqual(get_format(gettext_lazy("DATE_FORMAT")), "N j, Y") def test_localize_templatetag_and_filter(self): """ Test the {% localize %} templatetag and the localize/unlocalize filters. """ context = Context( {"int": 1455, "float": 3.14, "date": datetime.date(2016, 12, 31)} ) template1 = Template( "{% load l10n %}{% localize %}" "{{ int }}/{{ float }}/{{ date }}{% endlocalize %}; " "{% localize on %}{{ int }}/{{ float }}/{{ date }}{% endlocalize %}" ) template2 = Template( "{% load l10n %}{{ int }}/{{ float }}/{{ date }}; " "{% localize off %}{{ int }}/{{ float }}/{{ date }};{% endlocalize %} " "{{ int }}/{{ float }}/{{ date }}" ) template3 = Template( "{% load l10n %}{{ int }}/{{ float }}/{{ date }}; " "{{ int|unlocalize }}/{{ float|unlocalize }}/{{ date|unlocalize }}" ) expected_localized = "1.455/3,14/31. Dezember 2016" expected_unlocalized = "1455/3.14/Dez. 31, 2016" output1 = "; ".join([expected_localized, expected_localized]) output2 = "; ".join( [expected_localized, expected_unlocalized, expected_localized] ) output3 = "; ".join([expected_localized, expected_unlocalized]) with translation.override("de", deactivate=True): with self.settings(USE_THOUSAND_SEPARATOR=True): self.assertEqual(template1.render(context), output1) self.assertEqual(template2.render(context), output2) self.assertEqual(template3.render(context), output3) def test_localized_off_numbers(self): """A string representation is returned for unlocalized numbers.""" template = Template( "{% load l10n %}{% localize off %}" "{{ int }}/{{ float }}/{{ decimal }}{% endlocalize %}" ) context = Context( {"int": 1455, "float": 3.14, "decimal": decimal.Decimal("24.1567")} ) with self.settings( DECIMAL_SEPARATOR=",", USE_THOUSAND_SEPARATOR=True, THOUSAND_SEPARATOR="°", NUMBER_GROUPING=2, ): self.assertEqual(template.render(context), "1455/3.14/24.1567") def test_localized_as_text_as_hidden_input(self): """ Form input with 'as_hidden' or 'as_text' is correctly localized. 
""" self.maxDiff = 1200 with translation.override("de-at", deactivate=True): template = Template( "{% load l10n %}{{ form.date_added }}; {{ form.cents_paid }}" ) template_as_text = Template( "{% load l10n %}" "{{ form.date_added.as_text }}; {{ form.cents_paid.as_text }}" ) template_as_hidden = Template( "{% load l10n %}" "{{ form.date_added.as_hidden }}; {{ form.cents_paid.as_hidden }}" ) form = CompanyForm( { "name": "acme", "date_added": datetime.datetime(2009, 12, 31, 6, 0, 0), "cents_paid": decimal.Decimal("59.47"), "products_delivered": 12000, } ) context = Context({"form": form}) self.assertTrue(form.is_valid()) self.assertHTMLEqual( template.render(context), '<input id="id_date_added" name="date_added" type="text" ' 'value="31.12.2009 06:00:00" required>;' '<input id="id_cents_paid" name="cents_paid" type="text" value="59,47" ' "required>", ) self.assertHTMLEqual( template_as_text.render(context), '<input id="id_date_added" name="date_added" type="text" ' 'value="31.12.2009 06:00:00" required>;' '<input id="id_cents_paid" name="cents_paid" type="text" value="59,47" ' "required>", ) self.assertHTMLEqual( template_as_hidden.render(context), '<input id="id_date_added" name="date_added" type="hidden" ' 'value="31.12.2009 06:00:00">;' '<input id="id_cents_paid" name="cents_paid" type="hidden" ' 'value="59,47">', ) def test_format_arbitrary_settings(self): self.assertEqual(get_format("DEBUG"), "DEBUG") def test_get_custom_format(self): reset_format_cache() with self.settings(FORMAT_MODULE_PATH="i18n.other.locale"): with translation.override("fr", deactivate=True): self.assertEqual("d/m/Y CUSTOM", get_format("CUSTOM_DAY_FORMAT")) def test_admin_javascript_supported_input_formats(self): """ The first input format for DATE_INPUT_FORMATS, TIME_INPUT_FORMATS, and DATETIME_INPUT_FORMATS must not contain %f since that's unsupported by the admin's time picker widget. """ regex = re.compile("%([^BcdHImMpSwxXyY%])") for language_code, language_name in settings.LANGUAGES: for format_name in ( "DATE_INPUT_FORMATS", "TIME_INPUT_FORMATS", "DATETIME_INPUT_FORMATS", ): with self.subTest(language=language_code, format=format_name): formatter = get_format(format_name, lang=language_code)[0] self.assertEqual( regex.findall(formatter), [], "%s locale's %s uses an unsupported format code." % (language_code, format_name), ) class MiscTests(SimpleTestCase): rf = RequestFactory() @override_settings(LANGUAGE_CODE="de") def test_english_fallback(self): """ With a non-English LANGUAGE_CODE and if the active language is English or one of its variants, the untranslated string should be returned (instead of falling back to LANGUAGE_CODE) (See #24413). """ self.assertEqual(gettext("Image"), "Bild") with translation.override("en"): self.assertEqual(gettext("Image"), "Image") with translation.override("en-us"): self.assertEqual(gettext("Image"), "Image") with translation.override("en-ca"): self.assertEqual(gettext("Image"), "Image") def test_parse_spec_http_header(self): """ Testing HTTP header parsing. First, we test that we can parse the values according to the spec (and that we extract all the pieces in the right order). 
""" tests = [ # Good headers ("de", [("de", 1.0)]), ("en-AU", [("en-au", 1.0)]), ("es-419", [("es-419", 1.0)]), ("*;q=1.00", [("*", 1.0)]), ("en-AU;q=0.123", [("en-au", 0.123)]), ("en-au;q=0.5", [("en-au", 0.5)]), ("en-au;q=1.0", [("en-au", 1.0)]), ("da, en-gb;q=0.25, en;q=0.5", [("da", 1.0), ("en", 0.5), ("en-gb", 0.25)]), ("en-au-xx", [("en-au-xx", 1.0)]), ( "de,en-au;q=0.75,en-us;q=0.5,en;q=0.25,es;q=0.125,fa;q=0.125", [ ("de", 1.0), ("en-au", 0.75), ("en-us", 0.5), ("en", 0.25), ("es", 0.125), ("fa", 0.125), ], ), ("*", [("*", 1.0)]), ("de;q=0.", [("de", 0.0)]), ("en; q=1,", [("en", 1.0)]), ("en; q=1.0, * ; q=0.5", [("en", 1.0), ("*", 0.5)]), ( "en" + "-x" * 20, [("en-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x", 1.0)], ), ( ", ".join(["en; q=1.0"] * 20), [("en", 1.0)] * 20, ), # Bad headers ("en-gb;q=1.0000", []), ("en;q=0.1234", []), ("en;q=.2", []), ("abcdefghi-au", []), ("**", []), ("en,,gb", []), ("en-au;q=0.1.0", []), (("X" * 97) + "Z,en", []), ("da, en-gb;q=0.8, en;q=0.7,#", []), ("de;q=2.0", []), ("de;q=0.a", []), ("12-345", []), ("", []), ("en;q=1e0", []), ("en-au;q=1.0", []), # Invalid as language-range value too long. ("xxxxxxxx" + "-xxxxxxxx" * 500, []), # Header value too long, only parse up to limit. (", ".join(["en; q=1.0"] * 500), [("en", 1.0)] * 45), ] for value, expected in tests: with self.subTest(value=value): self.assertEqual( trans_real.parse_accept_lang_header(value), tuple(expected) ) def test_parse_literal_http_header(self): tests = [ ("pt-br", "pt-br"), ("pt", "pt"), ("es,de", "es"), ("es-a,de", "es"), # There isn't a Django translation to a US variation of the Spanish # language, a safe assumption. When the user sets it as the # preferred language, the main 'es' translation should be selected # instead. ("es-us", "es"), # There isn't a main language (zh) translation of Django but there # is a translation to variation (zh-hans) the user sets zh-hans as # the preferred language, it should be selected without falling # back nor ignoring it. ("zh-hans,de", "zh-hans"), ("NL", "nl"), ("fy", "fy"), ("ia", "ia"), ("sr-latn", "sr-latn"), ("zh-hans", "zh-hans"), ("zh-hant", "zh-hant"), ] for header, expected in tests: with self.subTest(header=header): request = self.rf.get("/", headers={"accept-language": header}) self.assertEqual(get_language_from_request(request), expected) @override_settings( LANGUAGES=[ ("en", "English"), ("zh-hans", "Simplified Chinese"), ("zh-hant", "Traditional Chinese"), ] ) def test_support_for_deprecated_chinese_language_codes(self): """ Some browsers (Firefox, IE, etc.) use deprecated language codes. As these language codes will be removed in Django 1.9, these will be incorrectly matched. For example zh-tw (traditional) will be interpreted as zh-hans (simplified), which is wrong. So we should also accept these deprecated language codes. refs #18419 -- this is explicitly for browser compatibility """ g = get_language_from_request request = self.rf.get("/", headers={"accept-language": "zh-cn,en"}) self.assertEqual(g(request), "zh-hans") request = self.rf.get("/", headers={"accept-language": "zh-tw,en"}) self.assertEqual(g(request), "zh-hant") def test_special_fallback_language(self): """ Some languages may have special fallbacks that don't follow the simple 'fr-ca' -> 'fr' logic (notably Chinese codes). 
""" request = self.rf.get("/", headers={"accept-language": "zh-my,en"}) self.assertEqual(get_language_from_request(request), "zh-hans") def test_subsequent_code_fallback_language(self): """ Subsequent language codes should be used when the language code is not supported. """ tests = [ ("zh-Hans-CN", "zh-hans"), ("zh-hans-mo", "zh-hans"), ("zh-hans-HK", "zh-hans"), ("zh-Hant-HK", "zh-hant"), ("zh-hant-tw", "zh-hant"), ("zh-hant-SG", "zh-hant"), ] for value, expected in tests: with self.subTest(value=value): request = self.rf.get("/", headers={"accept-language": f"{value},en"}) self.assertEqual(get_language_from_request(request), expected) def test_parse_language_cookie(self): g = get_language_from_request request = self.rf.get("/") request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "pt-br" self.assertEqual("pt-br", g(request)) request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "pt" self.assertEqual("pt", g(request)) request = self.rf.get("/", headers={"accept-language": "de"}) request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "es" self.assertEqual("es", g(request)) # There isn't a Django translation to a US variation of the Spanish # language, a safe assumption. When the user sets it as the preferred # language, the main 'es' translation should be selected instead. request = self.rf.get("/") request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "es-us" self.assertEqual(g(request), "es") # There isn't a main language (zh) translation of Django but there is a # translation to variation (zh-hans) the user sets zh-hans as the # preferred language, it should be selected without falling back nor # ignoring it. request = self.rf.get("/", headers={"accept-language": "de"}) request.COOKIES[settings.LANGUAGE_COOKIE_NAME] = "zh-hans" self.assertEqual(g(request), "zh-hans") @override_settings( USE_I18N=True, LANGUAGES=[ ("en", "English"), ("ar-dz", "Algerian Arabic"), ("de", "German"), ("de-at", "Austrian German"), ("pt-BR", "Portuguese (Brazil)"), ], ) def test_get_supported_language_variant_real(self): g = trans_real.get_supported_language_variant self.assertEqual(g("en"), "en") self.assertEqual(g("en-gb"), "en") self.assertEqual(g("de"), "de") self.assertEqual(g("de-at"), "de-at") self.assertEqual(g("de-ch"), "de") self.assertEqual(g("pt-br"), "pt-br") self.assertEqual(g("pt-BR"), "pt-BR") self.assertEqual(g("pt"), "pt-br") self.assertEqual(g("pt-pt"), "pt-br") self.assertEqual(g("ar-dz"), "ar-dz") self.assertEqual(g("ar-DZ"), "ar-DZ") with self.assertRaises(LookupError): g("pt", strict=True) with self.assertRaises(LookupError): g("pt-pt", strict=True) with self.assertRaises(LookupError): g("xyz") with self.assertRaises(LookupError): g("xy-zz") def test_get_supported_language_variant_null(self): g = trans_null.get_supported_language_variant self.assertEqual(g(settings.LANGUAGE_CODE), settings.LANGUAGE_CODE) with self.assertRaises(LookupError): g("pt") with self.assertRaises(LookupError): g("de") with self.assertRaises(LookupError): g("de-at") with self.assertRaises(LookupError): g("de", strict=True) with self.assertRaises(LookupError): g("de-at", strict=True) with self.assertRaises(LookupError): g("xyz") @override_settings( LANGUAGES=[ ("en", "English"), ("en-latn-us", "Latin English"), ("de", "German"), ("de-1996", "German, orthography of 1996"), ("de-at", "Austrian German"), ("de-ch-1901", "German, Swiss variant, traditional orthography"), ("i-mingo", "Mingo"), ("kl-tunumiit", "Tunumiisiut"), ("nan-hani-tw", "Hanji"), ("pl", "Polish"), ], ) def test_get_language_from_path_real(self): g = 
trans_real.get_language_from_path tests = [ ("/pl/", "pl"), ("/pl", "pl"), ("/xyz/", None), ("/en/", "en"), ("/en-gb/", "en"), ("/en-latn-us/", "en-latn-us"), ("/en-Latn-US/", "en-Latn-US"), ("/de/", "de"), ("/de-1996/", "de-1996"), ("/de-at/", "de-at"), ("/de-AT/", "de-AT"), ("/de-ch/", "de"), ("/de-ch-1901/", "de-ch-1901"), ("/de-simple-page-test/", None), ("/i-mingo/", "i-mingo"), ("/kl-tunumiit/", "kl-tunumiit"), ("/nan-hani-tw/", "nan-hani-tw"), ] for path, language in tests: with self.subTest(path=path): self.assertEqual(g(path), language) def test_get_language_from_path_null(self): g = trans_null.get_language_from_path self.assertIsNone(g("/pl/")) self.assertIsNone(g("/pl")) self.assertIsNone(g("/xyz/")) def test_cache_resetting(self): """ After setting LANGUAGE, the cache should be cleared and languages previously valid should not be used (#14170). """ g = get_language_from_request request = self.rf.get("/", headers={"accept-language": "pt-br"}) self.assertEqual("pt-br", g(request)) with self.settings(LANGUAGES=[("en", "English")]): self.assertNotEqual("pt-br", g(request)) def test_i18n_patterns_returns_list(self): with override_settings(USE_I18N=False): self.assertIsInstance(i18n_patterns([]), list) with override_settings(USE_I18N=True): self.assertIsInstance(i18n_patterns([]), list) class ResolutionOrderI18NTests(SimpleTestCase): def setUp(self): super().setUp() activate("de") def tearDown(self): deactivate() super().tearDown() def assertGettext(self, msgid, msgstr): result = gettext(msgid) self.assertIn( msgstr, result, "The string '%s' isn't in the translation of '%s'; the actual result is " "'%s'." % (msgstr, msgid, result), ) class AppResolutionOrderI18NTests(ResolutionOrderI18NTests): @override_settings(LANGUAGE_CODE="de") def test_app_translation(self): # Original translation. self.assertGettext("Date/time", "Datum/Zeit") # Different translation. with self.modify_settings(INSTALLED_APPS={"append": "i18n.resolution"}): # Force refreshing translations. activate("de") # Doesn't work because it's added later in the list. self.assertGettext("Date/time", "Datum/Zeit") with self.modify_settings( INSTALLED_APPS={"remove": "django.contrib.admin.apps.SimpleAdminConfig"} ): # Force refreshing translations. activate("de") # Unless the original is removed from the list. self.assertGettext("Date/time", "Datum/Zeit (APP)") @override_settings(LOCALE_PATHS=extended_locale_paths) class LocalePathsResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_locale_paths_translation(self): self.assertGettext("Time", "LOCALE_PATHS") def test_locale_paths_override_app_translation(self): with self.settings(INSTALLED_APPS=["i18n.resolution"]): self.assertGettext("Time", "LOCALE_PATHS") class DjangoFallbackResolutionOrderI18NTests(ResolutionOrderI18NTests): def test_django_fallback(self): self.assertEqual(gettext("Date/time"), "Datum/Zeit") @override_settings(INSTALLED_APPS=["i18n.territorial_fallback"]) class TranslationFallbackI18NTests(ResolutionOrderI18NTests): def test_sparse_territory_catalog(self): """ Untranslated strings for territorial language variants use the translations of the generic language. In this case, the de-de translation falls back to de. 
""" with translation.override("de-de"): self.assertGettext("Test 1 (en)", "(de-de)") self.assertGettext("Test 2 (en)", "(de)") class TestModels(TestCase): def test_lazy(self): tm = TestModel() tm.save() def test_safestr(self): c = Company(cents_paid=12, products_delivered=1) c.name = SafeString("Iñtërnâtiônàlizætiøn1") c.save() class TestLanguageInfo(SimpleTestCase): def test_localized_language_info(self): li = get_language_info("de") self.assertEqual(li["code"], "de") self.assertEqual(li["name_local"], "Deutsch") self.assertEqual(li["name"], "German") self.assertIs(li["bidi"], False) def test_unknown_language_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx"): get_language_info("xx") with translation.override("xx"): # A language with no translation catalogs should fallback to the # untranslated string. self.assertEqual(gettext("Title"), "Title") def test_unknown_only_country_code(self): li = get_language_info("de-xx") self.assertEqual(li["code"], "de") self.assertEqual(li["name_local"], "Deutsch") self.assertEqual(li["name"], "German") self.assertIs(li["bidi"], False) def test_unknown_language_code_and_country_code(self): with self.assertRaisesMessage(KeyError, "Unknown language code xx-xx and xx"): get_language_info("xx-xx") def test_fallback_language_code(self): """ get_language_info return the first fallback language info if the lang_info struct does not contain the 'name' key. """ li = get_language_info("zh-my") self.assertEqual(li["code"], "zh-hans") li = get_language_info("zh-hans") self.assertEqual(li["code"], "zh-hans") @override_settings( USE_I18N=True, LANGUAGES=[ ("en", "English"), ("fr", "French"), ], MIDDLEWARE=[ "django.middleware.locale.LocaleMiddleware", "django.middleware.common.CommonMiddleware", ], ROOT_URLCONF="i18n.urls", ) class LocaleMiddlewareTests(TestCase): def test_streaming_response(self): # Regression test for #5241 response = self.client.get("/fr/streaming/") self.assertContains(response, "Oui/Non") response = self.client.get("/en/streaming/") self.assertContains(response, "Yes/No") @override_settings( USE_I18N=True, LANGUAGES=[ ("en", "English"), ("de", "German"), ("fr", "French"), ], MIDDLEWARE=[ "django.middleware.locale.LocaleMiddleware", "django.middleware.common.CommonMiddleware", ], ROOT_URLCONF="i18n.urls_default_unprefixed", LANGUAGE_CODE="en", ) class UnprefixedDefaultLanguageTests(SimpleTestCase): def test_default_lang_without_prefix(self): """ With i18n_patterns(..., prefix_default_language=False), the default language (settings.LANGUAGE_CODE) should be accessible without a prefix. 
""" response = self.client.get("/simple/") self.assertEqual(response.content, b"Yes") def test_other_lang_with_prefix(self): response = self.client.get("/fr/simple/") self.assertEqual(response.content, b"Oui") def test_unprefixed_language_with_accept_language(self): """'Accept-Language' is respected.""" response = self.client.get("/simple/", headers={"accept-language": "fr"}) self.assertRedirects(response, "/fr/simple/") def test_unprefixed_language_with_cookie_language(self): """A language set in the cookies is respected.""" self.client.cookies.load({settings.LANGUAGE_COOKIE_NAME: "fr"}) response = self.client.get("/simple/") self.assertRedirects(response, "/fr/simple/") def test_unprefixed_language_with_non_valid_language(self): response = self.client.get("/simple/", headers={"accept-language": "fi"}) self.assertEqual(response.content, b"Yes") self.client.cookies.load({settings.LANGUAGE_COOKIE_NAME: "fi"}) response = self.client.get("/simple/") self.assertEqual(response.content, b"Yes") def test_page_with_dash(self): # A page starting with /de* shouldn't match the 'de' language code. response = self.client.get("/de-simple-page-test/") self.assertEqual(response.content, b"Yes") def test_no_redirect_on_404(self): """ A request for a nonexistent URL shouldn't cause a redirect to /<default_language>/<request_url> when prefix_default_language=False and /<default_language>/<request_url> has a URL match (#27402). """ # A match for /group1/group2/ must exist for this to act as a # regression test. response = self.client.get("/group1/group2/") self.assertEqual(response.status_code, 200) response = self.client.get("/nonexistent/") self.assertEqual(response.status_code, 404) @override_settings( USE_I18N=True, LANGUAGES=[ ("bg", "Bulgarian"), ("en-us", "English"), ("pt-br", "Portuguese (Brazil)"), ], MIDDLEWARE=[ "django.middleware.locale.LocaleMiddleware", "django.middleware.common.CommonMiddleware", ], ROOT_URLCONF="i18n.urls", ) class CountrySpecificLanguageTests(SimpleTestCase): rf = RequestFactory() def test_check_for_language(self): self.assertTrue(check_for_language("en")) self.assertTrue(check_for_language("en-us")) self.assertTrue(check_for_language("en-US")) self.assertFalse(check_for_language("en_US")) self.assertTrue(check_for_language("be")) self.assertTrue(check_for_language("be@latin")) self.assertTrue(check_for_language("sr-RS@latin")) self.assertTrue(check_for_language("sr-RS@12345")) self.assertFalse(check_for_language("en-ü")) self.assertFalse(check_for_language("en\x00")) self.assertFalse(check_for_language(None)) self.assertFalse(check_for_language("be@ ")) # Specifying encoding is not supported (Django enforces UTF-8) self.assertFalse(check_for_language("tr-TR.UTF-8")) self.assertFalse(check_for_language("tr-TR.UTF8")) self.assertFalse(check_for_language("de-DE.utf-8")) def test_check_for_language_null(self): self.assertIs(trans_null.check_for_language("en"), True) def test_get_language_from_request(self): # issue 19919 request = self.rf.get( "/", headers={"accept-language": "en-US,en;q=0.8,bg;q=0.6,ru;q=0.4"} ) lang = get_language_from_request(request) self.assertEqual("en-us", lang) request = self.rf.get( "/", headers={"accept-language": "bg-bg,en-US;q=0.8,en;q=0.6,ru;q=0.4"} ) lang = get_language_from_request(request) self.assertEqual("bg", lang) def test_get_language_from_request_null(self): lang = trans_null.get_language_from_request(None) self.assertEqual(lang, None) def test_specific_language_codes(self): # issue 11915 request = self.rf.get( "/", 
headers={"accept-language": "pt,en-US;q=0.8,en;q=0.6,ru;q=0.4"} ) lang = get_language_from_request(request) self.assertEqual("pt-br", lang) request = self.rf.get( "/", headers={"accept-language": "pt-pt,en-US;q=0.8,en;q=0.6,ru;q=0.4"} ) lang = get_language_from_request(request) self.assertEqual("pt-br", lang) class TranslationFilesMissing(SimpleTestCase): def setUp(self): super().setUp() self.gettext_find_builtin = gettext_module.find def tearDown(self): gettext_module.find = self.gettext_find_builtin super().tearDown() def patchGettextFind(self): gettext_module.find = lambda *args, **kw: None def test_failure_finding_default_mo_files(self): """OSError is raised if the default language is unparseable.""" self.patchGettextFind() trans_real._translations = {} with self.assertRaises(OSError): activate("en") class NonDjangoLanguageTests(SimpleTestCase): """ A language non present in default Django languages can still be installed/used by a Django project. """ @override_settings( USE_I18N=True, LANGUAGES=[ ("en-us", "English"), ("xxx", "Somelanguage"), ], LANGUAGE_CODE="xxx", LOCALE_PATHS=[os.path.join(here, "commands", "locale")], ) def test_non_django_language(self): self.assertEqual(get_language(), "xxx") self.assertEqual(gettext("year"), "reay") @override_settings(USE_I18N=True) def test_check_for_language(self): with tempfile.TemporaryDirectory() as app_dir: os.makedirs(os.path.join(app_dir, "locale", "dummy_Lang", "LC_MESSAGES")) open( os.path.join( app_dir, "locale", "dummy_Lang", "LC_MESSAGES", "django.mo" ), "w", ).close() app_config = AppConfig("dummy_app", AppModuleStub(__path__=[app_dir])) with mock.patch( "django.apps.apps.get_app_configs", return_value=[app_config] ): self.assertIs(check_for_language("dummy-lang"), True) @override_settings( USE_I18N=True, LANGUAGES=[ ("en-us", "English"), # xyz language has no locale files ("xyz", "XYZ"), ], ) @translation.override("xyz") def test_plural_non_django_language(self): self.assertEqual(get_language(), "xyz") self.assertEqual(ngettext("year", "years", 2), "years") @override_settings(USE_I18N=True) class WatchForTranslationChangesTests(SimpleTestCase): @override_settings(USE_I18N=False) def test_i18n_disabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_not_called() def test_i18n_enabled(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) self.assertGreater(mocked_sender.watch_dir.call_count, 1) def test_i18n_locale_paths(self): mocked_sender = mock.MagicMock() with tempfile.TemporaryDirectory() as app_dir: with self.settings(LOCALE_PATHS=[app_dir]): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_any_call(Path(app_dir), "**/*.mo") def test_i18n_app_dirs(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=["i18n.sampleproject"]): watch_for_translation_changes(mocked_sender) project_dir = Path(__file__).parent / "sampleproject" / "locale" mocked_sender.watch_dir.assert_any_call(project_dir, "**/*.mo") def test_i18n_app_dirs_ignore_django_apps(self): mocked_sender = mock.MagicMock() with self.settings(INSTALLED_APPS=["django.contrib.admin"]): watch_for_translation_changes(mocked_sender) mocked_sender.watch_dir.assert_called_once_with(Path("locale"), "**/*.mo") def test_i18n_local_locale(self): mocked_sender = mock.MagicMock() watch_for_translation_changes(mocked_sender) locale_dir = Path(__file__).parent / "locale" mocked_sender.watch_dir.assert_any_call(locale_dir, "**/*.mo") class 
TranslationFileChangedTests(SimpleTestCase):
    def setUp(self):
        self.gettext_translations = gettext_module._translations.copy()
        self.trans_real_translations = trans_real._translations.copy()

    def tearDown(self):
        # Restore the module-level caches saved in setUp().
        gettext_module._translations = self.gettext_translations
        trans_real._translations = self.trans_real_translations

    def test_ignores_non_mo_files(self):
        gettext_module._translations = {"foo": "bar"}
        path = Path("test.py")
        self.assertIsNone(translation_file_changed(None, path))
        self.assertEqual(gettext_module._translations, {"foo": "bar"})

    def test_resets_cache_with_mo_files(self):
        gettext_module._translations = {"foo": "bar"}
        trans_real._translations = {"foo": "bar"}
        trans_real._default = 1
        trans_real._active = False
        path = Path("test.mo")
        self.assertIs(translation_file_changed(None, path), True)
        self.assertEqual(gettext_module._translations, {})
        self.assertEqual(trans_real._translations, {})
        self.assertIsNone(trans_real._default)
        self.assertIsInstance(trans_real._active, Local)


class UtilsTests(SimpleTestCase):
    def test_round_away_from_one(self):
        tests = [
            (0, 0),
            (0.0, 0),
            (0.25, 0),
            (0.5, 0),
            (0.75, 0),
            (1, 1),
            (1.0, 1),
            (1.25, 2),
            (1.5, 2),
            (1.75, 2),
            (-0.0, 0),
            (-0.25, -1),
            (-0.5, -1),
            (-0.75, -1),
            (-1, -1),
            (-1.0, -1),
            (-1.25, -2),
            (-1.5, -2),
            (-1.75, -2),
        ]
        for value, expected in tests:
            with self.subTest(value=value):
                self.assertEqual(round_away_from_one(value), expected)
d27dd04995c8c1e8d00c09be11c4da463c0711ee27acee2e55d7b3d42a303200
import gettext as gettext_module import os import stat import unittest from io import StringIO from pathlib import Path from subprocess import run from unittest import mock from django.core.management import CommandError, call_command, execute_from_command_line from django.core.management.commands.makemessages import Command as MakeMessagesCommand from django.core.management.utils import find_command from django.test import SimpleTestCase, override_settings from django.test.utils import captured_stderr, captured_stdout from django.utils import translation from django.utils.translation import gettext from .utils import RunInTmpDirMixin, copytree has_msgfmt = find_command("msgfmt") @unittest.skipUnless(has_msgfmt, "msgfmt is mandatory for compilation tests") class MessageCompilationTests(RunInTmpDirMixin, SimpleTestCase): work_subdir = "commands" class PoFileTests(MessageCompilationTests): LOCALE = "es_AR" MO_FILE = "locale/%s/LC_MESSAGES/django.mo" % LOCALE MO_FILE_EN = "locale/en/LC_MESSAGES/django.mo" def test_bom_rejection(self): stderr = StringIO() with self.assertRaisesMessage( CommandError, "compilemessages generated one or more errors." ): call_command( "compilemessages", locale=[self.LOCALE], verbosity=0, stderr=stderr ) self.assertIn("file has a BOM (Byte Order Mark)", stderr.getvalue()) self.assertFalse(os.path.exists(self.MO_FILE)) def test_no_write_access(self): mo_file_en = Path(self.MO_FILE_EN) err_buffer = StringIO() # Put file in read-only mode. old_mode = mo_file_en.stat().st_mode mo_file_en.chmod(stat.S_IREAD) # Ensure .po file is more recent than .mo file. mo_file_en.with_suffix(".po").touch() try: with self.assertRaisesMessage( CommandError, "compilemessages generated one or more errors." ): call_command( "compilemessages", locale=["en"], stderr=err_buffer, verbosity=0 ) self.assertIn("not writable location", err_buffer.getvalue()) finally: mo_file_en.chmod(old_mode) def test_no_compile_when_unneeded(self): mo_file_en = Path(self.MO_FILE_EN) mo_file_en.touch() stdout = StringIO() call_command("compilemessages", locale=["en"], stdout=stdout, verbosity=1) msg = "%s” is already compiled and up to date." 
% mo_file_en.with_suffix(".po") self.assertIn(msg, stdout.getvalue()) class PoFileContentsTests(MessageCompilationTests): # Ticket #11240 LOCALE = "fr" MO_FILE = "locale/%s/LC_MESSAGES/django.mo" % LOCALE def test_percent_symbol_in_po_file(self): call_command("compilemessages", locale=[self.LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.MO_FILE)) class MultipleLocaleCompilationTests(MessageCompilationTests): MO_FILE_HR = None MO_FILE_FR = None def setUp(self): super().setUp() localedir = os.path.join(self.test_dir, "locale") self.MO_FILE_HR = os.path.join(localedir, "hr/LC_MESSAGES/django.mo") self.MO_FILE_FR = os.path.join(localedir, "fr/LC_MESSAGES/django.mo") def test_one_locale(self): with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, "locale")]): call_command("compilemessages", locale=["hr"], verbosity=0) self.assertTrue(os.path.exists(self.MO_FILE_HR)) def test_multiple_locales(self): with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, "locale")]): call_command("compilemessages", locale=["hr", "fr"], verbosity=0) self.assertTrue(os.path.exists(self.MO_FILE_HR)) self.assertTrue(os.path.exists(self.MO_FILE_FR)) class ExcludedLocaleCompilationTests(MessageCompilationTests): work_subdir = "exclude" MO_FILE = "locale/%s/LC_MESSAGES/django.mo" def setUp(self): super().setUp() copytree("canned_locale", "locale") def test_command_help(self): with captured_stdout(), captured_stderr(): # `call_command` bypasses the parser; by calling # `execute_from_command_line` with the help subcommand we # ensure that there are no issues with the parser itself. execute_from_command_line(["django-admin", "help", "compilemessages"]) def test_one_locale_excluded(self): call_command("compilemessages", exclude=["it"], verbosity=0) self.assertTrue(os.path.exists(self.MO_FILE % "en")) self.assertTrue(os.path.exists(self.MO_FILE % "fr")) self.assertFalse(os.path.exists(self.MO_FILE % "it")) def test_multiple_locales_excluded(self): call_command("compilemessages", exclude=["it", "fr"], verbosity=0) self.assertTrue(os.path.exists(self.MO_FILE % "en")) self.assertFalse(os.path.exists(self.MO_FILE % "fr")) self.assertFalse(os.path.exists(self.MO_FILE % "it")) def test_one_locale_excluded_with_locale(self): call_command( "compilemessages", locale=["en", "fr"], exclude=["fr"], verbosity=0 ) self.assertTrue(os.path.exists(self.MO_FILE % "en")) self.assertFalse(os.path.exists(self.MO_FILE % "fr")) self.assertFalse(os.path.exists(self.MO_FILE % "it")) def test_multiple_locales_excluded_with_locale(self): call_command( "compilemessages", locale=["en", "fr", "it"], exclude=["fr", "it"], verbosity=0, ) self.assertTrue(os.path.exists(self.MO_FILE % "en")) self.assertFalse(os.path.exists(self.MO_FILE % "fr")) self.assertFalse(os.path.exists(self.MO_FILE % "it")) class IgnoreDirectoryCompilationTests(MessageCompilationTests): # Reuse the exclude directory since it contains some locale fixtures. 
work_subdir = "exclude" MO_FILE = "%s/%s/LC_MESSAGES/django.mo" CACHE_DIR = Path("cache") / "locale" NESTED_DIR = Path("outdated") / "v1" / "locale" def setUp(self): super().setUp() copytree("canned_locale", "locale") copytree("canned_locale", self.CACHE_DIR) copytree("canned_locale", self.NESTED_DIR) def assertAllExist(self, dir, langs): self.assertTrue( all(Path(self.MO_FILE % (dir, lang)).exists() for lang in langs) ) def assertNoneExist(self, dir, langs): self.assertTrue( all(Path(self.MO_FILE % (dir, lang)).exists() is False for lang in langs) ) def test_one_locale_dir_ignored(self): call_command("compilemessages", ignore=["cache"], verbosity=0) self.assertAllExist("locale", ["en", "fr", "it"]) self.assertNoneExist(self.CACHE_DIR, ["en", "fr", "it"]) self.assertAllExist(self.NESTED_DIR, ["en", "fr", "it"]) def test_multiple_locale_dirs_ignored(self): call_command( "compilemessages", ignore=["cache/locale", "outdated"], verbosity=0 ) self.assertAllExist("locale", ["en", "fr", "it"]) self.assertNoneExist(self.CACHE_DIR, ["en", "fr", "it"]) self.assertNoneExist(self.NESTED_DIR, ["en", "fr", "it"]) def test_ignores_based_on_pattern(self): call_command("compilemessages", ignore=["*/locale"], verbosity=0) self.assertAllExist("locale", ["en", "fr", "it"]) self.assertNoneExist(self.CACHE_DIR, ["en", "fr", "it"]) self.assertNoneExist(self.NESTED_DIR, ["en", "fr", "it"]) class CompilationErrorHandling(MessageCompilationTests): def test_error_reported_by_msgfmt(self): # po file contains wrong po formatting. with self.assertRaises(CommandError): call_command("compilemessages", locale=["ja"], verbosity=0) def test_msgfmt_error_including_non_ascii(self): # po file contains invalid msgstr content (triggers non-ascii error content). # Make sure the output of msgfmt is unaffected by the current locale. 
env = os.environ.copy() env.update({"LC_ALL": "C"}) with mock.patch( "django.core.management.utils.run", lambda *args, **kwargs: run(*args, env=env, **kwargs), ): cmd = MakeMessagesCommand() if cmd.gettext_version < (0, 18, 3): self.skipTest("python-brace-format is a recent gettext addition.") stderr = StringIO() with self.assertRaisesMessage( CommandError, "compilemessages generated one or more errors" ): call_command( "compilemessages", locale=["ko"], stdout=StringIO(), stderr=stderr ) self.assertIn("' cannot start a field name", stderr.getvalue()) class ProjectAndAppTests(MessageCompilationTests): LOCALE = "ru" PROJECT_MO_FILE = "locale/%s/LC_MESSAGES/django.mo" % LOCALE APP_MO_FILE = "app_with_locale/locale/%s/LC_MESSAGES/django.mo" % LOCALE class FuzzyTranslationTest(ProjectAndAppTests): def setUp(self): super().setUp() gettext_module._translations = {} # flush cache or test will be useless def test_nofuzzy_compiling(self): with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, "locale")]): call_command("compilemessages", locale=[self.LOCALE], verbosity=0) with translation.override(self.LOCALE): self.assertEqual(gettext("Lenin"), "Ленин") self.assertEqual(gettext("Vodka"), "Vodka") def test_fuzzy_compiling(self): with override_settings(LOCALE_PATHS=[os.path.join(self.test_dir, "locale")]): call_command( "compilemessages", locale=[self.LOCALE], fuzzy=True, verbosity=0 ) with translation.override(self.LOCALE): self.assertEqual(gettext("Lenin"), "Ленин") self.assertEqual(gettext("Vodka"), "Водка") class AppCompilationTest(ProjectAndAppTests): def test_app_locale_compiled(self): call_command("compilemessages", locale=[self.LOCALE], verbosity=0) self.assertTrue(os.path.exists(self.PROJECT_MO_FILE)) self.assertTrue(os.path.exists(self.APP_MO_FILE)) class PathLibLocaleCompilationTests(MessageCompilationTests): work_subdir = "exclude" def test_locale_paths_pathlib(self): with override_settings(LOCALE_PATHS=[Path(self.test_dir) / "canned_locale"]): call_command("compilemessages", locale=["fr"], verbosity=0) self.assertTrue(os.path.exists("canned_locale/fr/LC_MESSAGES/django.mo"))
eb03c0cab48fb136984ff5b44d2dd7fa78d16979b5702c5238e9e3eba1eee3d5
from operator import attrgetter from django.contrib.contenttypes.models import ContentType from django.db import models from django.db.models import Count from django.test import TestCase from .models import ( Base, Child, Derived, Feature, Item, ItemAndSimpleItem, Leaf, Location, OneToOneItem, Proxy, ProxyRelated, RelatedItem, Request, ResolveThis, SimpleItem, SpecialFeature, ) class DeferRegressionTest(TestCase): def test_basic(self): # Deferred fields should really be deferred and not accidentally use # the field's default value just because they aren't passed to __init__ Item.objects.create(name="first", value=42) obj = Item.objects.only("name", "other_value").get(name="first") # Accessing "name" doesn't trigger a new database query. Accessing # "value" or "text" should. with self.assertNumQueries(0): self.assertEqual(obj.name, "first") self.assertEqual(obj.other_value, 0) with self.assertNumQueries(1): self.assertEqual(obj.value, 42) with self.assertNumQueries(1): self.assertEqual(obj.text, "xyzzy") with self.assertNumQueries(0): self.assertEqual(obj.text, "xyzzy") # Regression test for #10695. Make sure different instances don't # inadvertently share data in the deferred descriptor objects. i = Item.objects.create(name="no I'm first", value=37) items = Item.objects.only("value").order_by("-value") self.assertEqual(items[0].name, "first") self.assertEqual(items[1].name, "no I'm first") RelatedItem.objects.create(item=i) r = RelatedItem.objects.defer("item").get() self.assertEqual(r.item_id, i.id) self.assertEqual(r.item, i) # Some further checks for select_related() and inherited model # behavior (regression for #10710). c1 = Child.objects.create(name="c1", value=42) c2 = Child.objects.create(name="c2", value=37) Leaf.objects.create(name="l1", child=c1, second_child=c2) obj = Leaf.objects.only("name", "child").select_related()[0] self.assertEqual(obj.child.name, "c1") self.assertQuerySetEqual( Leaf.objects.select_related().only("child__name", "second_child__name"), [ "l1", ], attrgetter("name"), ) # Models instances with deferred fields should still return the same # content types as their non-deferred versions (bug #10738). ctype = ContentType.objects.get_for_model c1 = ctype(Item.objects.all()[0]) c2 = ctype(Item.objects.defer("name")[0]) c3 = ctype(Item.objects.only("name")[0]) self.assertTrue(c1 is c2 is c3) # Regression for #10733 - only() can be used on a model with two # foreign keys. 
results = Leaf.objects.only("name", "child", "second_child").select_related() self.assertEqual(results[0].child.name, "c1") self.assertEqual(results[0].second_child.name, "c2") results = Leaf.objects.only( "name", "child", "second_child", "child__name", "second_child__name" ).select_related() self.assertEqual(results[0].child.name, "c1") self.assertEqual(results[0].second_child.name, "c2") # Regression for #16409 - make sure defer() and only() work with annotate() self.assertIsInstance( list(SimpleItem.objects.annotate(Count("feature")).defer("name")), list ) self.assertIsInstance( list(SimpleItem.objects.annotate(Count("feature")).only("name")), list ) def test_ticket_16409(self): # Regression for #16409 - make sure defer() and only() work with annotate() self.assertIsInstance( list(SimpleItem.objects.annotate(Count("feature")).defer("name")), list ) self.assertIsInstance( list(SimpleItem.objects.annotate(Count("feature")).only("name")), list ) def test_ticket_23270(self): d = Derived.objects.create(text="foo", other_text="bar") with self.assertNumQueries(1): obj = Base.objects.select_related("derived").defer("text")[0] self.assertIsInstance(obj.derived, Derived) self.assertEqual("bar", obj.derived.other_text) self.assertNotIn("text", obj.__dict__) self.assertEqual(d.pk, obj.derived.base_ptr_id) def test_only_and_defer_usage_on_proxy_models(self): # Regression for #15790 - only() broken for proxy models proxy = Proxy.objects.create(name="proxy", value=42) msg = "QuerySet.only() return bogus results with proxy models" dp = Proxy.objects.only("other_value").get(pk=proxy.pk) self.assertEqual(dp.name, proxy.name, msg=msg) self.assertEqual(dp.value, proxy.value, msg=msg) # also test things with .defer() msg = "QuerySet.defer() return bogus results with proxy models" dp = Proxy.objects.defer("name", "text", "value").get(pk=proxy.pk) self.assertEqual(dp.name, proxy.name, msg=msg) self.assertEqual(dp.value, proxy.value, msg=msg) def test_resolve_columns(self): ResolveThis.objects.create(num=5.0, name="Foobar") qs = ResolveThis.objects.defer("num") self.assertEqual(1, qs.count()) self.assertEqual("Foobar", qs[0].name) def test_reverse_one_to_one_relations(self): # Refs #14694. Test reverse relations which are known unique (reverse # side has o2ofield or unique FK) - the o2o case item = Item.objects.create(name="first", value=42) o2o = OneToOneItem.objects.create(item=item, name="second") self.assertEqual(len(Item.objects.defer("one_to_one_item__name")), 1) self.assertEqual(len(Item.objects.select_related("one_to_one_item")), 1) self.assertEqual( len( Item.objects.select_related("one_to_one_item").defer( "one_to_one_item__name" ) ), 1, ) self.assertEqual( len(Item.objects.select_related("one_to_one_item").defer("value")), 1 ) # Make sure that `only()` doesn't break when we pass in a unique relation, # rather than a field on the relation. 
self.assertEqual(len(Item.objects.only("one_to_one_item")), 1) with self.assertNumQueries(1): i = Item.objects.select_related("one_to_one_item")[0] self.assertEqual(i.one_to_one_item.pk, o2o.pk) self.assertEqual(i.one_to_one_item.name, "second") with self.assertNumQueries(1): i = Item.objects.select_related("one_to_one_item").defer( "value", "one_to_one_item__name" )[0] self.assertEqual(i.one_to_one_item.pk, o2o.pk) self.assertEqual(i.name, "first") with self.assertNumQueries(1): self.assertEqual(i.one_to_one_item.name, "second") with self.assertNumQueries(1): self.assertEqual(i.value, 42) def test_defer_with_select_related(self): item1 = Item.objects.create(name="first", value=47) item2 = Item.objects.create(name="second", value=42) simple = SimpleItem.objects.create(name="simple", value="23") ItemAndSimpleItem.objects.create(item=item1, simple=simple) obj = ItemAndSimpleItem.objects.defer("item").select_related("simple").get() self.assertEqual(obj.item, item1) self.assertEqual(obj.item_id, item1.id) obj.item = item2 obj.save() obj = ItemAndSimpleItem.objects.defer("item").select_related("simple").get() self.assertEqual(obj.item, item2) self.assertEqual(obj.item_id, item2.id) def test_proxy_model_defer_with_select_related(self): # Regression for #22050 item = Item.objects.create(name="first", value=47) RelatedItem.objects.create(item=item) # Defer fields with only() obj = ProxyRelated.objects.select_related().only("item__name")[0] with self.assertNumQueries(0): self.assertEqual(obj.item.name, "first") with self.assertNumQueries(1): self.assertEqual(obj.item.value, 47) def test_only_with_select_related(self): # Test for #17485. item = SimpleItem.objects.create(name="first", value=47) feature = Feature.objects.create(item=item) SpecialFeature.objects.create(feature=feature) qs = Feature.objects.only("item__name").select_related("item") self.assertEqual(len(qs), 1) qs = SpecialFeature.objects.only("feature__item__name").select_related( "feature__item" ) self.assertEqual(len(qs), 1) def test_defer_annotate_select_related(self): location = Location.objects.create() Request.objects.create(location=location) self.assertIsInstance( list( Request.objects.annotate(Count("items")) .select_related("profile", "location") .only("profile", "location") ), list, ) self.assertIsInstance( list( Request.objects.annotate(Count("items")) .select_related("profile", "location") .only("profile__profile1", "location__location1") ), list, ) self.assertIsInstance( list( Request.objects.annotate(Count("items")) .select_related("profile", "location") .defer("request1", "request2", "request3", "request4") ), list, ) def test_common_model_different_mask(self): child = Child.objects.create(name="Child", value=42) second_child = Child.objects.create(name="Second", value=64) Leaf.objects.create(child=child, second_child=second_child) with self.assertNumQueries(1): leaf = ( Leaf.objects.select_related("child", "second_child") .defer("child__name", "second_child__value") .get() ) self.assertEqual(leaf.child, child) self.assertEqual(leaf.second_child, second_child) self.assertEqual(leaf.child.get_deferred_fields(), {"name"}) self.assertEqual(leaf.second_child.get_deferred_fields(), {"value"}) with self.assertNumQueries(0): self.assertEqual(leaf.child.value, 42) self.assertEqual(leaf.second_child.name, "Second") with self.assertNumQueries(1): self.assertEqual(leaf.child.name, "Child") with self.assertNumQueries(1): self.assertEqual(leaf.second_child.value, 64) class DeferDeletionSignalsTests(TestCase): senders = [Item, Proxy] 
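    # The tests below verify that deleting an instance fetched with only()/defer()
    # still sends pre_delete/post_delete signals with the expected sender class,
    # including when the instance was loaded through a proxy model.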
@classmethod def setUpTestData(cls): cls.item_pk = Item.objects.create(value=1).pk def setUp(self): self.pre_delete_senders = [] self.post_delete_senders = [] for sender in self.senders: models.signals.pre_delete.connect(self.pre_delete_receiver, sender) models.signals.post_delete.connect(self.post_delete_receiver, sender) def tearDown(self): for sender in self.senders: models.signals.pre_delete.disconnect(self.pre_delete_receiver, sender) models.signals.post_delete.disconnect(self.post_delete_receiver, sender) def pre_delete_receiver(self, sender, **kwargs): self.pre_delete_senders.append(sender) def post_delete_receiver(self, sender, **kwargs): self.post_delete_senders.append(sender) def test_delete_defered_model(self): Item.objects.only("value").get(pk=self.item_pk).delete() self.assertEqual(self.pre_delete_senders, [Item]) self.assertEqual(self.post_delete_senders, [Item]) def test_delete_defered_proxy_model(self): Proxy.objects.only("value").get(pk=self.item_pk).delete() self.assertEqual(self.pre_delete_senders, [Proxy]) self.assertEqual(self.post_delete_senders, [Proxy])
9a625039a09ea3e9112e709632a206fc822f0101dcf8cd8829de3af1407c016e
import copy import json import os import pickle import unittest import uuid from django.core.exceptions import DisallowedRedirect from django.core.serializers.json import DjangoJSONEncoder from django.core.signals import request_finished from django.db import close_old_connections from django.http import ( BadHeaderError, HttpResponse, HttpResponseNotAllowed, HttpResponseNotModified, HttpResponsePermanentRedirect, HttpResponseRedirect, JsonResponse, QueryDict, SimpleCookie, StreamingHttpResponse, parse_cookie, ) from django.test import SimpleTestCase from django.utils.functional import lazystr class QueryDictTests(SimpleTestCase): def test_create_with_no_args(self): self.assertEqual(QueryDict(), QueryDict("")) def test_missing_key(self): q = QueryDict() with self.assertRaises(KeyError): q.__getitem__("foo") def test_immutability(self): q = QueryDict() with self.assertRaises(AttributeError): q.__setitem__("something", "bar") with self.assertRaises(AttributeError): q.setlist("foo", ["bar"]) with self.assertRaises(AttributeError): q.appendlist("foo", ["bar"]) with self.assertRaises(AttributeError): q.update({"foo": "bar"}) with self.assertRaises(AttributeError): q.pop("foo") with self.assertRaises(AttributeError): q.popitem() with self.assertRaises(AttributeError): q.clear() def test_immutable_get_with_default(self): q = QueryDict() self.assertEqual(q.get("foo", "default"), "default") def test_immutable_basic_operations(self): q = QueryDict() self.assertEqual(q.getlist("foo"), []) self.assertNotIn("foo", q) self.assertEqual(list(q), []) self.assertEqual(list(q.items()), []) self.assertEqual(list(q.lists()), []) self.assertEqual(list(q.keys()), []) self.assertEqual(list(q.values()), []) self.assertEqual(len(q), 0) self.assertEqual(q.urlencode(), "") def test_single_key_value(self): """Test QueryDict with one key/value pair""" q = QueryDict("foo=bar") self.assertEqual(q["foo"], "bar") with self.assertRaises(KeyError): q.__getitem__("bar") with self.assertRaises(AttributeError): q.__setitem__("something", "bar") self.assertEqual(q.get("foo", "default"), "bar") self.assertEqual(q.get("bar", "default"), "default") self.assertEqual(q.getlist("foo"), ["bar"]) self.assertEqual(q.getlist("bar"), []) with self.assertRaises(AttributeError): q.setlist("foo", ["bar"]) with self.assertRaises(AttributeError): q.appendlist("foo", ["bar"]) self.assertIn("foo", q) self.assertNotIn("bar", q) self.assertEqual(list(q), ["foo"]) self.assertEqual(list(q.items()), [("foo", "bar")]) self.assertEqual(list(q.lists()), [("foo", ["bar"])]) self.assertEqual(list(q.keys()), ["foo"]) self.assertEqual(list(q.values()), ["bar"]) self.assertEqual(len(q), 1) with self.assertRaises(AttributeError): q.update({"foo": "bar"}) with self.assertRaises(AttributeError): q.pop("foo") with self.assertRaises(AttributeError): q.popitem() with self.assertRaises(AttributeError): q.clear() with self.assertRaises(AttributeError): q.setdefault("foo", "bar") self.assertEqual(q.urlencode(), "foo=bar") def test_urlencode(self): q = QueryDict(mutable=True) q["next"] = "/a&b/" self.assertEqual(q.urlencode(), "next=%2Fa%26b%2F") self.assertEqual(q.urlencode(safe="/"), "next=/a%26b/") q = QueryDict(mutable=True) q["next"] = "/t\xebst&key/" self.assertEqual(q.urlencode(), "next=%2Ft%C3%ABst%26key%2F") self.assertEqual(q.urlencode(safe="/"), "next=/t%C3%ABst%26key/") def test_urlencode_int(self): # Normally QueryDict doesn't contain non-string values but lazily # written tests may make that mistake. 
q = QueryDict(mutable=True) q["a"] = 1 self.assertEqual(q.urlencode(), "a=1") def test_mutable_copy(self): """A copy of a QueryDict is mutable.""" q = QueryDict().copy() with self.assertRaises(KeyError): q.__getitem__("foo") q["name"] = "john" self.assertEqual(q["name"], "john") def test_mutable_delete(self): q = QueryDict(mutable=True) q["name"] = "john" del q["name"] self.assertNotIn("name", q) def test_basic_mutable_operations(self): q = QueryDict(mutable=True) q["name"] = "john" self.assertEqual(q.get("foo", "default"), "default") self.assertEqual(q.get("name", "default"), "john") self.assertEqual(q.getlist("name"), ["john"]) self.assertEqual(q.getlist("foo"), []) q.setlist("foo", ["bar", "baz"]) self.assertEqual(q.get("foo", "default"), "baz") self.assertEqual(q.getlist("foo"), ["bar", "baz"]) q.appendlist("foo", "another") self.assertEqual(q.getlist("foo"), ["bar", "baz", "another"]) self.assertEqual(q["foo"], "another") self.assertIn("foo", q) self.assertCountEqual(q, ["foo", "name"]) self.assertCountEqual(q.items(), [("foo", "another"), ("name", "john")]) self.assertCountEqual( q.lists(), [("foo", ["bar", "baz", "another"]), ("name", ["john"])] ) self.assertCountEqual(q.keys(), ["foo", "name"]) self.assertCountEqual(q.values(), ["another", "john"]) q.update({"foo": "hello"}) self.assertEqual(q["foo"], "hello") self.assertEqual(q.get("foo", "not available"), "hello") self.assertEqual(q.getlist("foo"), ["bar", "baz", "another", "hello"]) self.assertEqual(q.pop("foo"), ["bar", "baz", "another", "hello"]) self.assertEqual(q.pop("foo", "not there"), "not there") self.assertEqual(q.get("foo", "not there"), "not there") self.assertEqual(q.setdefault("foo", "bar"), "bar") self.assertEqual(q["foo"], "bar") self.assertEqual(q.getlist("foo"), ["bar"]) self.assertIn(q.urlencode(), ["foo=bar&name=john", "name=john&foo=bar"]) q.clear() self.assertEqual(len(q), 0) def test_multiple_keys(self): """Test QueryDict with two key/value pairs with same keys.""" q = QueryDict("vote=yes&vote=no") self.assertEqual(q["vote"], "no") with self.assertRaises(AttributeError): q.__setitem__("something", "bar") self.assertEqual(q.get("vote", "default"), "no") self.assertEqual(q.get("foo", "default"), "default") self.assertEqual(q.getlist("vote"), ["yes", "no"]) self.assertEqual(q.getlist("foo"), []) with self.assertRaises(AttributeError): q.setlist("foo", ["bar", "baz"]) with self.assertRaises(AttributeError): q.setlist("foo", ["bar", "baz"]) with self.assertRaises(AttributeError): q.appendlist("foo", ["bar"]) self.assertIn("vote", q) self.assertNotIn("foo", q) self.assertEqual(list(q), ["vote"]) self.assertEqual(list(q.items()), [("vote", "no")]) self.assertEqual(list(q.lists()), [("vote", ["yes", "no"])]) self.assertEqual(list(q.keys()), ["vote"]) self.assertEqual(list(q.values()), ["no"]) self.assertEqual(len(q), 1) with self.assertRaises(AttributeError): q.update({"foo": "bar"}) with self.assertRaises(AttributeError): q.pop("foo") with self.assertRaises(AttributeError): q.popitem() with self.assertRaises(AttributeError): q.clear() with self.assertRaises(AttributeError): q.setdefault("foo", "bar") with self.assertRaises(AttributeError): q.__delitem__("vote") def test_pickle(self): q = QueryDict() q1 = pickle.loads(pickle.dumps(q, 2)) self.assertEqual(q, q1) q = QueryDict("a=b&c=d") q1 = pickle.loads(pickle.dumps(q, 2)) self.assertEqual(q, q1) q = QueryDict("a=b&c=d&a=1") q1 = pickle.loads(pickle.dumps(q, 2)) self.assertEqual(q, q1) def test_update_from_querydict(self): """Regression test for #8278: 
QueryDict.update(QueryDict)""" x = QueryDict("a=1&a=2", mutable=True) y = QueryDict("a=3&a=4") x.update(y) self.assertEqual(x.getlist("a"), ["1", "2", "3", "4"]) def test_non_default_encoding(self): """#13572 - QueryDict with a non-default encoding""" q = QueryDict("cur=%A4", encoding="iso-8859-15") self.assertEqual(q.encoding, "iso-8859-15") self.assertEqual(list(q.items()), [("cur", "€")]) self.assertEqual(q.urlencode(), "cur=%A4") q = q.copy() self.assertEqual(q.encoding, "iso-8859-15") self.assertEqual(list(q.items()), [("cur", "€")]) self.assertEqual(q.urlencode(), "cur=%A4") self.assertEqual(copy.copy(q).encoding, "iso-8859-15") self.assertEqual(copy.deepcopy(q).encoding, "iso-8859-15") def test_querydict_fromkeys(self): self.assertEqual( QueryDict.fromkeys(["key1", "key2", "key3"]), QueryDict("key1&key2&key3") ) def test_fromkeys_with_nonempty_value(self): self.assertEqual( QueryDict.fromkeys(["key1", "key2", "key3"], value="val"), QueryDict("key1=val&key2=val&key3=val"), ) def test_fromkeys_is_immutable_by_default(self): # Match behavior of __init__() which is also immutable by default. q = QueryDict.fromkeys(["key1", "key2", "key3"]) with self.assertRaisesMessage( AttributeError, "This QueryDict instance is immutable" ): q["key4"] = "nope" def test_fromkeys_mutable_override(self): q = QueryDict.fromkeys(["key1", "key2", "key3"], mutable=True) q["key4"] = "yep" self.assertEqual(q, QueryDict("key1&key2&key3&key4=yep")) def test_duplicates_in_fromkeys_iterable(self): self.assertEqual(QueryDict.fromkeys("xyzzy"), QueryDict("x&y&z&z&y")) def test_fromkeys_with_nondefault_encoding(self): key_utf16 = b"\xff\xfe\x8e\x02\xdd\x01\x9e\x02" value_utf16 = b"\xff\xfe\xdd\x01n\x00l\x00P\x02\x8c\x02" q = QueryDict.fromkeys([key_utf16], value=value_utf16, encoding="utf-16") expected = QueryDict("", mutable=True) expected["ʎǝʞ"] = "ǝnlɐʌ" self.assertEqual(q, expected) def test_fromkeys_empty_iterable(self): self.assertEqual(QueryDict.fromkeys([]), QueryDict("")) def test_fromkeys_noniterable(self): with self.assertRaises(TypeError): QueryDict.fromkeys(0) class HttpResponseTests(SimpleTestCase): def test_headers_type(self): r = HttpResponse() # ASCII strings or bytes values are converted to strings. r.headers["key"] = "test" self.assertEqual(r.headers["key"], "test") r.headers["key"] = b"test" self.assertEqual(r.headers["key"], "test") self.assertIn(b"test", r.serialize_headers()) # Non-ASCII values are serialized to Latin-1. r.headers["key"] = "café" self.assertIn("café".encode("latin-1"), r.serialize_headers()) # Other Unicode values are MIME-encoded (there's no way to pass them as # bytes). 
r.headers["key"] = "†" self.assertEqual(r.headers["key"], "=?utf-8?b?4oCg?=") self.assertIn(b"=?utf-8?b?4oCg?=", r.serialize_headers()) # The response also converts string or bytes keys to strings, but requires # them to contain ASCII r = HttpResponse() del r.headers["Content-Type"] r.headers["foo"] = "bar" headers = list(r.headers.items()) self.assertEqual(len(headers), 1) self.assertEqual(headers[0], ("foo", "bar")) r = HttpResponse() del r.headers["Content-Type"] r.headers[b"foo"] = "bar" headers = list(r.headers.items()) self.assertEqual(len(headers), 1) self.assertEqual(headers[0], ("foo", "bar")) self.assertIsInstance(headers[0][0], str) r = HttpResponse() with self.assertRaises(UnicodeError): r.headers.__setitem__("føø", "bar") with self.assertRaises(UnicodeError): r.headers.__setitem__("føø".encode(), "bar") def test_long_line(self): # Bug #20889: long lines trigger newlines to be added to headers # (which is not allowed due to bug #10188) h = HttpResponse() f = b"zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88" f = f.decode("utf-8") h.headers["Content-Disposition"] = 'attachment; filename="%s"' % f # This one is triggering https://bugs.python.org/issue20747, that is Python # will itself insert a newline in the header h.headers[ "Content-Disposition" ] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"' def test_newlines_in_headers(self): # Bug #10188: Do not allow newlines in headers (CR or LF) r = HttpResponse() with self.assertRaises(BadHeaderError): r.headers.__setitem__("test\rstr", "test") with self.assertRaises(BadHeaderError): r.headers.__setitem__("test\nstr", "test") def test_encoded_with_newlines_in_headers(self): """ Keys & values which throw a UnicodeError when encoding/decoding should still be checked for newlines and re-raised as a BadHeaderError. These specifically would still throw BadHeaderError after decoding successfully, because the newlines are sandwiched in the middle of the string and email.Header leaves those as they are. """ r = HttpResponse() pairs = ( ("†\nother", "test"), ("test", "†\nother"), (b"\xe2\x80\xa0\nother", "test"), ("test", b"\xe2\x80\xa0\nother"), ) msg = "Header values can't contain newlines" for key, value in pairs: with self.subTest(key=key, value=value): with self.assertRaisesMessage(BadHeaderError, msg): r[key] = value def test_dict_behavior(self): """ Test for bug #14020: Make HttpResponse.get work like dict.get """ r = HttpResponse() self.assertIsNone(r.get("test")) def test_non_string_content(self): # Bug 16494: HttpResponse should behave consistently with non-strings r = HttpResponse(12345) self.assertEqual(r.content, b"12345") # test content via property r = HttpResponse() r.content = 12345 self.assertEqual(r.content, b"12345") def test_memoryview_content(self): r = HttpResponse(memoryview(b"memoryview")) self.assertEqual(r.content, b"memoryview") def test_iter_content(self): r = HttpResponse(["abc", "def", "ghi"]) self.assertEqual(r.content, b"abcdefghi") # test iter content via property r = HttpResponse() r.content = ["idan", "alex", "jacob"] self.assertEqual(r.content, b"idanalexjacob") r = HttpResponse() r.content = [1, 2, 3] self.assertEqual(r.content, b"123") # test odd inputs r = HttpResponse() r.content = ["1", "2", 3, "\u079e"] # '\xde\x9e' == unichr(1950).encode() self.assertEqual(r.content, b"123\xde\x9e") # .content can safely be accessed multiple times. 
r = HttpResponse(iter(["hello", "world"])) self.assertEqual(r.content, r.content) self.assertEqual(r.content, b"helloworld") # __iter__ can safely be called multiple times (#20187). self.assertEqual(b"".join(r), b"helloworld") self.assertEqual(b"".join(r), b"helloworld") # Accessing .content still works. self.assertEqual(r.content, b"helloworld") # Accessing .content also works if the response was iterated first. r = HttpResponse(iter(["hello", "world"])) self.assertEqual(b"".join(r), b"helloworld") self.assertEqual(r.content, b"helloworld") # Additional content can be written to the response. r = HttpResponse(iter(["hello", "world"])) self.assertEqual(r.content, b"helloworld") r.write("!") self.assertEqual(r.content, b"helloworld!") def test_iterator_isnt_rewound(self): # Regression test for #13222 r = HttpResponse("abc") i = iter(r) self.assertEqual(list(i), [b"abc"]) self.assertEqual(list(i), []) def test_lazy_content(self): r = HttpResponse(lazystr("helloworld")) self.assertEqual(r.content, b"helloworld") def test_file_interface(self): r = HttpResponse() r.write(b"hello") self.assertEqual(r.tell(), 5) r.write("привет") self.assertEqual(r.tell(), 17) r = HttpResponse(["abc"]) r.write("def") self.assertEqual(r.tell(), 6) self.assertEqual(r.content, b"abcdef") # with Content-Encoding header r = HttpResponse() r.headers["Content-Encoding"] = "winning" r.write(b"abc") r.write(b"def") self.assertEqual(r.content, b"abcdef") def test_stream_interface(self): r = HttpResponse("asdf") self.assertEqual(r.getvalue(), b"asdf") r = HttpResponse() self.assertIs(r.writable(), True) r.writelines(["foo\n", "bar\n", "baz\n"]) self.assertEqual(r.content, b"foo\nbar\nbaz\n") def test_unsafe_redirect(self): bad_urls = [ 'data:text/html,<script>window.alert("xss")</script>', "mailto:[email protected]", "file:///etc/passwd", ] for url in bad_urls: with self.assertRaises(DisallowedRedirect): HttpResponseRedirect(url) with self.assertRaises(DisallowedRedirect): HttpResponsePermanentRedirect(url) def test_header_deletion(self): r = HttpResponse("hello") r.headers["X-Foo"] = "foo" del r.headers["X-Foo"] self.assertNotIn("X-Foo", r.headers) # del doesn't raise a KeyError on nonexistent headers. del r.headers["X-Foo"] def test_instantiate_with_headers(self): r = HttpResponse("hello", headers={"X-Foo": "foo"}) self.assertEqual(r.headers["X-Foo"], "foo") self.assertEqual(r.headers["x-foo"], "foo") def test_content_type(self): r = HttpResponse("hello", content_type="application/json") self.assertEqual(r.headers["Content-Type"], "application/json") def test_content_type_headers(self): r = HttpResponse("hello", headers={"Content-Type": "application/json"}) self.assertEqual(r.headers["Content-Type"], "application/json") def test_content_type_mutually_exclusive(self): msg = ( "'headers' must not contain 'Content-Type' when the " "'content_type' parameter is provided." 
) with self.assertRaisesMessage(ValueError, msg): HttpResponse( "hello", content_type="application/json", headers={"Content-Type": "text/csv"}, ) class HttpResponseSubclassesTests(SimpleTestCase): def test_redirect(self): response = HttpResponseRedirect("/redirected/") self.assertEqual(response.status_code, 302) # Standard HttpResponse init args can be used response = HttpResponseRedirect( "/redirected/", content="The resource has temporarily moved", ) self.assertContains( response, "The resource has temporarily moved", status_code=302 ) self.assertEqual(response.url, response.headers["Location"]) def test_redirect_lazy(self): """Make sure HttpResponseRedirect works with lazy strings.""" r = HttpResponseRedirect(lazystr("/redirected/")) self.assertEqual(r.url, "/redirected/") def test_redirect_repr(self): response = HttpResponseRedirect("/redirected/") expected = ( '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", ' 'url="/redirected/">' ) self.assertEqual(repr(response), expected) def test_invalid_redirect_repr(self): """ If HttpResponseRedirect raises DisallowedRedirect, its __repr__() should work (in the debug view, for example). """ response = HttpResponseRedirect.__new__(HttpResponseRedirect) with self.assertRaisesMessage( DisallowedRedirect, "Unsafe redirect to URL with protocol 'ssh'" ): HttpResponseRedirect.__init__(response, "ssh://foo") expected = ( '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", ' 'url="ssh://foo">' ) self.assertEqual(repr(response), expected) def test_not_modified(self): response = HttpResponseNotModified() self.assertEqual(response.status_code, 304) # 304 responses should not have content/content-type with self.assertRaises(AttributeError): response.content = "Hello dear" self.assertNotIn("content-type", response) def test_not_modified_repr(self): response = HttpResponseNotModified() self.assertEqual(repr(response), "<HttpResponseNotModified status_code=304>") def test_not_allowed(self): response = HttpResponseNotAllowed(["GET"]) self.assertEqual(response.status_code, 405) # Standard HttpResponse init args can be used response = HttpResponseNotAllowed( ["GET"], content="Only the GET method is allowed" ) self.assertContains(response, "Only the GET method is allowed", status_code=405) def test_not_allowed_repr(self): response = HttpResponseNotAllowed(["GET", "OPTIONS"], content_type="text/plain") expected = ( '<HttpResponseNotAllowed [GET, OPTIONS] status_code=405, "text/plain">' ) self.assertEqual(repr(response), expected) def test_not_allowed_repr_no_content_type(self): response = HttpResponseNotAllowed(("GET", "POST")) del response.headers["Content-Type"] self.assertEqual( repr(response), "<HttpResponseNotAllowed [GET, POST] status_code=405>" ) class JsonResponseTests(SimpleTestCase): def test_json_response_non_ascii(self): data = {"key": "łóżko"} response = JsonResponse(data) self.assertEqual(json.loads(response.content.decode()), data) def test_json_response_raises_type_error_with_default_setting(self): with self.assertRaisesMessage( TypeError, "In order to allow non-dict objects to be serialized set the " "safe parameter to False", ): JsonResponse([1, 2, 3]) def test_json_response_text(self): response = JsonResponse("foobar", safe=False) self.assertEqual(json.loads(response.content.decode()), "foobar") def test_json_response_list(self): response = JsonResponse(["foo", "bar"], safe=False) self.assertEqual(json.loads(response.content.decode()), ["foo", "bar"]) def test_json_response_uuid(self): u = uuid.uuid4() response 
= JsonResponse(u, safe=False) self.assertEqual(json.loads(response.content.decode()), str(u)) def test_json_response_custom_encoder(self): class CustomDjangoJSONEncoder(DjangoJSONEncoder): def encode(self, o): return json.dumps({"foo": "bar"}) response = JsonResponse({}, encoder=CustomDjangoJSONEncoder) self.assertEqual(json.loads(response.content.decode()), {"foo": "bar"}) def test_json_response_passing_arguments_to_json_dumps(self): response = JsonResponse({"foo": "bar"}, json_dumps_params={"indent": 2}) self.assertEqual(response.content.decode(), '{\n "foo": "bar"\n}') class StreamingHttpResponseTests(SimpleTestCase): def test_streaming_response(self): r = StreamingHttpResponse(iter(["hello", "world"])) # iterating over the response itself yields bytestring chunks. chunks = list(r) self.assertEqual(chunks, [b"hello", b"world"]) for chunk in chunks: self.assertIsInstance(chunk, bytes) # and the response can only be iterated once. self.assertEqual(list(r), []) # even when a sequence that can be iterated many times, like a list, # is given as content. r = StreamingHttpResponse(["abc", "def"]) self.assertEqual(list(r), [b"abc", b"def"]) self.assertEqual(list(r), []) # iterating over strings still yields bytestring chunks. r.streaming_content = iter(["hello", "café"]) chunks = list(r) # '\xc3\xa9' == unichr(233).encode() self.assertEqual(chunks, [b"hello", b"caf\xc3\xa9"]) for chunk in chunks: self.assertIsInstance(chunk, bytes) # streaming responses don't have a `content` attribute. self.assertFalse(hasattr(r, "content")) # and you can't accidentally assign to a `content` attribute. with self.assertRaises(AttributeError): r.content = "xyz" # but they do have a `streaming_content` attribute. self.assertTrue(hasattr(r, "streaming_content")) # that exists so we can check if a response is streaming, and wrap or # replace the content iterator. r.streaming_content = iter(["abc", "def"]) r.streaming_content = (chunk.upper() for chunk in r.streaming_content) self.assertEqual(list(r), [b"ABC", b"DEF"]) # coercing a streaming response to bytes doesn't return a complete HTTP # message like a regular response does. it only gives us the headers. r = StreamingHttpResponse(iter(["hello", "world"])) self.assertEqual(bytes(r), b"Content-Type: text/html; charset=utf-8") # and this won't consume its content. self.assertEqual(list(r), [b"hello", b"world"]) # additional content cannot be written to the response. r = StreamingHttpResponse(iter(["hello", "world"])) with self.assertRaises(Exception): r.write("!") # and we can't tell the current position. with self.assertRaises(Exception): r.tell() r = StreamingHttpResponse(iter(["hello", "world"])) self.assertEqual(r.getvalue(), b"helloworld") def test_repr(self): r = StreamingHttpResponse(iter(["hello", "café"])) self.assertEqual( repr(r), '<StreamingHttpResponse status_code=200, "text/html; charset=utf-8">', ) async def test_async_streaming_response(self): async def async_iter(): yield b"hello" yield b"world" r = StreamingHttpResponse(async_iter()) chunks = [] async for chunk in r: chunks.append(chunk) self.assertEqual(chunks, [b"hello", b"world"]) def test_async_streaming_response_warning(self): async def async_iter(): yield b"hello" yield b"world" r = StreamingHttpResponse(async_iter()) msg = ( "StreamingHttpResponse must consume asynchronous iterators in order to " "serve them synchronously. Use a synchronous iterator instead." 
) with self.assertWarnsMessage(Warning, msg): self.assertEqual(list(r), [b"hello", b"world"]) async def test_sync_streaming_response_warning(self): r = StreamingHttpResponse(iter(["hello", "world"])) msg = ( "StreamingHttpResponse must consume synchronous iterators in order to " "serve them asynchronously. Use an asynchronous iterator instead." ) with self.assertWarnsMessage(Warning, msg): self.assertEqual(b"hello", await anext(aiter(r))) class FileCloseTests(SimpleTestCase): def setUp(self): # Disable the request_finished signal during this test # to avoid interfering with the database connection. request_finished.disconnect(close_old_connections) def tearDown(self): request_finished.connect(close_old_connections) def test_response(self): filename = os.path.join(os.path.dirname(__file__), "abc.txt") # file isn't closed until we close the response. file1 = open(filename) r = HttpResponse(file1) self.assertTrue(file1.closed) r.close() # when multiple file are assigned as content, make sure they are all # closed with the response. file1 = open(filename) file2 = open(filename) r = HttpResponse(file1) r.content = file2 self.assertTrue(file1.closed) self.assertTrue(file2.closed) def test_streaming_response(self): filename = os.path.join(os.path.dirname(__file__), "abc.txt") # file isn't closed until we close the response. file1 = open(filename) r = StreamingHttpResponse(file1) self.assertFalse(file1.closed) r.close() self.assertTrue(file1.closed) # when multiple file are assigned as content, make sure they are all # closed with the response. file1 = open(filename) file2 = open(filename) r = StreamingHttpResponse(file1) r.streaming_content = file2 self.assertFalse(file1.closed) self.assertFalse(file2.closed) r.close() self.assertTrue(file1.closed) self.assertTrue(file2.closed) class CookieTests(unittest.TestCase): def test_encode(self): """Semicolons and commas are encoded.""" c = SimpleCookie() c["test"] = "An,awkward;value" self.assertNotIn(";", c.output().rstrip(";")) # IE compat self.assertNotIn(",", c.output().rstrip(";")) # Safari compat def test_decode(self): """Semicolons and commas are decoded.""" c = SimpleCookie() c["test"] = "An,awkward;value" c2 = SimpleCookie() c2.load(c.output()[12:]) self.assertEqual(c["test"].value, c2["test"].value) c3 = parse_cookie(c.output()[12:]) self.assertEqual(c["test"].value, c3["test"]) def test_nonstandard_keys(self): """ A single non-standard cookie name doesn't affect all cookies (#13007). """ self.assertIn("good_cookie", parse_cookie("good_cookie=yes;bad:cookie=yes")) def test_repeated_nonstandard_keys(self): """ A repeated non-standard name doesn't affect all cookies (#15852). """ self.assertIn("good_cookie", parse_cookie("a:=b; a:=c; good_cookie=yes")) def test_python_cookies(self): """ Test cases copied from Python's Lib/test/test_http_cookies.py """ self.assertEqual( parse_cookie("chips=ahoy; vienna=finger"), {"chips": "ahoy", "vienna": "finger"}, ) # Here parse_cookie() differs from Python's cookie parsing in that it # treats all semicolons as delimiters, even within quotes. self.assertEqual( parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'), {"keebler": '"E=mc2', "L": '\\"Loves\\"', "fudge": "\\012", "": '"'}, ) # Illegal cookies that have an '=' char in an unquoted value. self.assertEqual(parse_cookie("keebler=E=mc2"), {"keebler": "E=mc2"}) # Cookies with ':' character in their name. self.assertEqual( parse_cookie("key:term=value:term"), {"key:term": "value:term"} ) # Cookies with '[' and ']'. 
self.assertEqual( parse_cookie("a=b; c=[; d=r; f=h"), {"a": "b", "c": "[", "d": "r", "f": "h"} ) def test_cookie_edgecases(self): # Cookies that RFC 6265 allows. self.assertEqual( parse_cookie("a=b; Domain=example.com"), {"a": "b", "Domain": "example.com"} ) # parse_cookie() has historically kept only the last cookie with the # same name. self.assertEqual(parse_cookie("a=b; h=i; a=c"), {"a": "c", "h": "i"}) def test_invalid_cookies(self): """ Cookie strings that go against RFC 6265 but browsers will send if set via document.cookie. """ # Chunks without an equals sign appear as unnamed values per # https://bugzilla.mozilla.org/show_bug.cgi?id=169091 self.assertIn( "django_language", parse_cookie("abc=def; unnamed; django_language=en") ) # Even a double quote may be an unnamed value. self.assertEqual(parse_cookie('a=b; "; c=d'), {"a": "b", "": '"', "c": "d"}) # Spaces in names and values, and an equals sign in values. self.assertEqual( parse_cookie("a b c=d e = f; gh=i"), {"a b c": "d e = f", "gh": "i"} ) # More characters the spec forbids. self.assertEqual( parse_cookie('a b,c<>@:/[]?{}=d " =e,f g'), {"a b,c<>@:/[]?{}": 'd " =e,f g'}, ) # Unicode characters. The spec only allows ASCII. self.assertEqual( parse_cookie("saint=André Bessette"), {"saint": "André Bessette"} ) # Browsers don't send extra whitespace or semicolons in Cookie headers, # but parse_cookie() should parse whitespace the same way # document.cookie parses whitespace. self.assertEqual( parse_cookie(" = b ; ; = ; c = ; "), {"": "b", "c": ""} ) def test_samesite(self): c = SimpleCookie("name=value; samesite=lax; httponly") self.assertEqual(c["name"]["samesite"], "lax") self.assertIn("SameSite=lax", c.output()) def test_httponly_after_load(self): c = SimpleCookie() c.load("name=val") c["name"]["httponly"] = True self.assertTrue(c["name"]["httponly"]) def test_load_dict(self): c = SimpleCookie() c.load({"name": "val"}) self.assertEqual(c["name"].value, "val") def test_pickle(self): rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1' expected_output = "Set-Cookie: %s" % rawdata C = SimpleCookie() C.load(rawdata) self.assertEqual(C.output(), expected_output) for proto in range(pickle.HIGHEST_PROTOCOL + 1): C1 = pickle.loads(pickle.dumps(C, protocol=proto)) self.assertEqual(C1.output(), expected_output) class HttpResponseHeadersTestCase(SimpleTestCase): """Headers by treating HttpResponse like a dictionary.""" def test_headers(self): response = HttpResponse() response["X-Foo"] = "bar" self.assertEqual(response["X-Foo"], "bar") self.assertEqual(response.headers["X-Foo"], "bar") self.assertIn("X-Foo", response) self.assertIs(response.has_header("X-Foo"), True) del response["X-Foo"] self.assertNotIn("X-Foo", response) self.assertNotIn("X-Foo", response.headers) # del doesn't raise a KeyError on nonexistent headers. del response["X-Foo"] def test_headers_as_iterable_of_tuple_pairs(self): response = HttpResponse(headers=(("X-Foo", "bar"),)) self.assertEqual(response["X-Foo"], "bar") def test_headers_bytestring(self): response = HttpResponse() response["X-Foo"] = b"bar" self.assertEqual(response["X-Foo"], "bar") self.assertEqual(response.headers["X-Foo"], "bar") def test_newlines_in_headers(self): response = HttpResponse() with self.assertRaises(BadHeaderError): response["test\rstr"] = "test" with self.assertRaises(BadHeaderError): response["test\nstr"] = "test"
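The test module that follows exercises Django database routers (TestRouter, AuthRouter, WriteRouter from .routers) without showing their implementations. As a rough illustration only, a minimal router consistent with the behaviour these tests assume (reads routed to "other", writes to "default") could look like the sketch below; PrimaryReplicaRouter is a hypothetical name and is not part of this suite.

class PrimaryReplicaRouter:
    # Hypothetical minimal router sketch: send reads to "other" and writes to
    # "default", mirroring the database aliases used by the tests below. This
    # is not the TestRouter/AuthRouter implementation, only an illustration of
    # the router API surface.
    def db_for_read(self, model, **hints):
        return "other"

    def db_for_write(self, model, **hints):
        return "default"

    def allow_relation(self, obj1, obj2, **hints):
        # Allow relations as long as both objects live in one of the two
        # test databases.
        dbs = {"default", "other"}
        return obj1._state.db in dbs and obj2._state.db in dbs

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        return True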
import datetime import pickle from io import StringIO from operator import attrgetter from unittest.mock import Mock from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.core import management from django.db import DEFAULT_DB_ALIAS, router, transaction from django.db.models import signals from django.db.utils import ConnectionRouter from django.test import SimpleTestCase, TestCase, override_settings from .models import Book, Person, Pet, Review, UserProfile from .routers import AuthRouter, TestRouter, WriteRouter class QueryTestCase(TestCase): databases = {"default", "other"} def test_db_selection(self): "Querysets will use the default database by default" self.assertEqual(Book.objects.db, DEFAULT_DB_ALIAS) self.assertEqual(Book.objects.all().db, DEFAULT_DB_ALIAS) self.assertEqual(Book.objects.using("other").db, "other") self.assertEqual(Book.objects.db_manager("other").db, "other") self.assertEqual(Book.objects.db_manager("other").all().db, "other") def test_default_creation(self): "Objects created on the default database don't leak onto other databases" # Create a book on the default database using create() Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16)) # Create a book on the default database using a save dive = Book() dive.title = "Dive into Python" dive.published = datetime.date(2009, 5, 4) dive.save() # Book exists on the default database, but not on other database try: Book.objects.get(title="Pro Django") Book.objects.using("default").get(title="Pro Django") except Book.DoesNotExist: self.fail('"Pro Django" should exist on default database') with self.assertRaises(Book.DoesNotExist): Book.objects.using("other").get(title="Pro Django") try: Book.objects.get(title="Dive into Python") Book.objects.using("default").get(title="Dive into Python") except Book.DoesNotExist: self.fail('"Dive into Python" should exist on default database') with self.assertRaises(Book.DoesNotExist): Book.objects.using("other").get(title="Dive into Python") def test_other_creation(self): "Objects created on another database don't leak onto the default database" # Create a book on the second database Book.objects.using("other").create( title="Pro Django", published=datetime.date(2008, 12, 16) ) # Create a book on the default database using a save dive = Book() dive.title = "Dive into Python" dive.published = datetime.date(2009, 5, 4) dive.save(using="other") # Book exists on the default database, but not on other database try: Book.objects.using("other").get(title="Pro Django") except Book.DoesNotExist: self.fail('"Pro Django" should exist on other database') with self.assertRaises(Book.DoesNotExist): Book.objects.get(title="Pro Django") with self.assertRaises(Book.DoesNotExist): Book.objects.using("default").get(title="Pro Django") try: Book.objects.using("other").get(title="Dive into Python") except Book.DoesNotExist: self.fail('"Dive into Python" should exist on other database') with self.assertRaises(Book.DoesNotExist): Book.objects.get(title="Dive into Python") with self.assertRaises(Book.DoesNotExist): Book.objects.using("default").get(title="Dive into Python") def test_refresh(self): dive = Book(title="Dive into Python", published=datetime.date(2009, 5, 4)) dive.save(using="other") dive2 = Book.objects.using("other").get() dive2.title = "Dive into Python (on default)" dive2.save(using="default") dive.refresh_from_db() self.assertEqual(dive.title, "Dive into Python") dive.refresh_from_db(using="default") 
self.assertEqual(dive.title, "Dive into Python (on default)") self.assertEqual(dive._state.db, "default") def test_refresh_router_instance_hint(self): router = Mock() router.db_for_read.return_value = None book = Book.objects.create( title="Dive Into Python", published=datetime.date(1957, 10, 12) ) with self.settings(DATABASE_ROUTERS=[router]): book.refresh_from_db() router.db_for_read.assert_called_once_with(Book, instance=book) def test_basic_queries(self): "Queries are constrained to a single database" dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) dive = Book.objects.using("other").get(published=datetime.date(2009, 5, 4)) self.assertEqual(dive.title, "Dive into Python") with self.assertRaises(Book.DoesNotExist): Book.objects.using("default").get(published=datetime.date(2009, 5, 4)) dive = Book.objects.using("other").get(title__icontains="dive") self.assertEqual(dive.title, "Dive into Python") with self.assertRaises(Book.DoesNotExist): Book.objects.using("default").get(title__icontains="dive") dive = Book.objects.using("other").get(title__iexact="dive INTO python") self.assertEqual(dive.title, "Dive into Python") with self.assertRaises(Book.DoesNotExist): Book.objects.using("default").get(title__iexact="dive INTO python") dive = Book.objects.using("other").get(published__year=2009) self.assertEqual(dive.title, "Dive into Python") self.assertEqual(dive.published, datetime.date(2009, 5, 4)) with self.assertRaises(Book.DoesNotExist): Book.objects.using("default").get(published__year=2009) years = Book.objects.using("other").dates("published", "year") self.assertEqual([o.year for o in years], [2009]) years = Book.objects.using("default").dates("published", "year") self.assertEqual([o.year for o in years], []) months = Book.objects.using("other").dates("published", "month") self.assertEqual([o.month for o in months], [5]) months = Book.objects.using("default").dates("published", "month") self.assertEqual([o.month for o in months], []) def test_m2m_separation(self): "M2M fields are constrained to a single database" # Create a book and author on the default database pro = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) marty = Person.objects.create(name="Marty Alchin") # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) mark = Person.objects.using("other").create(name="Mark Pilgrim") # Save the author relations pro.authors.set([marty]) dive.authors.set([mark]) # Inspect the m2m tables directly. 
# There should be 1 entry in each database self.assertEqual(Book.authors.through.objects.using("default").count(), 1) self.assertEqual(Book.authors.through.objects.using("other").count(), 1) # Queries work across m2m joins self.assertEqual( list( Book.objects.using("default") .filter(authors__name="Marty Alchin") .values_list("title", flat=True) ), ["Pro Django"], ) self.assertEqual( list( Book.objects.using("other") .filter(authors__name="Marty Alchin") .values_list("title", flat=True) ), [], ) self.assertEqual( list( Book.objects.using("default") .filter(authors__name="Mark Pilgrim") .values_list("title", flat=True) ), [], ) self.assertEqual( list( Book.objects.using("other") .filter(authors__name="Mark Pilgrim") .values_list("title", flat=True) ), ["Dive into Python"], ) # Reget the objects to clear caches dive = Book.objects.using("other").get(title="Dive into Python") mark = Person.objects.using("other").get(name="Mark Pilgrim") # Retrieve related object by descriptor. Related objects should be # database-bound. self.assertEqual( list(dive.authors.values_list("name", flat=True)), ["Mark Pilgrim"] ) self.assertEqual( list(mark.book_set.values_list("title", flat=True)), ["Dive into Python"], ) def test_m2m_forward_operations(self): "M2M forward manipulations are all constrained to a single DB" # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) mark = Person.objects.using("other").create(name="Mark Pilgrim") # Save the author relations dive.authors.set([mark]) # Add a second author john = Person.objects.using("other").create(name="John Smith") self.assertEqual( list( Book.objects.using("other") .filter(authors__name="John Smith") .values_list("title", flat=True) ), [], ) dive.authors.add(john) self.assertEqual( list( Book.objects.using("other") .filter(authors__name="Mark Pilgrim") .values_list("title", flat=True) ), ["Dive into Python"], ) self.assertEqual( list( Book.objects.using("other") .filter(authors__name="John Smith") .values_list("title", flat=True) ), ["Dive into Python"], ) # Remove the second author dive.authors.remove(john) self.assertEqual( list( Book.objects.using("other") .filter(authors__name="Mark Pilgrim") .values_list("title", flat=True) ), ["Dive into Python"], ) self.assertEqual( list( Book.objects.using("other") .filter(authors__name="John Smith") .values_list("title", flat=True) ), [], ) # Clear all authors dive.authors.clear() self.assertEqual( list( Book.objects.using("other") .filter(authors__name="Mark Pilgrim") .values_list("title", flat=True) ), [], ) self.assertEqual( list( Book.objects.using("other") .filter(authors__name="John Smith") .values_list("title", flat=True) ), [], ) # Create an author through the m2m interface dive.authors.create(name="Jane Brown") self.assertEqual( list( Book.objects.using("other") .filter(authors__name="Mark Pilgrim") .values_list("title", flat=True) ), [], ) self.assertEqual( list( Book.objects.using("other") .filter(authors__name="Jane Brown") .values_list("title", flat=True) ), ["Dive into Python"], ) def test_m2m_reverse_operations(self): "M2M reverse manipulations are all constrained to a single DB" # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) mark = Person.objects.using("other").create(name="Mark Pilgrim") # Save the author relations dive.authors.set([mark]) # Create a second book on the other database 
grease = Book.objects.using("other").create( title="Greasemonkey Hacks", published=datetime.date(2005, 11, 1) ) # Add a books to the m2m mark.book_set.add(grease) self.assertEqual( list( Person.objects.using("other") .filter(book__title="Dive into Python") .values_list("name", flat=True) ), ["Mark Pilgrim"], ) self.assertEqual( list( Person.objects.using("other") .filter(book__title="Greasemonkey Hacks") .values_list("name", flat=True) ), ["Mark Pilgrim"], ) # Remove a book from the m2m mark.book_set.remove(grease) self.assertEqual( list( Person.objects.using("other") .filter(book__title="Dive into Python") .values_list("name", flat=True) ), ["Mark Pilgrim"], ) self.assertEqual( list( Person.objects.using("other") .filter(book__title="Greasemonkey Hacks") .values_list("name", flat=True) ), [], ) # Clear the books associated with mark mark.book_set.clear() self.assertEqual( list( Person.objects.using("other") .filter(book__title="Dive into Python") .values_list("name", flat=True) ), [], ) self.assertEqual( list( Person.objects.using("other") .filter(book__title="Greasemonkey Hacks") .values_list("name", flat=True) ), [], ) # Create a book through the m2m interface mark.book_set.create( title="Dive into HTML5", published=datetime.date(2020, 1, 1) ) self.assertEqual( list( Person.objects.using("other") .filter(book__title="Dive into Python") .values_list("name", flat=True) ), [], ) self.assertEqual( list( Person.objects.using("other") .filter(book__title="Dive into HTML5") .values_list("name", flat=True) ), ["Mark Pilgrim"], ) def test_m2m_cross_database_protection(self): "Operations that involve sharing M2M objects across databases raise an error" # Create a book and author on the default database pro = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) marty = Person.objects.create(name="Marty Alchin") # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) mark = Person.objects.using("other").create(name="Mark Pilgrim") # Set a foreign key set with an object from a different database msg = ( 'Cannot assign "<Person: Marty Alchin>": the current database ' "router prevents this relation." 
) with self.assertRaisesMessage(ValueError, msg): with transaction.atomic(using="default"): marty.edited.set([pro, dive]) # Add to an m2m with an object from a different database msg = ( 'Cannot add "<Book: Dive into Python>": instance is on ' 'database "default", value is on database "other"' ) with self.assertRaisesMessage(ValueError, msg): with transaction.atomic(using="default"): marty.book_set.add(dive) # Set a m2m with an object from a different database with self.assertRaisesMessage(ValueError, msg): with transaction.atomic(using="default"): marty.book_set.set([pro, dive]) # Add to a reverse m2m with an object from a different database msg = ( 'Cannot add "<Person: Marty Alchin>": instance is on ' 'database "other", value is on database "default"' ) with self.assertRaisesMessage(ValueError, msg): with transaction.atomic(using="other"): dive.authors.add(marty) # Set a reverse m2m with an object from a different database with self.assertRaisesMessage(ValueError, msg): with transaction.atomic(using="other"): dive.authors.set([mark, marty]) def test_m2m_deletion(self): "Cascaded deletions of m2m relations issue queries on the right database" # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) mark = Person.objects.using("other").create(name="Mark Pilgrim") dive.authors.set([mark]) # Check the initial state self.assertEqual(Person.objects.using("default").count(), 0) self.assertEqual(Book.objects.using("default").count(), 0) self.assertEqual(Book.authors.through.objects.using("default").count(), 0) self.assertEqual(Person.objects.using("other").count(), 1) self.assertEqual(Book.objects.using("other").count(), 1) self.assertEqual(Book.authors.through.objects.using("other").count(), 1) # Delete the object on the other database dive.delete(using="other") self.assertEqual(Person.objects.using("default").count(), 0) self.assertEqual(Book.objects.using("default").count(), 0) self.assertEqual(Book.authors.through.objects.using("default").count(), 0) # The person still exists ... self.assertEqual(Person.objects.using("other").count(), 1) # ... but the book has been deleted self.assertEqual(Book.objects.using("other").count(), 0) # ... and the relationship object has also been deleted. self.assertEqual(Book.authors.through.objects.using("other").count(), 0) # Now try deletion in the reverse direction. Set up the relation again dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) dive.authors.set([mark]) # Check the initial state self.assertEqual(Person.objects.using("default").count(), 0) self.assertEqual(Book.objects.using("default").count(), 0) self.assertEqual(Book.authors.through.objects.using("default").count(), 0) self.assertEqual(Person.objects.using("other").count(), 1) self.assertEqual(Book.objects.using("other").count(), 1) self.assertEqual(Book.authors.through.objects.using("other").count(), 1) # Delete the object on the other database mark.delete(using="other") self.assertEqual(Person.objects.using("default").count(), 0) self.assertEqual(Book.objects.using("default").count(), 0) self.assertEqual(Book.authors.through.objects.using("default").count(), 0) # The person has been deleted ... self.assertEqual(Person.objects.using("other").count(), 0) # ... but the book still exists self.assertEqual(Book.objects.using("other").count(), 1) # ... and the relationship object has been deleted. 
self.assertEqual(Book.authors.through.objects.using("other").count(), 0) def test_foreign_key_separation(self): "FK fields are constrained to a single database" # Create a book and author on the default database pro = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) george = Person.objects.create(name="George Vilches") # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) chris = Person.objects.using("other").create(name="Chris Mills") # Save the author's favorite books pro.editor = george pro.save() dive.editor = chris dive.save() pro = Book.objects.using("default").get(title="Pro Django") self.assertEqual(pro.editor.name, "George Vilches") dive = Book.objects.using("other").get(title="Dive into Python") self.assertEqual(dive.editor.name, "Chris Mills") # Queries work across foreign key joins self.assertEqual( list( Person.objects.using("default") .filter(edited__title="Pro Django") .values_list("name", flat=True) ), ["George Vilches"], ) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Pro Django") .values_list("name", flat=True) ), [], ) self.assertEqual( list( Person.objects.using("default") .filter(edited__title="Dive into Python") .values_list("name", flat=True) ), [], ) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into Python") .values_list("name", flat=True) ), ["Chris Mills"], ) # Reget the objects to clear caches chris = Person.objects.using("other").get(name="Chris Mills") dive = Book.objects.using("other").get(title="Dive into Python") # Retrieve related object by descriptor. Related objects should be # database-bound. self.assertEqual( list(chris.edited.values_list("title", flat=True)), ["Dive into Python"] ) def test_foreign_key_reverse_operations(self): "FK reverse manipulations are all constrained to a single DB" dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) chris = Person.objects.using("other").create(name="Chris Mills") # Save the author relations dive.editor = chris dive.save() # Add a second book edited by chris html5 = Book.objects.using("other").create( title="Dive into HTML5", published=datetime.date(2010, 3, 15) ) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into HTML5") .values_list("name", flat=True) ), [], ) chris.edited.add(html5) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into HTML5") .values_list("name", flat=True) ), ["Chris Mills"], ) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into Python") .values_list("name", flat=True) ), ["Chris Mills"], ) # Remove the second editor chris.edited.remove(html5) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into HTML5") .values_list("name", flat=True) ), [], ) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into Python") .values_list("name", flat=True) ), ["Chris Mills"], ) # Clear all edited books chris.edited.clear() self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into HTML5") .values_list("name", flat=True) ), [], ) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into Python") .values_list("name", flat=True) ), [], ) # Create an author through the m2m interface chris.edited.create( title="Dive into Water", 
published=datetime.date(2010, 3, 15) ) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into HTML5") .values_list("name", flat=True) ), [], ) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into Water") .values_list("name", flat=True) ), ["Chris Mills"], ) self.assertEqual( list( Person.objects.using("other") .filter(edited__title="Dive into Python") .values_list("name", flat=True) ), [], ) def test_foreign_key_cross_database_protection(self): "Operations that involve sharing FK objects across databases raise an error" # Create a book and author on the default database pro = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) marty = Person.objects.create(name="Marty Alchin") # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) # Set a foreign key with an object from a different database msg = ( 'Cannot assign "<Person: Marty Alchin>": the current database ' "router prevents this relation." ) with self.assertRaisesMessage(ValueError, msg): dive.editor = marty # Set a foreign key set with an object from a different database with self.assertRaisesMessage(ValueError, msg): with transaction.atomic(using="default"): marty.edited.set([pro, dive]) # Add to a foreign key set with an object from a different database with self.assertRaisesMessage(ValueError, msg): with transaction.atomic(using="default"): marty.edited.add(dive) def test_foreign_key_deletion(self): """ Cascaded deletions of Foreign Key relations issue queries on the right database. """ mark = Person.objects.using("other").create(name="Mark Pilgrim") Pet.objects.using("other").create(name="Fido", owner=mark) # Check the initial state self.assertEqual(Person.objects.using("default").count(), 0) self.assertEqual(Pet.objects.using("default").count(), 0) self.assertEqual(Person.objects.using("other").count(), 1) self.assertEqual(Pet.objects.using("other").count(), 1) # Delete the person object, which will cascade onto the pet mark.delete(using="other") self.assertEqual(Person.objects.using("default").count(), 0) self.assertEqual(Pet.objects.using("default").count(), 0) # Both the pet and the person have been deleted from the right database self.assertEqual(Person.objects.using("other").count(), 0) self.assertEqual(Pet.objects.using("other").count(), 0) def test_foreign_key_validation(self): "ForeignKey.validate() uses the correct database" mickey = Person.objects.using("other").create(name="Mickey") pluto = Pet.objects.using("other").create(name="Pluto", owner=mickey) self.assertIsNone(pluto.full_clean()) # Any router that accesses `model` in db_for_read() works here. @override_settings(DATABASE_ROUTERS=[AuthRouter()]) def test_foreign_key_validation_with_router(self): """ ForeignKey.validate() passes `model` to db_for_read() even if model_instance=None. 
""" mickey = Person.objects.create(name="Mickey") owner_field = Pet._meta.get_field("owner") self.assertEqual(owner_field.clean(mickey.pk, None), mickey.pk) def test_o2o_separation(self): "OneToOne fields are constrained to a single database" # Create a user and profile on the default database alice = User.objects.db_manager("default").create_user( "alice", "[email protected]" ) alice_profile = UserProfile.objects.using("default").create( user=alice, flavor="chocolate" ) # Create a user and profile on the other database bob = User.objects.db_manager("other").create_user("bob", "[email protected]") bob_profile = UserProfile.objects.using("other").create( user=bob, flavor="crunchy frog" ) # Retrieve related objects; queries should be database constrained alice = User.objects.using("default").get(username="alice") self.assertEqual(alice.userprofile.flavor, "chocolate") bob = User.objects.using("other").get(username="bob") self.assertEqual(bob.userprofile.flavor, "crunchy frog") # Queries work across joins self.assertEqual( list( User.objects.using("default") .filter(userprofile__flavor="chocolate") .values_list("username", flat=True) ), ["alice"], ) self.assertEqual( list( User.objects.using("other") .filter(userprofile__flavor="chocolate") .values_list("username", flat=True) ), [], ) self.assertEqual( list( User.objects.using("default") .filter(userprofile__flavor="crunchy frog") .values_list("username", flat=True) ), [], ) self.assertEqual( list( User.objects.using("other") .filter(userprofile__flavor="crunchy frog") .values_list("username", flat=True) ), ["bob"], ) # Reget the objects to clear caches alice_profile = UserProfile.objects.using("default").get(flavor="chocolate") bob_profile = UserProfile.objects.using("other").get(flavor="crunchy frog") # Retrieve related object by descriptor. Related objects should be # database-bound. self.assertEqual(alice_profile.user.username, "alice") self.assertEqual(bob_profile.user.username, "bob") def test_o2o_cross_database_protection(self): "Operations that involve sharing FK objects across databases raise an error" # Create a user and profile on the default database alice = User.objects.db_manager("default").create_user( "alice", "[email protected]" ) # Create a user and profile on the other database bob = User.objects.db_manager("other").create_user("bob", "[email protected]") # Set a one-to-one relation with an object from a different database alice_profile = UserProfile.objects.using("default").create( user=alice, flavor="chocolate" ) msg = ( 'Cannot assign "%r": the current database router prevents this ' "relation." % alice_profile ) with self.assertRaisesMessage(ValueError, msg): bob.userprofile = alice_profile # BUT! if you assign a FK object when the base object hasn't # been saved yet, you implicitly assign the database for the # base object. bob_profile = UserProfile.objects.using("other").create( user=bob, flavor="crunchy frog" ) new_bob_profile = UserProfile(flavor="spring surprise") # assigning a profile requires an explicit pk as the object isn't saved charlie = User(pk=51, username="charlie", email="[email protected]") charlie.set_unusable_password() # initially, no db assigned self.assertIsNone(new_bob_profile._state.db) self.assertIsNone(charlie._state.db) # old object comes from 'other', so the new object is set to use 'other'... new_bob_profile.user = bob charlie.userprofile = bob_profile self.assertEqual(new_bob_profile._state.db, "other") self.assertEqual(charlie._state.db, "other") # ... 
but it isn't saved yet self.assertEqual( list(User.objects.using("other").values_list("username", flat=True)), ["bob"], ) self.assertEqual( list(UserProfile.objects.using("other").values_list("flavor", flat=True)), ["crunchy frog"], ) # When saved (no using required), new objects goes to 'other' charlie.save() bob_profile.save() new_bob_profile.save() self.assertEqual( list(User.objects.using("default").values_list("username", flat=True)), ["alice"], ) self.assertEqual( list(User.objects.using("other").values_list("username", flat=True)), ["bob", "charlie"], ) self.assertEqual( list(UserProfile.objects.using("default").values_list("flavor", flat=True)), ["chocolate"], ) self.assertEqual( list(UserProfile.objects.using("other").values_list("flavor", flat=True)), ["crunchy frog", "spring surprise"], ) # This also works if you assign the O2O relation in the constructor denise = User.objects.db_manager("other").create_user( "denise", "[email protected]" ) denise_profile = UserProfile(flavor="tofu", user=denise) self.assertEqual(denise_profile._state.db, "other") # ... but it isn't saved yet self.assertEqual( list(UserProfile.objects.using("default").values_list("flavor", flat=True)), ["chocolate"], ) self.assertEqual( list(UserProfile.objects.using("other").values_list("flavor", flat=True)), ["crunchy frog", "spring surprise"], ) # When saved, the new profile goes to 'other' denise_profile.save() self.assertEqual( list(UserProfile.objects.using("default").values_list("flavor", flat=True)), ["chocolate"], ) self.assertEqual( list(UserProfile.objects.using("other").values_list("flavor", flat=True)), ["crunchy frog", "spring surprise", "tofu"], ) def test_generic_key_separation(self): "Generic fields are constrained to a single database" # Create a book and author on the default database pro = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) review1 = Review.objects.create(source="Python Monthly", content_object=pro) # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) review2 = Review.objects.using("other").create( source="Python Weekly", content_object=dive ) review1 = Review.objects.using("default").get(source="Python Monthly") self.assertEqual(review1.content_object.title, "Pro Django") review2 = Review.objects.using("other").get(source="Python Weekly") self.assertEqual(review2.content_object.title, "Dive into Python") # Reget the objects to clear caches dive = Book.objects.using("other").get(title="Dive into Python") # Retrieve related object by descriptor. Related objects should be # database-bound. 
self.assertEqual( list(dive.reviews.values_list("source", flat=True)), ["Python Weekly"] ) def test_generic_key_reverse_operations(self): "Generic reverse manipulations are all constrained to a single DB" dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) temp = Book.objects.using("other").create( title="Temp", published=datetime.date(2009, 5, 4) ) review1 = Review.objects.using("other").create( source="Python Weekly", content_object=dive ) review2 = Review.objects.using("other").create( source="Python Monthly", content_object=temp ) self.assertEqual( list( Review.objects.using("default") .filter(object_id=dive.pk) .values_list("source", flat=True) ), [], ) self.assertEqual( list( Review.objects.using("other") .filter(object_id=dive.pk) .values_list("source", flat=True) ), ["Python Weekly"], ) # Add a second review dive.reviews.add(review2) self.assertEqual( list( Review.objects.using("default") .filter(object_id=dive.pk) .values_list("source", flat=True) ), [], ) self.assertEqual( list( Review.objects.using("other") .filter(object_id=dive.pk) .values_list("source", flat=True) ), ["Python Monthly", "Python Weekly"], ) # Remove the second author dive.reviews.remove(review1) self.assertEqual( list( Review.objects.using("default") .filter(object_id=dive.pk) .values_list("source", flat=True) ), [], ) self.assertEqual( list( Review.objects.using("other") .filter(object_id=dive.pk) .values_list("source", flat=True) ), ["Python Monthly"], ) # Clear all reviews dive.reviews.clear() self.assertEqual( list( Review.objects.using("default") .filter(object_id=dive.pk) .values_list("source", flat=True) ), [], ) self.assertEqual( list( Review.objects.using("other") .filter(object_id=dive.pk) .values_list("source", flat=True) ), [], ) # Create an author through the generic interface dive.reviews.create(source="Python Daily") self.assertEqual( list( Review.objects.using("default") .filter(object_id=dive.pk) .values_list("source", flat=True) ), [], ) self.assertEqual( list( Review.objects.using("other") .filter(object_id=dive.pk) .values_list("source", flat=True) ), ["Python Daily"], ) def test_generic_key_cross_database_protection(self): """ Operations that involve sharing generic key objects across databases raise an error. """ # Create a book and author on the default database pro = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) review1 = Review.objects.create(source="Python Monthly", content_object=pro) # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) Review.objects.using("other").create( source="Python Weekly", content_object=dive ) # Set a foreign key with an object from a different database msg = ( 'Cannot assign "<ContentType: multiple_database | book>": the ' "current database router prevents this relation." ) with self.assertRaisesMessage(ValueError, msg): review1.content_object = dive # Add to a foreign key set with an object from a different database msg = ( "<Review: Python Monthly> instance isn't saved. " "Use bulk=False or save the object first." ) with self.assertRaisesMessage(ValueError, msg): with transaction.atomic(using="other"): dive.reviews.add(review1) # BUT! if you assign a FK object when the base object hasn't # been saved yet, you implicitly assign the database for the # base object. 
review3 = Review(source="Python Daily") # initially, no db assigned self.assertIsNone(review3._state.db) # Dive comes from 'other', so review3 is set to use 'other'... review3.content_object = dive self.assertEqual(review3._state.db, "other") # ... but it isn't saved yet self.assertEqual( list( Review.objects.using("default") .filter(object_id=pro.pk) .values_list("source", flat=True) ), ["Python Monthly"], ) self.assertEqual( list( Review.objects.using("other") .filter(object_id=dive.pk) .values_list("source", flat=True) ), ["Python Weekly"], ) # When saved, John goes to 'other' review3.save() self.assertEqual( list( Review.objects.using("default") .filter(object_id=pro.pk) .values_list("source", flat=True) ), ["Python Monthly"], ) self.assertEqual( list( Review.objects.using("other") .filter(object_id=dive.pk) .values_list("source", flat=True) ), ["Python Daily", "Python Weekly"], ) def test_generic_key_deletion(self): """ Cascaded deletions of Generic Key relations issue queries on the right database. """ dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) Review.objects.using("other").create( source="Python Weekly", content_object=dive ) # Check the initial state self.assertEqual(Book.objects.using("default").count(), 0) self.assertEqual(Review.objects.using("default").count(), 0) self.assertEqual(Book.objects.using("other").count(), 1) self.assertEqual(Review.objects.using("other").count(), 1) # Delete the Book object, which will cascade onto the pet dive.delete(using="other") self.assertEqual(Book.objects.using("default").count(), 0) self.assertEqual(Review.objects.using("default").count(), 0) # Both the pet and the person have been deleted from the right database self.assertEqual(Book.objects.using("other").count(), 0) self.assertEqual(Review.objects.using("other").count(), 0) def test_ordering(self): "get_next_by_XXX commands stick to a single database" Book.objects.create(title="Pro Django", published=datetime.date(2008, 12, 16)) dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) learn = Book.objects.using("other").create( title="Learning Python", published=datetime.date(2008, 7, 16) ) self.assertEqual(learn.get_next_by_published().title, "Dive into Python") self.assertEqual(dive.get_previous_by_published().title, "Learning Python") def test_raw(self): "test the raw() method across databases" dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) val = Book.objects.db_manager("other").raw( "SELECT id FROM multiple_database_book" ) self.assertQuerySetEqual(val, [dive.pk], attrgetter("pk")) val = Book.objects.raw("SELECT id FROM multiple_database_book").using("other") self.assertQuerySetEqual(val, [dive.pk], attrgetter("pk")) def test_select_related(self): """ Database assignment is retained if an object is retrieved with select_related(). 
""" # Create a book and author on the other database mark = Person.objects.using("other").create(name="Mark Pilgrim") Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4), editor=mark, ) # Retrieve the Person using select_related() book = ( Book.objects.using("other") .select_related("editor") .get(title="Dive into Python") ) # The editor instance should have a db state self.assertEqual(book.editor._state.db, "other") def test_subquery(self): """Make sure as_sql works with subqueries and primary/replica.""" sub = Person.objects.using("other").filter(name="fff") qs = Book.objects.filter(editor__in=sub) # When you call __str__ on the query object, it doesn't know about using # so it falls back to the default. If the subquery explicitly uses a # different database, an error should be raised. msg = ( "Subqueries aren't allowed across different databases. Force the " "inner query to be evaluated using `list(inner_query)`." ) with self.assertRaisesMessage(ValueError, msg): str(qs.query) # Evaluating the query shouldn't work, either with self.assertRaisesMessage(ValueError, msg): for obj in qs: pass def test_related_manager(self): "Related managers return managers, not querysets" mark = Person.objects.using("other").create(name="Mark Pilgrim") # extra_arg is removed by the BookManager's implementation of # create(); but the BookManager's implementation won't get called # unless edited returns a Manager, not a queryset mark.book_set.create( title="Dive into Python", published=datetime.date(2009, 5, 4), extra_arg=True, ) mark.book_set.get_or_create( title="Dive into Python", published=datetime.date(2009, 5, 4), extra_arg=True, ) mark.edited.create( title="Dive into Water", published=datetime.date(2009, 5, 4), extra_arg=True ) mark.edited.get_or_create( title="Dive into Water", published=datetime.date(2009, 5, 4), extra_arg=True ) class ConnectionRouterTestCase(SimpleTestCase): @override_settings( DATABASE_ROUTERS=[ "multiple_database.tests.TestRouter", "multiple_database.tests.WriteRouter", ] ) def test_router_init_default(self): connection_router = ConnectionRouter() self.assertEqual( [r.__class__.__name__ for r in connection_router.routers], ["TestRouter", "WriteRouter"], ) def test_router_init_arg(self): connection_router = ConnectionRouter( [ "multiple_database.tests.TestRouter", "multiple_database.tests.WriteRouter", ] ) self.assertEqual( [r.__class__.__name__ for r in connection_router.routers], ["TestRouter", "WriteRouter"], ) # Init with instances instead of strings connection_router = ConnectionRouter([TestRouter(), WriteRouter()]) self.assertEqual( [r.__class__.__name__ for r in connection_router.routers], ["TestRouter", "WriteRouter"], ) # Make the 'other' database appear to be a replica of the 'default' @override_settings(DATABASE_ROUTERS=[TestRouter()]) class RouterTestCase(TestCase): databases = {"default", "other"} def test_db_selection(self): "Querysets obey the router for db suggestions" self.assertEqual(Book.objects.db, "other") self.assertEqual(Book.objects.all().db, "other") self.assertEqual(Book.objects.using("default").db, "default") self.assertEqual(Book.objects.db_manager("default").db, "default") self.assertEqual(Book.objects.db_manager("default").all().db, "default") def test_migrate_selection(self): "Synchronization behavior is predictable" self.assertTrue(router.allow_migrate_model("default", User)) self.assertTrue(router.allow_migrate_model("default", Book)) self.assertTrue(router.allow_migrate_model("other", User)) 
self.assertTrue(router.allow_migrate_model("other", Book)) with override_settings(DATABASE_ROUTERS=[TestRouter(), AuthRouter()]): # Add the auth router to the chain. TestRouter is a universal # synchronizer, so it should have no effect. self.assertTrue(router.allow_migrate_model("default", User)) self.assertTrue(router.allow_migrate_model("default", Book)) self.assertTrue(router.allow_migrate_model("other", User)) self.assertTrue(router.allow_migrate_model("other", Book)) with override_settings(DATABASE_ROUTERS=[AuthRouter(), TestRouter()]): # Now check what happens if the router order is reversed. self.assertFalse(router.allow_migrate_model("default", User)) self.assertTrue(router.allow_migrate_model("default", Book)) self.assertTrue(router.allow_migrate_model("other", User)) self.assertTrue(router.allow_migrate_model("other", Book)) def test_partial_router(self): "A router can choose to implement a subset of methods" dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) # First check the baseline behavior. self.assertEqual(router.db_for_read(User), "other") self.assertEqual(router.db_for_read(Book), "other") self.assertEqual(router.db_for_write(User), "default") self.assertEqual(router.db_for_write(Book), "default") self.assertTrue(router.allow_relation(dive, dive)) self.assertTrue(router.allow_migrate_model("default", User)) self.assertTrue(router.allow_migrate_model("default", Book)) with override_settings( DATABASE_ROUTERS=[WriteRouter(), AuthRouter(), TestRouter()] ): self.assertEqual(router.db_for_read(User), "default") self.assertEqual(router.db_for_read(Book), "other") self.assertEqual(router.db_for_write(User), "writer") self.assertEqual(router.db_for_write(Book), "writer") self.assertTrue(router.allow_relation(dive, dive)) self.assertFalse(router.allow_migrate_model("default", User)) self.assertTrue(router.allow_migrate_model("default", Book)) def test_database_routing(self): marty = Person.objects.using("default").create(name="Marty Alchin") pro = Book.objects.using("default").create( title="Pro Django", published=datetime.date(2008, 12, 16), editor=marty, ) pro.authors.set([marty]) # Create a book and author on the other database Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) # An update query will be routed to the default database Book.objects.filter(title="Pro Django").update(pages=200) with self.assertRaises(Book.DoesNotExist): # By default, the get query will be directed to 'other' Book.objects.get(title="Pro Django") # But the same query issued explicitly at a database will work. pro = Book.objects.using("default").get(title="Pro Django") # The update worked. self.assertEqual(pro.pages, 200) # An update query with an explicit using clause will be routed # to the requested database. Book.objects.using("other").filter(title="Dive into Python").update(pages=300) self.assertEqual(Book.objects.get(title="Dive into Python").pages, 300) # Related object queries stick to the same database # as the original object, regardless of the router self.assertEqual( list(pro.authors.values_list("name", flat=True)), ["Marty Alchin"] ) self.assertEqual(pro.editor.name, "Marty Alchin") # get_or_create is a special case. 
The get needs to be targeted at # the write database in order to avoid potential transaction # consistency problems book, created = Book.objects.get_or_create(title="Pro Django") self.assertFalse(created) book, created = Book.objects.get_or_create( title="Dive Into Python", defaults={"published": datetime.date(2009, 5, 4)} ) self.assertTrue(created) # Check the head count of objects self.assertEqual(Book.objects.using("default").count(), 2) self.assertEqual(Book.objects.using("other").count(), 1) # If a database isn't specified, the read database is used self.assertEqual(Book.objects.count(), 1) # A delete query will also be routed to the default database Book.objects.filter(pages__gt=150).delete() # The default database has lost the book. self.assertEqual(Book.objects.using("default").count(), 1) self.assertEqual(Book.objects.using("other").count(), 1) def test_invalid_set_foreign_key_assignment(self): marty = Person.objects.using("default").create(name="Marty Alchin") dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4), ) # Set a foreign key set with an object from a different database msg = ( "<Book: Dive into Python> instance isn't saved. Use bulk=False or save the " "object first." ) with self.assertRaisesMessage(ValueError, msg): marty.edited.set([dive]) def test_foreign_key_cross_database_protection(self): "Foreign keys can cross databases if they two databases have a common source" # Create a book and author on the default database pro = Book.objects.using("default").create( title="Pro Django", published=datetime.date(2008, 12, 16) ) marty = Person.objects.using("default").create(name="Marty Alchin") # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) mark = Person.objects.using("other").create(name="Mark Pilgrim") # Set a foreign key with an object from a different database dive.editor = marty # Database assignments of original objects haven't changed... self.assertEqual(marty._state.db, "default") self.assertEqual(pro._state.db, "default") self.assertEqual(dive._state.db, "other") self.assertEqual(mark._state.db, "other") # ... but they will when the affected object is saved. dive.save() self.assertEqual(dive._state.db, "default") # ...and the source database now has a copy of any object saved Book.objects.using("default").get(title="Dive into Python").delete() # This isn't a real primary/replica database, so restore the original from other dive = Book.objects.using("other").get(title="Dive into Python") self.assertEqual(dive._state.db, "other") # Set a foreign key set with an object from a different database marty.edited.set([pro, dive], bulk=False) # Assignment implies a save, so database assignments of original # objects have changed... self.assertEqual(marty._state.db, "default") self.assertEqual(pro._state.db, "default") self.assertEqual(dive._state.db, "default") self.assertEqual(mark._state.db, "other") # ...and the source database now has a copy of any object saved Book.objects.using("default").get(title="Dive into Python").delete() # This isn't a real primary/replica database, so restore the original from other dive = Book.objects.using("other").get(title="Dive into Python") self.assertEqual(dive._state.db, "other") # Add to a foreign key set with an object from a different database marty.edited.add(dive, bulk=False) # Add implies a save, so database assignments of original objects have # changed... 
self.assertEqual(marty._state.db, "default") self.assertEqual(pro._state.db, "default") self.assertEqual(dive._state.db, "default") self.assertEqual(mark._state.db, "other") # ...and the source database now has a copy of any object saved Book.objects.using("default").get(title="Dive into Python").delete() # This isn't a real primary/replica database, so restore the original from other dive = Book.objects.using("other").get(title="Dive into Python") # If you assign a FK object when the base object hasn't # been saved yet, you implicitly assign the database for the # base object. chris = Person(name="Chris Mills") html5 = Book(title="Dive into HTML5", published=datetime.date(2010, 3, 15)) # initially, no db assigned self.assertIsNone(chris._state.db) self.assertIsNone(html5._state.db) # old object comes from 'other', so the new object is set to use the # source of 'other'... self.assertEqual(dive._state.db, "other") chris.save() dive.editor = chris html5.editor = mark self.assertEqual(dive._state.db, "other") self.assertEqual(mark._state.db, "other") self.assertEqual(chris._state.db, "default") self.assertEqual(html5._state.db, "default") # This also works if you assign the FK in the constructor water = Book( title="Dive into Water", published=datetime.date(2001, 1, 1), editor=mark ) self.assertEqual(water._state.db, "default") # For the remainder of this test, create a copy of 'mark' in the # 'default' database to prevent integrity errors on backends that # don't defer constraints checks until the end of the transaction mark.save(using="default") # This moved 'mark' in the 'default' database, move it back in 'other' mark.save(using="other") self.assertEqual(mark._state.db, "other") # If you create an object through a FK relation, it will be # written to the write database, even if the original object # was on the read database cheesecake = mark.edited.create( title="Dive into Cheesecake", published=datetime.date(2010, 3, 15) ) self.assertEqual(cheesecake._state.db, "default") # Same goes for get_or_create, regardless of whether getting or creating cheesecake, created = mark.edited.get_or_create( title="Dive into Cheesecake", published=datetime.date(2010, 3, 15), ) self.assertEqual(cheesecake._state.db, "default") puddles, created = mark.edited.get_or_create( title="Dive into Puddles", published=datetime.date(2010, 3, 15) ) self.assertEqual(puddles._state.db, "default") def test_m2m_cross_database_protection(self): "M2M relations can cross databases if the database share a source" # Create books and authors on the inverse to the usual database pro = Book.objects.using("other").create( pk=1, title="Pro Django", published=datetime.date(2008, 12, 16) ) marty = Person.objects.using("other").create(pk=1, name="Marty Alchin") dive = Book.objects.using("default").create( pk=2, title="Dive into Python", published=datetime.date(2009, 5, 4) ) mark = Person.objects.using("default").create(pk=2, name="Mark Pilgrim") # Now save back onto the usual database. # This simulates primary/replica - the objects exist on both database, # but the _state.db is as it is for all other tests. 
pro.save(using="default") marty.save(using="default") dive.save(using="other") mark.save(using="other") # We have 2 of both types of object on both databases self.assertEqual(Book.objects.using("default").count(), 2) self.assertEqual(Book.objects.using("other").count(), 2) self.assertEqual(Person.objects.using("default").count(), 2) self.assertEqual(Person.objects.using("other").count(), 2) # Set a m2m set with an object from a different database marty.book_set.set([pro, dive]) # Database assignments don't change self.assertEqual(marty._state.db, "default") self.assertEqual(pro._state.db, "default") self.assertEqual(dive._state.db, "other") self.assertEqual(mark._state.db, "other") # All m2m relations should be saved on the default database self.assertEqual(Book.authors.through.objects.using("default").count(), 2) self.assertEqual(Book.authors.through.objects.using("other").count(), 0) # Reset relations Book.authors.through.objects.using("default").delete() # Add to an m2m with an object from a different database marty.book_set.add(dive) # Database assignments don't change self.assertEqual(marty._state.db, "default") self.assertEqual(pro._state.db, "default") self.assertEqual(dive._state.db, "other") self.assertEqual(mark._state.db, "other") # All m2m relations should be saved on the default database self.assertEqual(Book.authors.through.objects.using("default").count(), 1) self.assertEqual(Book.authors.through.objects.using("other").count(), 0) # Reset relations Book.authors.through.objects.using("default").delete() # Set a reverse m2m with an object from a different database dive.authors.set([mark, marty]) # Database assignments don't change self.assertEqual(marty._state.db, "default") self.assertEqual(pro._state.db, "default") self.assertEqual(dive._state.db, "other") self.assertEqual(mark._state.db, "other") # All m2m relations should be saved on the default database self.assertEqual(Book.authors.through.objects.using("default").count(), 2) self.assertEqual(Book.authors.through.objects.using("other").count(), 0) # Reset relations Book.authors.through.objects.using("default").delete() self.assertEqual(Book.authors.through.objects.using("default").count(), 0) self.assertEqual(Book.authors.through.objects.using("other").count(), 0) # Add to a reverse m2m with an object from a different database dive.authors.add(marty) # Database assignments don't change self.assertEqual(marty._state.db, "default") self.assertEqual(pro._state.db, "default") self.assertEqual(dive._state.db, "other") self.assertEqual(mark._state.db, "other") # All m2m relations should be saved on the default database self.assertEqual(Book.authors.through.objects.using("default").count(), 1) self.assertEqual(Book.authors.through.objects.using("other").count(), 0) # If you create an object through a M2M relation, it will be # written to the write database, even if the original object # was on the read database alice = dive.authors.create(name="Alice", pk=3) self.assertEqual(alice._state.db, "default") # Same goes for get_or_create, regardless of whether getting or creating alice, created = dive.authors.get_or_create(name="Alice") self.assertEqual(alice._state.db, "default") bob, created = dive.authors.get_or_create(name="Bob", defaults={"pk": 4}) self.assertEqual(bob._state.db, "default") def test_o2o_cross_database_protection(self): "Operations that involve sharing FK objects across databases raise an error" # Create a user and profile on the default database alice = User.objects.db_manager("default").create_user( "alice", 
"[email protected]" ) # Create a user and profile on the other database bob = User.objects.db_manager("other").create_user("bob", "[email protected]") # Set a one-to-one relation with an object from a different database alice_profile = UserProfile.objects.create(user=alice, flavor="chocolate") bob.userprofile = alice_profile # Database assignments of original objects haven't changed... self.assertEqual(alice._state.db, "default") self.assertEqual(alice_profile._state.db, "default") self.assertEqual(bob._state.db, "other") # ... but they will when the affected object is saved. bob.save() self.assertEqual(bob._state.db, "default") def test_generic_key_cross_database_protection(self): "Generic Key operations can span databases if they share a source" # Create a book and author on the default database pro = Book.objects.using("default").create( title="Pro Django", published=datetime.date(2008, 12, 16) ) review1 = Review.objects.using("default").create( source="Python Monthly", content_object=pro ) # Create a book and author on the other database dive = Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4) ) review2 = Review.objects.using("other").create( source="Python Weekly", content_object=dive ) # Set a generic foreign key with an object from a different database review1.content_object = dive # Database assignments of original objects haven't changed... self.assertEqual(pro._state.db, "default") self.assertEqual(review1._state.db, "default") self.assertEqual(dive._state.db, "other") self.assertEqual(review2._state.db, "other") # ... but they will when the affected object is saved. dive.save() self.assertEqual(review1._state.db, "default") self.assertEqual(dive._state.db, "default") # ...and the source database now has a copy of any object saved Book.objects.using("default").get(title="Dive into Python").delete() # This isn't a real primary/replica database, so restore the original from other dive = Book.objects.using("other").get(title="Dive into Python") self.assertEqual(dive._state.db, "other") # Add to a generic foreign key set with an object from a different database dive.reviews.add(review1) # Database assignments of original objects haven't changed... self.assertEqual(pro._state.db, "default") self.assertEqual(review1._state.db, "default") self.assertEqual(dive._state.db, "other") self.assertEqual(review2._state.db, "other") # ... but they will when the affected object is saved. dive.save() self.assertEqual(dive._state.db, "default") # ...and the source database now has a copy of any object saved Book.objects.using("default").get(title="Dive into Python").delete() # BUT! if you assign a FK object when the base object hasn't # been saved yet, you implicitly assign the database for the # base object. review3 = Review(source="Python Daily") # initially, no db assigned self.assertIsNone(review3._state.db) # Dive comes from 'other', so review3 is set to use the source of 'other'... 
review3.content_object = dive self.assertEqual(review3._state.db, "default") # If you create an object through a M2M relation, it will be # written to the write database, even if the original object # was on the read database dive = Book.objects.using("other").get(title="Dive into Python") nyt = dive.reviews.create(source="New York Times", content_object=dive) self.assertEqual(nyt._state.db, "default") def test_m2m_managers(self): "M2M relations are represented by managers, and can be controlled like managers" pro = Book.objects.using("other").create( pk=1, title="Pro Django", published=datetime.date(2008, 12, 16) ) marty = Person.objects.using("other").create(pk=1, name="Marty Alchin") self.assertEqual(pro.authors.db, "other") self.assertEqual(pro.authors.db_manager("default").db, "default") self.assertEqual(pro.authors.db_manager("default").all().db, "default") self.assertEqual(marty.book_set.db, "other") self.assertEqual(marty.book_set.db_manager("default").db, "default") self.assertEqual(marty.book_set.db_manager("default").all().db, "default") def test_foreign_key_managers(self): """ FK reverse relations are represented by managers, and can be controlled like managers. """ marty = Person.objects.using("other").create(pk=1, name="Marty Alchin") Book.objects.using("other").create( pk=1, title="Pro Django", published=datetime.date(2008, 12, 16), editor=marty, ) self.assertEqual(marty.edited.db, "other") self.assertEqual(marty.edited.db_manager("default").db, "default") self.assertEqual(marty.edited.db_manager("default").all().db, "default") def test_generic_key_managers(self): """ Generic key relations are represented by managers, and can be controlled like managers. """ pro = Book.objects.using("other").create( title="Pro Django", published=datetime.date(2008, 12, 16) ) Review.objects.using("other").create( source="Python Monthly", content_object=pro ) self.assertEqual(pro.reviews.db, "other") self.assertEqual(pro.reviews.db_manager("default").db, "default") self.assertEqual(pro.reviews.db_manager("default").all().db, "default") def test_subquery(self): """Make sure as_sql works with subqueries and primary/replica.""" # Create a book and author on the other database mark = Person.objects.using("other").create(name="Mark Pilgrim") Book.objects.using("other").create( title="Dive into Python", published=datetime.date(2009, 5, 4), editor=mark, ) sub = Person.objects.filter(name="Mark Pilgrim") qs = Book.objects.filter(editor__in=sub) # When you call __str__ on the query object, it doesn't know about using # so it falls back to the default. Don't let routing instructions # force the subquery to an incompatible database. 
        str(qs.query)

        # If you evaluate the query, it should work, running on 'other'
        self.assertEqual(list(qs.values_list("title", flat=True)), ["Dive into Python"])

    def test_deferred_models(self):
        mark_def = Person.objects.using("default").create(name="Mark Pilgrim")
        mark_other = Person.objects.using("other").create(name="Mark Pilgrim")
        orig_b = Book.objects.using("other").create(
            title="Dive into Python",
            published=datetime.date(2009, 5, 4),
            editor=mark_other,
        )
        b = Book.objects.using("other").only("title").get(pk=orig_b.pk)
        self.assertEqual(b.published, datetime.date(2009, 5, 4))
        b = Book.objects.using("other").only("title").get(pk=orig_b.pk)
        b.editor = mark_def
        b.save(using="default")
        self.assertEqual(
            Book.objects.using("default").get(pk=b.pk).published,
            datetime.date(2009, 5, 4),
        )


@override_settings(DATABASE_ROUTERS=[AuthRouter()])
class AuthTestCase(TestCase):
    databases = {"default", "other"}

    def test_auth_manager(self):
        "The methods on the auth manager obey database hints"
        # Create one user using default allocation policy
        User.objects.create_user("alice", "[email protected]")

        # Create another user, explicitly specifying the database
        User.objects.db_manager("default").create_user("bob", "[email protected]")

        # The first user only exists on the 'other' database
        alice = User.objects.using("other").get(username="alice")
        self.assertEqual(alice.username, "alice")
        self.assertEqual(alice._state.db, "other")
        with self.assertRaises(User.DoesNotExist):
            User.objects.using("default").get(username="alice")

        # The second user only exists on the default database
        bob = User.objects.using("default").get(username="bob")
        self.assertEqual(bob.username, "bob")
        self.assertEqual(bob._state.db, "default")
        with self.assertRaises(User.DoesNotExist):
            User.objects.using("other").get(username="bob")

        # That is... there is one user on each database
        self.assertEqual(User.objects.using("default").count(), 1)
        self.assertEqual(User.objects.using("other").count(), 1)

    def test_dumpdata(self):
        "dumpdata honors allow_migrate restrictions on the router"
        User.objects.create_user("alice", "[email protected]")
        User.objects.db_manager("default").create_user("bob", "[email protected]")

        # dumping the default database doesn't try to include auth because
        # allow_migrate prohibits auth on default
        new_io = StringIO()
        management.call_command(
            "dumpdata", "auth", format="json", database="default", stdout=new_io
        )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, "[]")

        # dumping the other database does include auth
        new_io = StringIO()
        management.call_command(
            "dumpdata", "auth", format="json", database="other", stdout=new_io
        )
        command_output = new_io.getvalue().strip()
        self.assertIn('"email": "[email protected]"', command_output)


class AntiPetRouter:
    # A router that only expresses an opinion on migrate,
    # passing pets to the 'other' database

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        if db == "other":
            return model_name == "pet"
        else:
            return model_name != "pet"


class FixtureTestCase(TestCase):
    databases = {"default", "other"}
    fixtures = ["multidb-common", "multidb"]

    @override_settings(DATABASE_ROUTERS=[AntiPetRouter()])
    def test_fixture_loading(self):
        "Multi-db fixtures are loaded correctly"
        # "Pro Django" exists on the default database, but not on the other database
        Book.objects.get(title="Pro Django")
        Book.objects.using("default").get(title="Pro Django")
        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using("other").get(title="Pro Django")

        # "Dive into Python" exists on the other database, but not on the default
        Book.objects.using("other").get(title="Dive into Python")
        with self.assertRaises(Book.DoesNotExist):
            Book.objects.get(title="Dive into Python")
        with self.assertRaises(Book.DoesNotExist):
            Book.objects.using("default").get(title="Dive into Python")

        # "Definitive Guide" exists on both databases
        Book.objects.get(title="The Definitive Guide to Django")
        Book.objects.using("default").get(title="The Definitive Guide to Django")
        Book.objects.using("other").get(title="The Definitive Guide to Django")

    @override_settings(DATABASE_ROUTERS=[AntiPetRouter()])
    def test_pseudo_empty_fixtures(self):
        """
        A fixture can contain entries, but lead to nothing in the database;
        this shouldn't raise an error (#14068).
        """
        new_io = StringIO()
        management.call_command("loaddata", "pets", stdout=new_io, stderr=new_io)
        command_output = new_io.getvalue().strip()
        # No objects will actually be loaded
        self.assertEqual(
            command_output, "Installed 0 object(s) (of 2) from 1 fixture(s)"
        )


class PickleQuerySetTestCase(TestCase):
    databases = {"default", "other"}

    def test_pickling(self):
        for db in self.databases:
            Book.objects.using(db).create(
                title="Dive into Python", published=datetime.date(2009, 5, 4)
            )
            qs = Book.objects.all()
            self.assertEqual(qs.db, pickle.loads(pickle.dumps(qs)).db)


class DatabaseReceiver:
    """
    Used in the tests for the database argument in signals (#13552)
    """

    def __call__(self, signal, sender, **kwargs):
        self._database = kwargs["using"]


class WriteToOtherRouter:
    """
    A router that sends all writes to the other database.
""" def db_for_write(self, model, **hints): return "other" class SignalTests(TestCase): databases = {"default", "other"} def override_router(self): return override_settings(DATABASE_ROUTERS=[WriteToOtherRouter()]) def test_database_arg_save_and_delete(self): """ The pre/post_save signal contains the correct database. """ # Make some signal receivers pre_save_receiver = DatabaseReceiver() post_save_receiver = DatabaseReceiver() pre_delete_receiver = DatabaseReceiver() post_delete_receiver = DatabaseReceiver() # Make model and connect receivers signals.pre_save.connect(sender=Person, receiver=pre_save_receiver) signals.post_save.connect(sender=Person, receiver=post_save_receiver) signals.pre_delete.connect(sender=Person, receiver=pre_delete_receiver) signals.post_delete.connect(sender=Person, receiver=post_delete_receiver) p = Person.objects.create(name="Darth Vader") # Save and test receivers got calls p.save() self.assertEqual(pre_save_receiver._database, DEFAULT_DB_ALIAS) self.assertEqual(post_save_receiver._database, DEFAULT_DB_ALIAS) # Delete, and test p.delete() self.assertEqual(pre_delete_receiver._database, DEFAULT_DB_ALIAS) self.assertEqual(post_delete_receiver._database, DEFAULT_DB_ALIAS) # Save again to a different database p.save(using="other") self.assertEqual(pre_save_receiver._database, "other") self.assertEqual(post_save_receiver._database, "other") # Delete, and test p.delete(using="other") self.assertEqual(pre_delete_receiver._database, "other") self.assertEqual(post_delete_receiver._database, "other") signals.pre_save.disconnect(sender=Person, receiver=pre_save_receiver) signals.post_save.disconnect(sender=Person, receiver=post_save_receiver) signals.pre_delete.disconnect(sender=Person, receiver=pre_delete_receiver) signals.post_delete.disconnect(sender=Person, receiver=post_delete_receiver) def test_database_arg_m2m(self): """ The m2m_changed signal has a correct database arg. 
""" # Make a receiver receiver = DatabaseReceiver() # Connect it signals.m2m_changed.connect(receiver=receiver) # Create the models that will be used for the tests b = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) p = Person.objects.create(name="Marty Alchin") # Create a copy of the models on the 'other' database to prevent # integrity errors on backends that don't defer constraints checks Book.objects.using("other").create( pk=b.pk, title=b.title, published=b.published ) Person.objects.using("other").create(pk=p.pk, name=p.name) # Test addition b.authors.add(p) self.assertEqual(receiver._database, DEFAULT_DB_ALIAS) with self.override_router(): b.authors.add(p) self.assertEqual(receiver._database, "other") # Test removal b.authors.remove(p) self.assertEqual(receiver._database, DEFAULT_DB_ALIAS) with self.override_router(): b.authors.remove(p) self.assertEqual(receiver._database, "other") # Test addition in reverse p.book_set.add(b) self.assertEqual(receiver._database, DEFAULT_DB_ALIAS) with self.override_router(): p.book_set.add(b) self.assertEqual(receiver._database, "other") # Test clearing b.authors.clear() self.assertEqual(receiver._database, DEFAULT_DB_ALIAS) with self.override_router(): b.authors.clear() self.assertEqual(receiver._database, "other") class AttributeErrorRouter: "A router to test the exception handling of ConnectionRouter" def db_for_read(self, model, **hints): raise AttributeError def db_for_write(self, model, **hints): raise AttributeError class RouterAttributeErrorTestCase(TestCase): databases = {"default", "other"} def override_router(self): return override_settings(DATABASE_ROUTERS=[AttributeErrorRouter()]) def test_attribute_error_read(self): "The AttributeError from AttributeErrorRouter bubbles up" b = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) with self.override_router(): with self.assertRaises(AttributeError): Book.objects.get(pk=b.pk) def test_attribute_error_save(self): "The AttributeError from AttributeErrorRouter bubbles up" dive = Book() dive.title = "Dive into Python" dive.published = datetime.date(2009, 5, 4) with self.override_router(): with self.assertRaises(AttributeError): dive.save() def test_attribute_error_delete(self): "The AttributeError from AttributeErrorRouter bubbles up" b = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) p = Person.objects.create(name="Marty Alchin") b.authors.set([p]) b.editor = p with self.override_router(): with self.assertRaises(AttributeError): b.delete() def test_attribute_error_m2m(self): "The AttributeError from AttributeErrorRouter bubbles up" b = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) p = Person.objects.create(name="Marty Alchin") with self.override_router(): with self.assertRaises(AttributeError): b.authors.set([p]) class ModelMetaRouter: "A router to ensure model arguments are real model classes" def db_for_write(self, model, **hints): if not hasattr(model, "_meta"): raise ValueError @override_settings(DATABASE_ROUTERS=[ModelMetaRouter()]) class RouterModelArgumentTestCase(TestCase): databases = {"default", "other"} def test_m2m_collection(self): b = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) p = Person.objects.create(name="Marty Alchin") # test add b.authors.add(p) # test remove b.authors.remove(p) # test clear b.authors.clear() # test setattr b.authors.set([p]) # test M2M collection b.delete() def 
test_foreignkey_collection(self): person = Person.objects.create(name="Bob") Pet.objects.create(owner=person, name="Wart") # test related FK collection person.delete() class SyncOnlyDefaultDatabaseRouter: def allow_migrate(self, db, app_label, **hints): return db == DEFAULT_DB_ALIAS class MigrateTestCase(TestCase): # Limit memory usage when calling 'migrate'. available_apps = [ "multiple_database", "django.contrib.auth", "django.contrib.contenttypes", ] databases = {"default", "other"} def test_migrate_to_other_database(self): """Regression test for #16039: migrate with --database option.""" cts = ContentType.objects.using("other").filter(app_label="multiple_database") count = cts.count() self.assertGreater(count, 0) cts.delete() management.call_command( "migrate", verbosity=0, interactive=False, database="other" ) self.assertEqual(cts.count(), count) def test_migrate_to_other_database_with_router(self): """Regression test for #16039: migrate with --database option.""" cts = ContentType.objects.using("other").filter(app_label="multiple_database") cts.delete() with override_settings(DATABASE_ROUTERS=[SyncOnlyDefaultDatabaseRouter()]): management.call_command( "migrate", verbosity=0, interactive=False, database="other" ) self.assertEqual(cts.count(), 0) class RouterUsed(Exception): WRITE = "write" def __init__(self, mode, model, hints): self.mode = mode self.model = model self.hints = hints class RouteForWriteTestCase(TestCase): databases = {"default", "other"} class WriteCheckRouter: def db_for_write(self, model, **hints): raise RouterUsed(mode=RouterUsed.WRITE, model=model, hints=hints) def override_router(self): return override_settings( DATABASE_ROUTERS=[RouteForWriteTestCase.WriteCheckRouter()] ) def test_fk_delete(self): owner = Person.objects.create(name="Someone") pet = Pet.objects.create(name="fido", owner=owner) with self.assertRaises(RouterUsed) as cm: with self.override_router(): pet.owner.delete() e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Person) self.assertEqual(e.hints, {"instance": owner}) def test_reverse_fk_delete(self): owner = Person.objects.create(name="Someone") to_del_qs = owner.pet_set.all() with self.assertRaises(RouterUsed) as cm: with self.override_router(): to_del_qs.delete() e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Pet) self.assertEqual(e.hints, {"instance": owner}) def test_reverse_fk_get_or_create(self): owner = Person.objects.create(name="Someone") with self.assertRaises(RouterUsed) as cm: with self.override_router(): owner.pet_set.get_or_create(name="fido") e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Pet) self.assertEqual(e.hints, {"instance": owner}) def test_reverse_fk_update(self): owner = Person.objects.create(name="Someone") Pet.objects.create(name="fido", owner=owner) with self.assertRaises(RouterUsed) as cm: with self.override_router(): owner.pet_set.update(name="max") e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Pet) self.assertEqual(e.hints, {"instance": owner}) def test_m2m_add(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) with self.assertRaises(RouterUsed) as cm: with self.override_router(): book.authors.add(auth) e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Book.authors.through) self.assertEqual(e.hints, {"instance": book}) def 
test_m2m_clear(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) book.authors.add(auth) with self.assertRaises(RouterUsed) as cm: with self.override_router(): book.authors.clear() e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Book.authors.through) self.assertEqual(e.hints, {"instance": book}) def test_m2m_delete(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) book.authors.add(auth) with self.assertRaises(RouterUsed) as cm: with self.override_router(): book.authors.all().delete() e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Person) self.assertEqual(e.hints, {"instance": book}) def test_m2m_get_or_create(self): Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) with self.assertRaises(RouterUsed) as cm: with self.override_router(): book.authors.get_or_create(name="Someone else") e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Book) self.assertEqual(e.hints, {"instance": book}) def test_m2m_remove(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) book.authors.add(auth) with self.assertRaises(RouterUsed) as cm: with self.override_router(): book.authors.remove(auth) e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Book.authors.through) self.assertEqual(e.hints, {"instance": book}) def test_m2m_update(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) book.authors.add(auth) with self.assertRaises(RouterUsed) as cm: with self.override_router(): book.authors.update(name="Different") e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Person) self.assertEqual(e.hints, {"instance": book}) def test_reverse_m2m_add(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) with self.assertRaises(RouterUsed) as cm: with self.override_router(): auth.book_set.add(book) e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Book.authors.through) self.assertEqual(e.hints, {"instance": auth}) def test_reverse_m2m_clear(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) book.authors.add(auth) with self.assertRaises(RouterUsed) as cm: with self.override_router(): auth.book_set.clear() e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Book.authors.through) self.assertEqual(e.hints, {"instance": auth}) def test_reverse_m2m_delete(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) book.authors.add(auth) with self.assertRaises(RouterUsed) as cm: with self.override_router(): auth.book_set.all().delete() e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Book) self.assertEqual(e.hints, {"instance": auth}) def test_reverse_m2m_get_or_create(self): auth = Person.objects.create(name="Someone") Book.objects.create(title="Pro Django", 
published=datetime.date(2008, 12, 16)) with self.assertRaises(RouterUsed) as cm: with self.override_router(): auth.book_set.get_or_create( title="New Book", published=datetime.datetime.now() ) e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Person) self.assertEqual(e.hints, {"instance": auth}) def test_reverse_m2m_remove(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) book.authors.add(auth) with self.assertRaises(RouterUsed) as cm: with self.override_router(): auth.book_set.remove(book) e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Book.authors.through) self.assertEqual(e.hints, {"instance": auth}) def test_reverse_m2m_update(self): auth = Person.objects.create(name="Someone") book = Book.objects.create( title="Pro Django", published=datetime.date(2008, 12, 16) ) book.authors.add(auth) with self.assertRaises(RouterUsed) as cm: with self.override_router(): auth.book_set.update(title="Different") e = cm.exception self.assertEqual(e.mode, RouterUsed.WRITE) self.assertEqual(e.model, Book) self.assertEqual(e.hints, {"instance": auth}) class NoRelationRouter: """Disallow all relations.""" def allow_relation(self, obj1, obj2, **hints): return False @override_settings(DATABASE_ROUTERS=[NoRelationRouter()]) class RelationAssignmentTests(SimpleTestCase): """allow_relation() is called with unsaved model instances.""" databases = {"default", "other"} router_prevents_msg = "the current database router prevents this relation" def test_foreign_key_relation(self): person = Person(name="Someone") pet = Pet() with self.assertRaisesMessage(ValueError, self.router_prevents_msg): pet.owner = person def test_reverse_one_to_one_relation(self): user = User(username="Someone", password="fake_hash") profile = UserProfile() with self.assertRaisesMessage(ValueError, self.router_prevents_msg): user.userprofile = profile
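

# Illustrative sketch only (not part of the test suite above): a minimal
# database router combining the four routing hooks these tests exercise
# (db_for_read, db_for_write, allow_relation, allow_migrate). The aliases
# "default" and "other" match the test settings used here; the
# "multiple_database" app label check is just an example of scoping migrations.
class ExampleReadReplicaRouter:
    """Send reads to 'other', writes to 'default', and constrain relations
    and migrations to the databases these tests know about."""

    def db_for_read(self, model, **hints):
        # Prefer the instance's current database when a hint is available.
        instance = hints.get("instance")
        if instance is not None and instance._state.db:
            return instance._state.db
        return "other"

    def db_for_write(self, model, **hints):
        # All writes are routed to the default alias.
        return "default"

    def allow_relation(self, obj1, obj2, **hints):
        # Permit relations only when both objects live on a known alias.
        return {obj1._state.db, obj2._state.db} <= {"default", "other"}

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        # Only create tables for this test app on the default database;
        # returning None leaves the decision to any other routers.
        if app_label == "multiple_database":
            return db == "default"
        return None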
0a570630f764f288a2ca43c372fe8b28822bed3c62bc7a3fcab759a357dfd4c4
import datetime from unittest import mock from django.contrib import admin from django.contrib.admin.models import LogEntry from django.contrib.admin.options import IncorrectLookupParameters from django.contrib.admin.templatetags.admin_list import pagination from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.admin.views.main import ( ALL_VAR, IS_POPUP_VAR, ORDER_VAR, PAGE_VAR, SEARCH_VAR, TO_FIELD_VAR, ) from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType from django.contrib.messages.storage.cookie import CookieStorage from django.db import DatabaseError, connection, models from django.db.models import F, Field, IntegerField from django.db.models.functions import Upper from django.db.models.lookups import Contains, Exact from django.template import Context, Template, TemplateSyntaxError from django.test import TestCase, override_settings, skipUnlessDBFeature from django.test.client import RequestFactory from django.test.utils import CaptureQueriesContext, isolate_apps, register_lookup from django.urls import reverse from django.utils import formats from .admin import ( BandAdmin, ChildAdmin, ChordsBandAdmin, ConcertAdmin, CustomPaginationAdmin, CustomPaginator, DynamicListDisplayChildAdmin, DynamicListDisplayLinksChildAdmin, DynamicListFilterChildAdmin, DynamicSearchFieldsChildAdmin, EmptyValueChildAdmin, EventAdmin, FilteredChildAdmin, GroupAdmin, InvitationAdmin, NoListDisplayLinksParentAdmin, ParentAdmin, ParentAdminTwoSearchFields, QuartetAdmin, SwallowAdmin, ) from .admin import site as custom_site from .models import ( Band, CharPK, Child, ChordsBand, ChordsMusician, Concert, CustomIdUser, Event, Genre, Group, Invitation, Membership, Musician, OrderedObject, Parent, Quartet, Swallow, SwallowOneToOne, UnorderedObject, ) def build_tbody_html(pk, href, extra_fields): return ( "<tbody><tr>" '<td class="action-checkbox">' '<input type="checkbox" name="_selected_action" value="{}" ' 'class="action-select"></td>' '<th class="field-name"><a href="{}">name</a></th>' "{}</tr></tbody>" ).format(pk, href, extra_fields) @override_settings(ROOT_URLCONF="admin_changelist.urls") class ChangeListTests(TestCase): factory = RequestFactory() @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser( username="super", email="[email protected]", password="xxx" ) def _create_superuser(self, username): return User.objects.create_superuser( username=username, email="[email protected]", password="xxx" ) def _mocked_authenticated_request(self, url, user): request = self.factory.get(url) request.user = user return request def test_repr(self): m = ChildAdmin(Child, custom_site) request = self.factory.get("/child/") request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(repr(cl), "<ChangeList: model=Child model_admin=ChildAdmin>") def test_specified_ordering_by_f_expression(self): class OrderedByFBandAdmin(admin.ModelAdmin): list_display = ["name", "genres", "nr_of_members"] ordering = ( F("nr_of_members").desc(nulls_last=True), Upper(F("name")).asc(), F("genres").asc(), ) m = OrderedByFBandAdmin(Band, custom_site) request = self.factory.get("/band/") request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.get_ordering_field_columns(), {3: "desc", 2: "asc"}) def test_specified_ordering_by_f_expression_without_asc_desc(self): class OrderedByFBandAdmin(admin.ModelAdmin): list_display = ["name", "genres", "nr_of_members"] ordering = 
(F("nr_of_members"), Upper("name"), F("genres")) m = OrderedByFBandAdmin(Band, custom_site) request = self.factory.get("/band/") request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.get_ordering_field_columns(), {3: "asc", 2: "asc"}) def test_select_related_preserved(self): """ Regression test for #10348: ChangeList.get_queryset() shouldn't overwrite a custom select_related provided by ModelAdmin.get_queryset(). """ m = ChildAdmin(Child, custom_site) request = self.factory.get("/child/") request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.query.select_related, {"parent": {}}) def test_select_related_preserved_when_multi_valued_in_search_fields(self): parent = Parent.objects.create(name="Mary") Child.objects.create(parent=parent, name="Danielle") Child.objects.create(parent=parent, name="Daniel") m = ParentAdmin(Parent, custom_site) request = self.factory.get("/parent/", data={SEARCH_VAR: "daniel"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 1) # select_related is preserved. self.assertEqual(cl.queryset.query.select_related, {"child": {}}) def test_select_related_as_tuple(self): ia = InvitationAdmin(Invitation, custom_site) request = self.factory.get("/invitation/") request.user = self.superuser cl = ia.get_changelist_instance(request) self.assertEqual(cl.queryset.query.select_related, {"player": {}}) def test_select_related_as_empty_tuple(self): ia = InvitationAdmin(Invitation, custom_site) ia.list_select_related = () request = self.factory.get("/invitation/") request.user = self.superuser cl = ia.get_changelist_instance(request) self.assertIs(cl.queryset.query.select_related, False) def test_get_select_related_custom_method(self): class GetListSelectRelatedAdmin(admin.ModelAdmin): list_display = ("band", "player") def get_list_select_related(self, request): return ("band", "player") ia = GetListSelectRelatedAdmin(Invitation, custom_site) request = self.factory.get("/invitation/") request.user = self.superuser cl = ia.get_changelist_instance(request) self.assertEqual(cl.queryset.query.select_related, {"player": {}, "band": {}}) def test_many_search_terms(self): parent = Parent.objects.create(name="Mary") Child.objects.create(parent=parent, name="Danielle") Child.objects.create(parent=parent, name="Daniel") m = ParentAdmin(Parent, custom_site) request = self.factory.get("/parent/", data={SEARCH_VAR: "daniel " * 80}) request.user = self.superuser cl = m.get_changelist_instance(request) with CaptureQueriesContext(connection) as context: object_count = cl.queryset.count() self.assertEqual(object_count, 1) self.assertEqual(context.captured_queries[0]["sql"].count("JOIN"), 1) def test_related_field_multiple_search_terms(self): """ Searches over multi-valued relationships return rows from related models only when all searched fields match that row. 
""" parent = Parent.objects.create(name="Mary") Child.objects.create(parent=parent, name="Danielle", age=18) Child.objects.create(parent=parent, name="Daniel", age=19) m = ParentAdminTwoSearchFields(Parent, custom_site) request = self.factory.get("/parent/", data={SEARCH_VAR: "danielle 19"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 0) request = self.factory.get("/parent/", data={SEARCH_VAR: "daniel 19"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 1) def test_result_list_empty_changelist_value(self): """ Regression test for #14982: EMPTY_CHANGELIST_VALUE should be honored for relationship fields """ new_child = Child.objects.create(name="name", parent=None) request = self.factory.get("/child/") request.user = self.superuser m = ChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) cl.formset = None template = Template( "{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}" ) context = Context({"cl": cl, "opts": Child._meta}) table_output = template.render(context) link = reverse("admin:admin_changelist_child_change", args=(new_child.id,)) row_html = build_tbody_html( new_child.id, link, '<td class="field-parent nowrap">-</td>' ) self.assertNotEqual( table_output.find(row_html), -1, "Failed to find expected row element: %s" % table_output, ) def test_result_list_set_empty_value_display_on_admin_site(self): """ Empty value display can be set on AdminSite. """ new_child = Child.objects.create(name="name", parent=None) request = self.factory.get("/child/") request.user = self.superuser # Set a new empty display value on AdminSite. admin.site.empty_value_display = "???" m = ChildAdmin(Child, admin.site) cl = m.get_changelist_instance(request) cl.formset = None template = Template( "{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}" ) context = Context({"cl": cl, "opts": Child._meta}) table_output = template.render(context) link = reverse("admin:admin_changelist_child_change", args=(new_child.id,)) row_html = build_tbody_html( new_child.id, link, '<td class="field-parent nowrap">???</td>' ) self.assertNotEqual( table_output.find(row_html), -1, "Failed to find expected row element: %s" % table_output, ) def test_result_list_set_empty_value_display_in_model_admin(self): """ Empty value display can be set in ModelAdmin or individual fields. """ new_child = Child.objects.create(name="name", parent=None) request = self.factory.get("/child/") request.user = self.superuser m = EmptyValueChildAdmin(Child, admin.site) cl = m.get_changelist_instance(request) cl.formset = None template = Template( "{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}" ) context = Context({"cl": cl, "opts": Child._meta}) table_output = template.render(context) link = reverse("admin:admin_changelist_child_change", args=(new_child.id,)) row_html = build_tbody_html( new_child.id, link, '<td class="field-age_display">&amp;dagger;</td>' '<td class="field-age">-empty-</td>', ) self.assertNotEqual( table_output.find(row_html), -1, "Failed to find expected row element: %s" % table_output, ) def test_result_list_html(self): """ Inclusion tag result_list generates a table when with default ModelAdmin settings. 
""" new_parent = Parent.objects.create(name="parent") new_child = Child.objects.create(name="name", parent=new_parent) request = self.factory.get("/child/") request.user = self.superuser m = ChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) cl.formset = None template = Template( "{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}" ) context = Context({"cl": cl, "opts": Child._meta}) table_output = template.render(context) link = reverse("admin:admin_changelist_child_change", args=(new_child.id,)) row_html = build_tbody_html( new_child.id, link, '<td class="field-parent nowrap">%s</td>' % new_parent ) self.assertNotEqual( table_output.find(row_html), -1, "Failed to find expected row element: %s" % table_output, ) def test_result_list_editable_html(self): """ Regression tests for #11791: Inclusion tag result_list generates a table and this checks that the items are nested within the table element tags. Also a regression test for #13599, verifies that hidden fields when list_editable is enabled are rendered in a div outside the table. """ new_parent = Parent.objects.create(name="parent") new_child = Child.objects.create(name="name", parent=new_parent) request = self.factory.get("/child/") request.user = self.superuser m = ChildAdmin(Child, custom_site) # Test with list_editable fields m.list_display = ["id", "name", "parent"] m.list_display_links = ["id"] m.list_editable = ["name"] cl = m.get_changelist_instance(request) FormSet = m.get_changelist_formset(request) cl.formset = FormSet(queryset=cl.result_list) template = Template( "{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}" ) context = Context({"cl": cl, "opts": Child._meta}) table_output = template.render(context) # make sure that hidden fields are in the correct place hiddenfields_div = ( '<div class="hiddenfields">' '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">' "</div>" ) % new_child.id self.assertInHTML( hiddenfields_div, table_output, msg_prefix="Failed to find hidden fields" ) # make sure that list editable fields are rendered in divs correctly editable_name_field = ( '<input name="form-0-name" value="name" class="vTextField" ' 'maxlength="30" type="text" id="id_form-0-name">' ) self.assertInHTML( '<td class="field-name">%s</td>' % editable_name_field, table_output, msg_prefix='Failed to find "name" list_editable field', ) def test_result_list_editable(self): """ Regression test for #14312: list_editable with pagination """ new_parent = Parent.objects.create(name="parent") for i in range(1, 201): Child.objects.create(name="name %s" % i, parent=new_parent) request = self.factory.get("/child/", data={"p": -1}) # Anything outside range request.user = self.superuser m = ChildAdmin(Child, custom_site) # Test with list_editable fields m.list_display = ["id", "name", "parent"] m.list_display_links = ["id"] m.list_editable = ["name"] with self.assertRaises(IncorrectLookupParameters): m.get_changelist_instance(request) @skipUnlessDBFeature("supports_transactions") def test_list_editable_atomicity(self): a = Swallow.objects.create(origin="Swallow A", load=4, speed=1) b = Swallow.objects.create(origin="Swallow B", load=2, speed=2) self.client.force_login(self.superuser) changelist_url = reverse("admin:admin_changelist_swallow_changelist") data = { "form-TOTAL_FORMS": "2", "form-INITIAL_FORMS": "2", "form-MIN_NUM_FORMS": "0", "form-MAX_NUM_FORMS": "1000", "form-0-uuid": str(a.pk), "form-1-uuid": str(b.pk), "form-0-load": "9.0", "form-0-speed": 
"3.0", "form-1-load": "5.0", "form-1-speed": "1.0", "_save": "Save", } with mock.patch( "django.contrib.admin.ModelAdmin.log_change", side_effect=DatabaseError ): with self.assertRaises(DatabaseError): self.client.post(changelist_url, data) # Original values are preserved. a.refresh_from_db() self.assertEqual(a.load, 4) self.assertEqual(a.speed, 1) b.refresh_from_db() self.assertEqual(b.load, 2) self.assertEqual(b.speed, 2) with mock.patch( "django.contrib.admin.ModelAdmin.log_change", side_effect=[None, DatabaseError], ): with self.assertRaises(DatabaseError): self.client.post(changelist_url, data) # Original values are preserved. a.refresh_from_db() self.assertEqual(a.load, 4) self.assertEqual(a.speed, 1) b.refresh_from_db() self.assertEqual(b.load, 2) self.assertEqual(b.speed, 2) def test_custom_paginator(self): new_parent = Parent.objects.create(name="parent") for i in range(1, 201): Child.objects.create(name="name %s" % i, parent=new_parent) request = self.factory.get("/child/") request.user = self.superuser m = CustomPaginationAdmin(Child, custom_site) cl = m.get_changelist_instance(request) cl.get_results(request) self.assertIsInstance(cl.paginator, CustomPaginator) def test_no_duplicates_for_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Basic ManyToMany. """ blues = Genre.objects.create(name="Blues") band = Band.objects.create(name="B.B. King Review", nr_of_members=11) band.genres.add(blues) band.genres.add(blues) m = BandAdmin(Band, custom_site) request = self.factory.get("/band/", data={"genres": blues.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_through_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. With an intermediate model. """ lead = Musician.objects.create(name="Vox") band = Group.objects.create(name="The Hype") Membership.objects.create(group=band, music=lead, role="lead voice") Membership.objects.create(group=band, music=lead, role="bass player") m = GroupAdmin(Group, custom_site) request = self.factory.get("/group/", data={"members": lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Group instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_through_m2m_at_second_level_in_list_filter(self): """ When using a ManyToMany in list_filter at the second level behind a ForeignKey, results shouldn't appear more than once. 
""" lead = Musician.objects.create(name="Vox") band = Group.objects.create(name="The Hype") Concert.objects.create(name="Woodstock", group=band) Membership.objects.create(group=band, music=lead, role="lead voice") Membership.objects.create(group=band, music=lead, role="bass player") m = ConcertAdmin(Concert, custom_site) request = self.factory.get("/concert/", data={"group__members": lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Concert instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_inherited_m2m_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Model managed in the admin inherits from the one that defines the relationship. """ lead = Musician.objects.create(name="John") four = Quartet.objects.create(name="The Beatles") Membership.objects.create(group=four, music=lead, role="lead voice") Membership.objects.create(group=four, music=lead, role="guitar player") m = QuartetAdmin(Quartet, custom_site) request = self.factory.get("/quartet/", data={"members": lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one Quartet instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_m2m_to_inherited_in_list_filter(self): """ Regression test for #13902: When using a ManyToMany in list_filter, results shouldn't appear more than once. Target of the relationship inherits from another. """ lead = ChordsMusician.objects.create(name="Player A") three = ChordsBand.objects.create(name="The Chords Trio") Invitation.objects.create(band=three, player=lead, instrument="guitar") Invitation.objects.create(band=three, player=lead, instrument="bass") m = ChordsBandAdmin(ChordsBand, custom_site) request = self.factory.get("/chordsband/", data={"members": lead.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) cl.get_results(request) # There's only one ChordsBand instance self.assertEqual(cl.result_count, 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_non_unique_related_object_in_list_filter(self): """ Regressions tests for #15819: If a field listed in list_filters is a non-unique related object, results shouldn't appear more than once. """ parent = Parent.objects.create(name="Mary") # Two children with the same name Child.objects.create(parent=parent, name="Daniel") Child.objects.create(parent=parent, name="Daniel") m = ParentAdmin(Parent, custom_site) request = self.factory.get("/parent/", data={"child__name": "Daniel"}) request.user = self.superuser cl = m.get_changelist_instance(request) # Exists() is applied. self.assertEqual(cl.queryset.count(), 1) # Queryset must be deletable. 
self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_changelist_search_form_validation(self): m = ConcertAdmin(Concert, custom_site) tests = [ ({SEARCH_VAR: "\x00"}, "Null characters are not allowed."), ({SEARCH_VAR: "some\x00thing"}, "Null characters are not allowed."), ] for case, error in tests: with self.subTest(case=case): request = self.factory.get("/concert/", case) request.user = self.superuser request._messages = CookieStorage(request) m.get_changelist_instance(request) messages = [m.message for m in request._messages] self.assertEqual(1, len(messages)) self.assertEqual(error, messages[0]) def test_no_duplicates_for_non_unique_related_object_in_search_fields(self): """ Regressions tests for #15819: If a field listed in search_fields is a non-unique related object, Exists() must be applied. """ parent = Parent.objects.create(name="Mary") Child.objects.create(parent=parent, name="Danielle") Child.objects.create(parent=parent, name="Daniel") m = ParentAdmin(Parent, custom_site) request = self.factory.get("/parent/", data={SEARCH_VAR: "daniel"}) request.user = self.superuser cl = m.get_changelist_instance(request) # Exists() is applied. self.assertEqual(cl.queryset.count(), 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_no_duplicates_for_many_to_many_at_second_level_in_search_fields(self): """ When using a ManyToMany in search_fields at the second level behind a ForeignKey, Exists() must be applied and results shouldn't appear more than once. """ lead = Musician.objects.create(name="Vox") band = Group.objects.create(name="The Hype") Concert.objects.create(name="Woodstock", group=band) Membership.objects.create(group=band, music=lead, role="lead voice") Membership.objects.create(group=band, music=lead, role="bass player") m = ConcertAdmin(Concert, custom_site) request = self.factory.get("/concert/", data={SEARCH_VAR: "vox"}) request.user = self.superuser cl = m.get_changelist_instance(request) # There's only one Concert instance self.assertEqual(cl.queryset.count(), 1) # Queryset must be deletable. self.assertIs(cl.queryset.query.distinct, False) cl.queryset.delete() self.assertEqual(cl.queryset.count(), 0) def test_multiple_search_fields(self): """ All rows containing each of the searched words are returned, where each word must be in one of search_fields. """ band_duo = Group.objects.create(name="Duo") band_hype = Group.objects.create(name="The Hype") mary = Musician.objects.create(name="Mary Halvorson") jonathan = Musician.objects.create(name="Jonathan Finlayson") band_duo.members.set([mary, jonathan]) Concert.objects.create(name="Tiny desk concert", group=band_duo) Concert.objects.create(name="Woodstock concert", group=band_hype) # FK lookup. concert_model_admin = ConcertAdmin(Concert, custom_site) concert_model_admin.search_fields = ["group__name", "name"] # Reverse FK lookup. group_model_admin = GroupAdmin(Group, custom_site) group_model_admin.search_fields = ["name", "concert__name", "members__name"] for search_string, result_count in ( ("Duo Concert", 1), ("Tiny Desk Concert", 1), ("Concert", 2), ("Other Concert", 0), ("Duo Woodstock", 0), ): with self.subTest(search_string=search_string): # FK lookup. 
request = self.factory.get( "/concert/", data={SEARCH_VAR: search_string} ) request.user = self.superuser concert_changelist = concert_model_admin.get_changelist_instance( request ) self.assertEqual(concert_changelist.queryset.count(), result_count) # Reverse FK lookup. request = self.factory.get("/group/", data={SEARCH_VAR: search_string}) request.user = self.superuser group_changelist = group_model_admin.get_changelist_instance(request) self.assertEqual(group_changelist.queryset.count(), result_count) # Many-to-many lookup. for search_string, result_count in ( ("Finlayson Duo Tiny", 1), ("Finlayson", 1), ("Finlayson Hype", 0), ("Jonathan Finlayson Duo", 1), ("Mary Jonathan Duo", 0), ("Oscar Finlayson Duo", 0), ): with self.subTest(search_string=search_string): request = self.factory.get("/group/", data={SEARCH_VAR: search_string}) request.user = self.superuser group_changelist = group_model_admin.get_changelist_instance(request) self.assertEqual(group_changelist.queryset.count(), result_count) def test_pk_in_search_fields(self): band = Group.objects.create(name="The Hype") Concert.objects.create(name="Woodstock", group=band) m = ConcertAdmin(Concert, custom_site) m.search_fields = ["group__pk"] request = self.factory.get("/concert/", data={SEARCH_VAR: band.pk}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 1) request = self.factory.get("/concert/", data={SEARCH_VAR: band.pk + 5}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 0) def test_builtin_lookup_in_search_fields(self): band = Group.objects.create(name="The Hype") concert = Concert.objects.create(name="Woodstock", group=band) m = ConcertAdmin(Concert, custom_site) m.search_fields = ["name__iexact"] request = self.factory.get("/", data={SEARCH_VAR: "woodstock"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [concert]) request = self.factory.get("/", data={SEARCH_VAR: "wood"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, []) def test_custom_lookup_in_search_fields(self): band = Group.objects.create(name="The Hype") concert = Concert.objects.create(name="Woodstock", group=band) m = ConcertAdmin(Concert, custom_site) m.search_fields = ["group__name__cc"] with register_lookup(Field, Contains, lookup_name="cc"): request = self.factory.get("/", data={SEARCH_VAR: "Hype"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [concert]) request = self.factory.get("/", data={SEARCH_VAR: "Woodstock"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, []) def test_spanning_relations_with_custom_lookup_in_search_fields(self): hype = Group.objects.create(name="The Hype") concert = Concert.objects.create(name="Woodstock", group=hype) vox = Musician.objects.create(name="Vox", age=20) Membership.objects.create(music=vox, group=hype) # Register a custom lookup on IntegerField to ensure that field # traversing logic in ModelAdmin.get_search_results() works. 
with register_lookup(IntegerField, Exact, lookup_name="exactly"): m = ConcertAdmin(Concert, custom_site) m.search_fields = ["group__members__age__exactly"] request = self.factory.get("/", data={SEARCH_VAR: "20"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [concert]) request = self.factory.get("/", data={SEARCH_VAR: "21"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, []) def test_custom_lookup_with_pk_shortcut(self): self.assertEqual(CharPK._meta.pk.name, "char_pk") # Not equal to 'pk'. m = admin.ModelAdmin(CustomIdUser, custom_site) abc = CharPK.objects.create(char_pk="abc") abcd = CharPK.objects.create(char_pk="abcd") m = admin.ModelAdmin(CharPK, custom_site) m.search_fields = ["pk__exact"] request = self.factory.get("/", data={SEARCH_VAR: "abc"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [abc]) request = self.factory.get("/", data={SEARCH_VAR: "abcd"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertCountEqual(cl.queryset, [abcd]) def test_no_exists_for_m2m_in_list_filter_without_params(self): """ If a ManyToManyField is in list_filter but isn't in any lookup params, the changelist's query shouldn't have Exists(). """ m = BandAdmin(Band, custom_site) for lookup_params in ({}, {"name": "test"}): request = self.factory.get("/band/", lookup_params) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertNotIn(" EXISTS", str(cl.queryset.query)) # A ManyToManyField in params does have Exists() applied. request = self.factory.get("/band/", {"genres": "0"}) request.user = self.superuser cl = m.get_changelist_instance(request) self.assertIn(" EXISTS", str(cl.queryset.query)) def test_pagination(self): """ Regression tests for #12893: Pagination in admins changelist doesn't use queryset set by modeladmin. """ parent = Parent.objects.create(name="anything") for i in range(1, 31): Child.objects.create(name="name %s" % i, parent=parent) Child.objects.create(name="filtered %s" % i, parent=parent) request = self.factory.get("/child/") request.user = self.superuser # Test default queryset m = ChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 60) self.assertEqual(cl.paginator.count, 60) self.assertEqual(list(cl.paginator.page_range), [1, 2, 3, 4, 5, 6]) # Test custom queryset m = FilteredChildAdmin(Child, custom_site) cl = m.get_changelist_instance(request) self.assertEqual(cl.queryset.count(), 30) self.assertEqual(cl.paginator.count, 30) self.assertEqual(list(cl.paginator.page_range), [1, 2, 3]) def test_computed_list_display_localization(self): """ Regression test for #13196: output of functions should be localized in the changelist. """ self.client.force_login(self.superuser) event = Event.objects.create(date=datetime.date.today()) response = self.client.get(reverse("admin:admin_changelist_event_changelist")) self.assertContains(response, formats.localize(event.date)) self.assertNotContains(response, str(event.date)) def test_dynamic_list_display(self): """ Regression tests for #14206: dynamic list_display support. 
""" parent = Parent.objects.create(name="parent") for i in range(10): Child.objects.create(name="child %s" % i, parent=parent) user_noparents = self._create_superuser("noparents") user_parents = self._create_superuser("parents") # Test with user 'noparents' m = custom_site._registry[Child] request = self._mocked_authenticated_request("/child/", user_noparents) response = m.changelist_view(request) self.assertNotContains(response, "Parent object") list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ["name", "age"]) self.assertEqual(list_display_links, ["name"]) # Test with user 'parents' m = DynamicListDisplayChildAdmin(Child, custom_site) request = self._mocked_authenticated_request("/child/", user_parents) response = m.changelist_view(request) self.assertContains(response, "Parent object") custom_site.unregister(Child) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ("parent", "name", "age")) self.assertEqual(list_display_links, ["parent"]) # Test default implementation custom_site.register(Child, ChildAdmin) m = custom_site._registry[Child] request = self._mocked_authenticated_request("/child/", user_noparents) response = m.changelist_view(request) self.assertContains(response, "Parent object") def test_show_all(self): parent = Parent.objects.create(name="anything") for i in range(1, 31): Child.objects.create(name="name %s" % i, parent=parent) Child.objects.create(name="filtered %s" % i, parent=parent) # Add "show all" parameter to request request = self.factory.get("/child/", data={ALL_VAR: ""}) request.user = self.superuser # Test valid "show all" request (number of total objects is under max) m = ChildAdmin(Child, custom_site) m.list_max_show_all = 200 # 200 is the max we'll pass to ChangeList cl = m.get_changelist_instance(request) cl.get_results(request) self.assertEqual(len(cl.result_list), 60) # Test invalid "show all" request (number of total objects over max) # falls back to paginated pages m = ChildAdmin(Child, custom_site) m.list_max_show_all = 30 # 30 is the max we'll pass to ChangeList for this test cl = m.get_changelist_instance(request) cl.get_results(request) self.assertEqual(len(cl.result_list), 10) def test_dynamic_list_display_links(self): """ Regression tests for #16257: dynamic list_display_links support. 
""" parent = Parent.objects.create(name="parent") for i in range(1, 10): Child.objects.create(id=i, name="child %s" % i, parent=parent, age=i) m = DynamicListDisplayLinksChildAdmin(Child, custom_site) superuser = self._create_superuser("superuser") request = self._mocked_authenticated_request("/child/", superuser) response = m.changelist_view(request) for i in range(1, 10): link = reverse("admin:admin_changelist_child_change", args=(i,)) self.assertContains(response, '<a href="%s">%s</a>' % (link, i)) list_display = m.get_list_display(request) list_display_links = m.get_list_display_links(request, list_display) self.assertEqual(list_display, ("parent", "name", "age")) self.assertEqual(list_display_links, ["age"]) def test_no_list_display_links(self): """#15185 -- Allow no links from the 'change list' view grid.""" p = Parent.objects.create(name="parent") m = NoListDisplayLinksParentAdmin(Parent, custom_site) superuser = self._create_superuser("superuser") request = self._mocked_authenticated_request("/parent/", superuser) response = m.changelist_view(request) link = reverse("admin:admin_changelist_parent_change", args=(p.pk,)) self.assertNotContains(response, '<a href="%s">' % link) def test_clear_all_filters_link(self): self.client.force_login(self.superuser) url = reverse("admin:auth_user_changelist") response = self.client.get(url) self.assertNotContains(response, "&#10006; Clear all filters") link = '<a href="%s">&#10006; Clear all filters</a>' for data, href in ( ({"is_staff__exact": "0"}, "?"), ( {"is_staff__exact": "0", "username__startswith": "test"}, "?username__startswith=test", ), ( {"is_staff__exact": "0", SEARCH_VAR: "test"}, "?%s=test" % SEARCH_VAR, ), ( {"is_staff__exact": "0", IS_POPUP_VAR: "id"}, "?%s=id" % IS_POPUP_VAR, ), ): with self.subTest(data=data): response = self.client.get(url, data=data) self.assertContains(response, link % href) def test_clear_all_filters_link_callable_filter(self): self.client.force_login(self.superuser) url = reverse("admin:admin_changelist_band_changelist") response = self.client.get(url) self.assertNotContains(response, "&#10006; Clear all filters") link = '<a href="%s">&#10006; Clear all filters</a>' for data, href in ( ({"nr_of_members_partition": "5"}, "?"), ( {"nr_of_members_partition": "more", "name__startswith": "test"}, "?name__startswith=test", ), ( {"nr_of_members_partition": "5", IS_POPUP_VAR: "id"}, "?%s=id" % IS_POPUP_VAR, ), ): with self.subTest(data=data): response = self.client.get(url, data=data) self.assertContains(response, link % href) def test_no_clear_all_filters_link(self): self.client.force_login(self.superuser) url = reverse("admin:auth_user_changelist") link = ">&#10006; Clear all filters</a>" for data in ( {SEARCH_VAR: "test"}, {ORDER_VAR: "-1"}, {TO_FIELD_VAR: "id"}, {PAGE_VAR: "1"}, {IS_POPUP_VAR: "1"}, {"username__startswith": "test"}, ): with self.subTest(data=data): response = self.client.get(url, data=data) self.assertNotContains(response, link) def test_tuple_list_display(self): swallow = Swallow.objects.create(origin="Africa", load="12.34", speed="22.2") swallow2 = Swallow.objects.create(origin="Africa", load="12.34", speed="22.2") swallow_o2o = SwallowOneToOne.objects.create(swallow=swallow2) model_admin = SwallowAdmin(Swallow, custom_site) superuser = self._create_superuser("superuser") request = self._mocked_authenticated_request("/swallow/", superuser) response = model_admin.changelist_view(request) # just want to ensure it doesn't blow up during rendering self.assertContains(response, 
str(swallow.origin)) self.assertContains(response, str(swallow.load)) self.assertContains(response, str(swallow.speed)) # Reverse one-to-one relations should work. self.assertContains(response, '<td class="field-swallowonetoone">-</td>') self.assertContains( response, '<td class="field-swallowonetoone">%s</td>' % swallow_o2o ) def test_multiuser_edit(self): """ Simultaneous edits of list_editable fields on the changelist by different users must not result in one user's edits creating a new object instead of modifying the correct existing object (#11313). """ # To replicate this issue, simulate the following steps: # 1. User1 opens an admin changelist with list_editable fields. # 2. User2 edits object "Foo" such that it moves to another page in # the pagination order and saves. # 3. User1 edits object "Foo" and saves. # 4. The edit made by User1 does not get applied to object "Foo" but # instead is used to create a new object (bug). # For this test, order the changelist by the 'speed' attribute and # display 3 objects per page (SwallowAdmin.list_per_page = 3). # Setup the test to reflect the DB state after step 2 where User2 has # edited the first swallow object's speed from '4' to '1'. a = Swallow.objects.create(origin="Swallow A", load=4, speed=1) b = Swallow.objects.create(origin="Swallow B", load=2, speed=2) c = Swallow.objects.create(origin="Swallow C", load=5, speed=5) d = Swallow.objects.create(origin="Swallow D", load=9, speed=9) superuser = self._create_superuser("superuser") self.client.force_login(superuser) changelist_url = reverse("admin:admin_changelist_swallow_changelist") # Send the POST from User1 for step 3. It's still using the changelist # ordering from before User2's edits in step 2. data = { "form-TOTAL_FORMS": "3", "form-INITIAL_FORMS": "3", "form-MIN_NUM_FORMS": "0", "form-MAX_NUM_FORMS": "1000", "form-0-uuid": str(d.pk), "form-1-uuid": str(c.pk), "form-2-uuid": str(a.pk), "form-0-load": "9.0", "form-0-speed": "9.0", "form-1-load": "5.0", "form-1-speed": "5.0", "form-2-load": "5.0", "form-2-speed": "4.0", "_save": "Save", } response = self.client.post( changelist_url, data, follow=True, extra={"o": "-2"} ) # The object User1 edited in step 3 is displayed on the changelist and # has the correct edits applied. self.assertContains(response, "1 swallow was changed successfully.") self.assertContains(response, a.origin) a.refresh_from_db() self.assertEqual(a.load, float(data["form-2-load"])) self.assertEqual(a.speed, float(data["form-2-speed"])) b.refresh_from_db() self.assertEqual(b.load, 2) self.assertEqual(b.speed, 2) c.refresh_from_db() self.assertEqual(c.load, float(data["form-1-load"])) self.assertEqual(c.speed, float(data["form-1-speed"])) d.refresh_from_db() self.assertEqual(d.load, float(data["form-0-load"])) self.assertEqual(d.speed, float(data["form-0-speed"])) # No new swallows were created. 
        self.assertEqual(len(Swallow.objects.all()), 4)

    def test_get_edited_object_ids(self):
        a = Swallow.objects.create(origin="Swallow A", load=4, speed=1)
        b = Swallow.objects.create(origin="Swallow B", load=2, speed=2)
        c = Swallow.objects.create(origin="Swallow C", load=5, speed=5)
        superuser = self._create_superuser("superuser")
        self.client.force_login(superuser)
        changelist_url = reverse("admin:admin_changelist_swallow_changelist")
        m = SwallowAdmin(Swallow, custom_site)
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MIN_NUM_FORMS": "0",
            "form-MAX_NUM_FORMS": "1000",
            "form-0-uuid": str(a.pk),
            "form-1-uuid": str(b.pk),
            "form-2-uuid": str(c.pk),
            "form-0-load": "9.0",
            "form-0-speed": "9.0",
            "form-1-load": "5.0",
            "form-1-speed": "5.0",
            "form-2-load": "5.0",
            "form-2-speed": "4.0",
            "_save": "Save",
        }
        request = self.factory.post(changelist_url, data=data)
        pks = m._get_edited_object_pks(request, prefix="form")
        self.assertEqual(sorted(pks), sorted([str(a.pk), str(b.pk), str(c.pk)]))

    def test_get_list_editable_queryset(self):
        a = Swallow.objects.create(origin="Swallow A", load=4, speed=1)
        Swallow.objects.create(origin="Swallow B", load=2, speed=2)
        data = {
            "form-TOTAL_FORMS": "2",
            "form-INITIAL_FORMS": "2",
            "form-MIN_NUM_FORMS": "0",
            "form-MAX_NUM_FORMS": "1000",
            "form-0-uuid": str(a.pk),
            "form-0-load": "10",
            "_save": "Save",
        }
        superuser = self._create_superuser("superuser")
        self.client.force_login(superuser)
        changelist_url = reverse("admin:admin_changelist_swallow_changelist")
        m = SwallowAdmin(Swallow, custom_site)
        request = self.factory.post(changelist_url, data=data)
        queryset = m._get_list_editable_queryset(request, prefix="form")
        self.assertEqual(queryset.count(), 1)
        data["form-0-uuid"] = "INVALD_PRIMARY_KEY"
        # The unfiltered queryset is returned if there's invalid data.
request = self.factory.post(changelist_url, data=data) queryset = m._get_list_editable_queryset(request, prefix="form") self.assertEqual(queryset.count(), 2) def test_get_list_editable_queryset_with_regex_chars_in_prefix(self): a = Swallow.objects.create(origin="Swallow A", load=4, speed=1) Swallow.objects.create(origin="Swallow B", load=2, speed=2) data = { "form$-TOTAL_FORMS": "2", "form$-INITIAL_FORMS": "2", "form$-MIN_NUM_FORMS": "0", "form$-MAX_NUM_FORMS": "1000", "form$-0-uuid": str(a.pk), "form$-0-load": "10", "_save": "Save", } superuser = self._create_superuser("superuser") self.client.force_login(superuser) changelist_url = reverse("admin:admin_changelist_swallow_changelist") m = SwallowAdmin(Swallow, custom_site) request = self.factory.post(changelist_url, data=data) queryset = m._get_list_editable_queryset(request, prefix="form$") self.assertEqual(queryset.count(), 1) def test_changelist_view_list_editable_changed_objects_uses_filter(self): """list_editable edits use a filtered queryset to limit memory usage.""" a = Swallow.objects.create(origin="Swallow A", load=4, speed=1) Swallow.objects.create(origin="Swallow B", load=2, speed=2) data = { "form-TOTAL_FORMS": "2", "form-INITIAL_FORMS": "2", "form-MIN_NUM_FORMS": "0", "form-MAX_NUM_FORMS": "1000", "form-0-uuid": str(a.pk), "form-0-load": "10", "_save": "Save", } superuser = self._create_superuser("superuser") self.client.force_login(superuser) changelist_url = reverse("admin:admin_changelist_swallow_changelist") with CaptureQueriesContext(connection) as context: response = self.client.post(changelist_url, data=data) self.assertEqual(response.status_code, 200) self.assertIn("WHERE", context.captured_queries[4]["sql"]) self.assertIn("IN", context.captured_queries[4]["sql"]) # Check only the first few characters since the UUID may have dashes. self.assertIn(str(a.pk)[:8], context.captured_queries[4]["sql"]) def test_deterministic_order_for_unordered_model(self): """ The primary key is used in the ordering of the changelist's results to guarantee a deterministic order, even when the model doesn't have any default ordering defined (#17198). """ superuser = self._create_superuser("superuser") for counter in range(1, 51): UnorderedObject.objects.create(id=counter, bool=True) class UnorderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(ascending=False): custom_site.register(UnorderedObject, UnorderedObjectAdmin) model_admin = UnorderedObjectAdmin(UnorderedObject, custom_site) counter = 0 if ascending else 51 for page in range(1, 6): request = self._mocked_authenticated_request( "/unorderedobject/?p=%s" % page, superuser ) response = model_admin.changelist_view(request) for result in response.context_data["cl"].result_list: counter += 1 if ascending else -1 self.assertEqual(result.id, counter) custom_site.unregister(UnorderedObject) # When no order is defined at all, everything is ordered by '-pk'. check_results_order() # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by -pk as well. UnorderedObjectAdmin.ordering = ["bool"] check_results_order() # When order fields are defined, including the pk itself, use them. 
UnorderedObjectAdmin.ordering = ["bool", "-pk"] check_results_order() UnorderedObjectAdmin.ordering = ["bool", "pk"] check_results_order(ascending=True) UnorderedObjectAdmin.ordering = ["-id", "bool"] check_results_order() UnorderedObjectAdmin.ordering = ["id", "bool"] check_results_order(ascending=True) def test_deterministic_order_for_model_ordered_by_its_manager(self): """ The primary key is used in the ordering of the changelist's results to guarantee a deterministic order, even when the model has a manager that defines a default ordering (#17198). """ superuser = self._create_superuser("superuser") for counter in range(1, 51): OrderedObject.objects.create(id=counter, bool=True, number=counter) class OrderedObjectAdmin(admin.ModelAdmin): list_per_page = 10 def check_results_order(ascending=False): custom_site.register(OrderedObject, OrderedObjectAdmin) model_admin = OrderedObjectAdmin(OrderedObject, custom_site) counter = 0 if ascending else 51 for page in range(1, 6): request = self._mocked_authenticated_request( "/orderedobject/?p=%s" % page, superuser ) response = model_admin.changelist_view(request) for result in response.context_data["cl"].result_list: counter += 1 if ascending else -1 self.assertEqual(result.id, counter) custom_site.unregister(OrderedObject) # When no order is defined at all, use the model's default ordering # (i.e. 'number'). check_results_order(ascending=True) # When an order field is defined but multiple records have the same # value for that field, make sure everything gets ordered by -pk as well. OrderedObjectAdmin.ordering = ["bool"] check_results_order() # When order fields are defined, including the pk itself, use them. OrderedObjectAdmin.ordering = ["bool", "-pk"] check_results_order() OrderedObjectAdmin.ordering = ["bool", "pk"] check_results_order(ascending=True) OrderedObjectAdmin.ordering = ["-id", "bool"] check_results_order() OrderedObjectAdmin.ordering = ["id", "bool"] check_results_order(ascending=True) @isolate_apps("admin_changelist") def test_total_ordering_optimization(self): class Related(models.Model): unique_field = models.BooleanField(unique=True) class Meta: ordering = ("unique_field",) class Model(models.Model): unique_field = models.BooleanField(unique=True) unique_nullable_field = models.BooleanField(unique=True, null=True) related = models.ForeignKey(Related, models.CASCADE) other_related = models.ForeignKey(Related, models.CASCADE) related_unique = models.OneToOneField(Related, models.CASCADE) field = models.BooleanField() other_field = models.BooleanField() null_field = models.BooleanField(null=True) class Meta: unique_together = { ("field", "other_field"), ("field", "null_field"), ("related", "other_related_id"), } class ModelAdmin(admin.ModelAdmin): def get_queryset(self, request): return Model.objects.none() request = self._mocked_authenticated_request("/", self.superuser) site = admin.AdminSite(name="admin") model_admin = ModelAdmin(Model, site) change_list = model_admin.get_changelist_instance(request) tests = ( ([], ["-pk"]), # Unique non-nullable field. (["unique_field"], ["unique_field"]), (["-unique_field"], ["-unique_field"]), # Unique nullable field. (["unique_nullable_field"], ["unique_nullable_field", "-pk"]), # Field. (["field"], ["field", "-pk"]), # Related field introspection is not implemented. (["related__unique_field"], ["related__unique_field", "-pk"]), # Related attname unique. (["related_unique_id"], ["related_unique_id"]), # Related ordering introspection is not implemented. 
(["related_unique"], ["related_unique", "-pk"]), # Composite unique. (["field", "-other_field"], ["field", "-other_field"]), # Composite unique nullable. (["-field", "null_field"], ["-field", "null_field", "-pk"]), # Composite unique and nullable. ( ["-field", "null_field", "other_field"], ["-field", "null_field", "other_field"], ), # Composite unique attnames. (["related_id", "-other_related_id"], ["related_id", "-other_related_id"]), # Composite unique names. (["related", "-other_related_id"], ["related", "-other_related_id", "-pk"]), ) # F() objects composite unique. total_ordering = [F("field"), F("other_field").desc(nulls_last=True)] # F() objects composite unique nullable. non_total_ordering = [F("field"), F("null_field").desc(nulls_last=True)] tests += ( (total_ordering, total_ordering), (non_total_ordering, non_total_ordering + ["-pk"]), ) for ordering, expected in tests: with self.subTest(ordering=ordering): self.assertEqual( change_list._get_deterministic_ordering(ordering), expected ) @isolate_apps("admin_changelist") def test_total_ordering_optimization_meta_constraints(self): class Related(models.Model): unique_field = models.BooleanField(unique=True) class Meta: ordering = ("unique_field",) class Model(models.Model): field_1 = models.BooleanField() field_2 = models.BooleanField() field_3 = models.BooleanField() field_4 = models.BooleanField() field_5 = models.BooleanField() field_6 = models.BooleanField() nullable_1 = models.BooleanField(null=True) nullable_2 = models.BooleanField(null=True) related_1 = models.ForeignKey(Related, models.CASCADE) related_2 = models.ForeignKey(Related, models.CASCADE) related_3 = models.ForeignKey(Related, models.CASCADE) related_4 = models.ForeignKey(Related, models.CASCADE) class Meta: constraints = [ *[ models.UniqueConstraint(fields=fields, name="".join(fields)) for fields in ( ["field_1"], ["nullable_1"], ["related_1"], ["related_2_id"], ["field_2", "field_3"], ["field_2", "nullable_2"], ["field_2", "related_3"], ["field_3", "related_4_id"], ) ], models.CheckConstraint(check=models.Q(id__gt=0), name="foo"), models.UniqueConstraint( fields=["field_5"], condition=models.Q(id__gt=10), name="total_ordering_1", ), models.UniqueConstraint( fields=["field_6"], condition=models.Q(), name="total_ordering", ), ] class ModelAdmin(admin.ModelAdmin): def get_queryset(self, request): return Model.objects.none() request = self._mocked_authenticated_request("/", self.superuser) site = admin.AdminSite(name="admin") model_admin = ModelAdmin(Model, site) change_list = model_admin.get_changelist_instance(request) tests = ( # Unique non-nullable field. (["field_1"], ["field_1"]), # Unique nullable field. (["nullable_1"], ["nullable_1", "-pk"]), # Related attname unique. (["related_1_id"], ["related_1_id"]), (["related_2_id"], ["related_2_id"]), # Related ordering introspection is not implemented. (["related_1"], ["related_1", "-pk"]), # Composite unique. (["-field_2", "field_3"], ["-field_2", "field_3"]), # Composite unique nullable. (["field_2", "-nullable_2"], ["field_2", "-nullable_2", "-pk"]), # Composite unique and nullable. ( ["field_2", "-nullable_2", "field_3"], ["field_2", "-nullable_2", "field_3"], ), # Composite field and related field name. (["field_2", "-related_3"], ["field_2", "-related_3", "-pk"]), (["field_3", "related_4"], ["field_3", "related_4", "-pk"]), # Composite field and related field attname. 
(["field_2", "related_3_id"], ["field_2", "related_3_id"]), (["field_3", "-related_4_id"], ["field_3", "-related_4_id"]), # Partial unique constraint is ignored. (["field_5"], ["field_5", "-pk"]), # Unique constraint with an empty condition. (["field_6"], ["field_6"]), ) for ordering, expected in tests: with self.subTest(ordering=ordering): self.assertEqual( change_list._get_deterministic_ordering(ordering), expected ) def test_dynamic_list_filter(self): """ Regression tests for ticket #17646: dynamic list_filter support. """ parent = Parent.objects.create(name="parent") for i in range(10): Child.objects.create(name="child %s" % i, parent=parent) user_noparents = self._create_superuser("noparents") user_parents = self._create_superuser("parents") # Test with user 'noparents' m = DynamicListFilterChildAdmin(Child, custom_site) request = self._mocked_authenticated_request("/child/", user_noparents) response = m.changelist_view(request) self.assertEqual(response.context_data["cl"].list_filter, ["name", "age"]) # Test with user 'parents' m = DynamicListFilterChildAdmin(Child, custom_site) request = self._mocked_authenticated_request("/child/", user_parents) response = m.changelist_view(request) self.assertEqual( response.context_data["cl"].list_filter, ("parent", "name", "age") ) def test_dynamic_search_fields(self): child = self._create_superuser("child") m = DynamicSearchFieldsChildAdmin(Child, custom_site) request = self._mocked_authenticated_request("/child/", child) response = m.changelist_view(request) self.assertEqual(response.context_data["cl"].search_fields, ("name", "age")) def test_pagination_page_range(self): """ Regression tests for ticket #15653: ensure the number of pages generated for changelist views are correct. """ # instantiating and setting up ChangeList object m = GroupAdmin(Group, custom_site) request = self.factory.get("/group/") request.user = self.superuser cl = m.get_changelist_instance(request) cl.list_per_page = 10 ELLIPSIS = cl.paginator.ELLIPSIS for number, pages, expected in [ (1, 1, []), (1, 2, [1, 2]), (6, 11, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]), (6, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]), (6, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, ELLIPSIS, 12, 13]), (7, 12, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]), (7, 13, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]), (7, 14, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ELLIPSIS, 13, 14]), (8, 13, [1, 2, ELLIPSIS, 5, 6, 7, 8, 9, 10, 11, 12, 13]), (8, 14, [1, 2, ELLIPSIS, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]), (8, 15, [1, 2, ELLIPSIS, 5, 6, 7, 8, 9, 10, 11, ELLIPSIS, 14, 15]), ]: with self.subTest(number=number, pages=pages): # assuming exactly `pages * cl.list_per_page` objects Group.objects.all().delete() for i in range(pages * cl.list_per_page): Group.objects.create(name="test band") # setting page number and calculating page range cl.page_num = number cl.get_results(request) self.assertEqual(list(pagination(cl)["page_range"]), expected) def test_object_tools_displayed_no_add_permission(self): """ When ModelAdmin.has_add_permission() returns False, the object-tools block is still shown. """ superuser = self._create_superuser("superuser") m = EventAdmin(Event, custom_site) request = self._mocked_authenticated_request("/event/", superuser) self.assertFalse(m.has_add_permission(request)) response = m.changelist_view(request) self.assertIn('<ul class="object-tools">', response.rendered_content) # The "Add" button inside the object-tools shouldn't appear. 
self.assertNotIn("Add ", response.rendered_content) def test_search_help_text(self): superuser = self._create_superuser("superuser") m = BandAdmin(Band, custom_site) # search_fields without search_help_text. m.search_fields = ["name"] request = self._mocked_authenticated_request("/band/", superuser) response = m.changelist_view(request) self.assertIsNone(response.context_data["cl"].search_help_text) self.assertNotContains(response, '<div class="help id="searchbar_helptext">') # search_fields with search_help_text. m.search_help_text = "Search help text" request = self._mocked_authenticated_request("/band/", superuser) response = m.changelist_view(request) self.assertEqual( response.context_data["cl"].search_help_text, "Search help text" ) self.assertContains( response, '<div class="help" id="searchbar_helptext">Search help text</div>' ) self.assertContains( response, '<input type="text" size="40" name="q" value="" id="searchbar" ' 'aria-describedby="searchbar_helptext">', ) class GetAdminLogTests(TestCase): def test_custom_user_pk_not_named_id(self): """ {% get_admin_log %} works if the user model's primary key isn't named 'id'. """ context = Context({"user": CustomIdUser()}) template = Template( "{% load log %}{% get_admin_log 10 as admin_log for_user user %}" ) # This template tag just logs. self.assertEqual(template.render(context), "") def test_no_user(self): """{% get_admin_log %} works without specifying a user.""" user = User(username="jondoe", password="secret", email="[email protected]") user.save() ct = ContentType.objects.get_for_model(User) LogEntry.objects.log_action(user.pk, ct.pk, user.pk, repr(user), 1) t = Template( "{% load log %}" "{% get_admin_log 100 as admin_log %}" "{% for entry in admin_log %}" "{{ entry|safe }}" "{% endfor %}" ) self.assertEqual(t.render(Context({})), "Added “<User: jondoe>”.") def test_missing_args(self): msg = "'get_admin_log' statements require two arguments" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template("{% load log %}{% get_admin_log 10 as %}") def test_non_integer_limit(self): msg = "First argument to 'get_admin_log' must be an integer" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template( '{% load log %}{% get_admin_log "10" as admin_log for_user user %}' ) def test_without_as(self): msg = "Second argument to 'get_admin_log' must be 'as'" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template("{% load log %}{% get_admin_log 10 ad admin_log for_user user %}") def test_without_for_user(self): msg = "Fourth argument to 'get_admin_log' must be 'for_user'" with self.assertRaisesMessage(TemplateSyntaxError, msg): Template("{% load log %}{% get_admin_log 10 as admin_log foruser user %}") @override_settings(ROOT_URLCONF="admin_changelist.urls") class SeleniumTests(AdminSeleniumTestCase): available_apps = ["admin_changelist"] + AdminSeleniumTestCase.available_apps def setUp(self): User.objects.create_superuser(username="super", password="secret", email=None) def test_add_row_selection(self): """ The status line for selected rows gets updated correctly (#22038). 
""" from selenium.webdriver.common.by import By self.admin_login(username="super", password="secret") self.selenium.get(self.live_server_url + reverse("admin:auth_user_changelist")) form_id = "#changelist-form" # Test amount of rows in the Changelist rows = self.selenium.find_elements( By.CSS_SELECTOR, "%s #result_list tbody tr" % form_id ) self.assertEqual(len(rows), 1) row = rows[0] selection_indicator = self.selenium.find_element( By.CSS_SELECTOR, "%s .action-counter" % form_id ) all_selector = self.selenium.find_element(By.ID, "action-toggle") row_selector = self.selenium.find_element( By.CSS_SELECTOR, "%s #result_list tbody tr:first-child .action-select" % form_id, ) # Test current selection self.assertEqual(selection_indicator.text, "0 of 1 selected") self.assertIs(all_selector.get_property("checked"), False) self.assertEqual(row.get_attribute("class"), "") # Select a row and check again row_selector.click() self.assertEqual(selection_indicator.text, "1 of 1 selected") self.assertIs(all_selector.get_property("checked"), True) self.assertEqual(row.get_attribute("class"), "selected") # Deselect a row and check again row_selector.click() self.assertEqual(selection_indicator.text, "0 of 1 selected") self.assertIs(all_selector.get_property("checked"), False) self.assertEqual(row.get_attribute("class"), "") def test_modifier_allows_multiple_section(self): """ Selecting a row and then selecting another row whilst holding shift should select all rows in-between. """ from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys Parent.objects.bulk_create([Parent(name="parent%d" % i) for i in range(5)]) self.admin_login(username="super", password="secret") self.selenium.get( self.live_server_url + reverse("admin:admin_changelist_parent_changelist") ) checkboxes = self.selenium.find_elements( By.CSS_SELECTOR, "tr input.action-select" ) self.assertEqual(len(checkboxes), 5) for c in checkboxes: self.assertIs(c.get_property("checked"), False) # Check first row. Hold-shift and check next-to-last row. 
checkboxes[0].click() ActionChains(self.selenium).key_down(Keys.SHIFT).click(checkboxes[-2]).key_up( Keys.SHIFT ).perform() for c in checkboxes[:-2]: self.assertIs(c.get_property("checked"), True) self.assertIs(checkboxes[-1].get_property("checked"), False) def test_select_all_across_pages(self): from selenium.webdriver.common.by import By Parent.objects.bulk_create([Parent(name="parent%d" % i) for i in range(101)]) self.admin_login(username="super", password="secret") self.selenium.get( self.live_server_url + reverse("admin:admin_changelist_parent_changelist") ) selection_indicator = self.selenium.find_element( By.CSS_SELECTOR, ".action-counter" ) select_all_indicator = self.selenium.find_element( By.CSS_SELECTOR, ".actions .all" ) question = self.selenium.find_element(By.CSS_SELECTOR, ".actions > .question") clear = self.selenium.find_element(By.CSS_SELECTOR, ".actions > .clear") select_all = self.selenium.find_element(By.ID, "action-toggle") select_across = self.selenium.find_elements(By.NAME, "select_across") self.assertIs(question.is_displayed(), False) self.assertIs(clear.is_displayed(), False) self.assertIs(select_all.get_property("checked"), False) for hidden_input in select_across: self.assertEqual(hidden_input.get_property("value"), "0") self.assertIs(selection_indicator.is_displayed(), True) self.assertEqual(selection_indicator.text, "0 of 100 selected") self.assertIs(select_all_indicator.is_displayed(), False) select_all.click() self.assertIs(question.is_displayed(), True) self.assertIs(clear.is_displayed(), False) self.assertIs(select_all.get_property("checked"), True) for hidden_input in select_across: self.assertEqual(hidden_input.get_property("value"), "0") self.assertIs(selection_indicator.is_displayed(), True) self.assertEqual(selection_indicator.text, "100 of 100 selected") self.assertIs(select_all_indicator.is_displayed(), False) question.click() self.assertIs(question.is_displayed(), False) self.assertIs(clear.is_displayed(), True) self.assertIs(select_all.get_property("checked"), True) for hidden_input in select_across: self.assertEqual(hidden_input.get_property("value"), "1") self.assertIs(selection_indicator.is_displayed(), False) self.assertIs(select_all_indicator.is_displayed(), True) clear.click() self.assertIs(question.is_displayed(), False) self.assertIs(clear.is_displayed(), False) self.assertIs(select_all.get_property("checked"), False) for hidden_input in select_across: self.assertEqual(hidden_input.get_property("value"), "0") self.assertIs(selection_indicator.is_displayed(), True) self.assertEqual(selection_indicator.text, "0 of 100 selected") self.assertIs(select_all_indicator.is_displayed(), False) def test_actions_warn_on_pending_edits(self): from selenium.webdriver.common.by import By Parent.objects.create(name="foo") self.admin_login(username="super", password="secret") self.selenium.get( self.live_server_url + reverse("admin:admin_changelist_parent_changelist") ) name_input = self.selenium.find_element(By.ID, "id_form-0-name") name_input.clear() name_input.send_keys("bar") self.selenium.find_element(By.ID, "action-toggle").click() self.selenium.find_element(By.NAME, "index").click() # Go alert = self.selenium.switch_to.alert try: self.assertEqual( alert.text, "You have unsaved changes on individual editable fields. 
If you " "run an action, your unsaved changes will be lost.", ) finally: alert.dismiss() def test_save_with_changes_warns_on_pending_action(self): from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select Parent.objects.create(name="parent") self.admin_login(username="super", password="secret") self.selenium.get( self.live_server_url + reverse("admin:admin_changelist_parent_changelist") ) name_input = self.selenium.find_element(By.ID, "id_form-0-name") name_input.clear() name_input.send_keys("other name") Select(self.selenium.find_element(By.NAME, "action")).select_by_value( "delete_selected" ) self.selenium.find_element(By.NAME, "_save").click() alert = self.selenium.switch_to.alert try: self.assertEqual( alert.text, "You have selected an action, but you haven’t saved your " "changes to individual fields yet. Please click OK to save. " "You’ll need to re-run the action.", ) finally: alert.dismiss() def test_save_without_changes_warns_on_pending_action(self): from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select Parent.objects.create(name="parent") self.admin_login(username="super", password="secret") self.selenium.get( self.live_server_url + reverse("admin:admin_changelist_parent_changelist") ) Select(self.selenium.find_element(By.NAME, "action")).select_by_value( "delete_selected" ) self.selenium.find_element(By.NAME, "_save").click() alert = self.selenium.switch_to.alert try: self.assertEqual( alert.text, "You have selected an action, and you haven’t made any " "changes on individual fields. You’re probably looking for " "the Go button rather than the Save button.", ) finally: alert.dismiss() def test_collapse_filters(self): from selenium.webdriver.common.by import By self.admin_login(username="super", password="secret") self.selenium.get(self.live_server_url + reverse("admin:auth_user_changelist")) # The UserAdmin has 3 field filters by default: "staff status", # "superuser status", and "active". details = self.selenium.find_elements(By.CSS_SELECTOR, "details") # All filters are opened at first. for detail in details: self.assertTrue(detail.get_attribute("open")) # Collapse "staff' and "superuser" filters. for detail in details[:2]: summary = detail.find_element(By.CSS_SELECTOR, "summary") summary.click() self.assertFalse(detail.get_attribute("open")) # Filters are in the same state after refresh. self.selenium.refresh() self.assertFalse( self.selenium.find_element( By.CSS_SELECTOR, "[data-filter-title='staff status']" ).get_attribute("open") ) self.assertFalse( self.selenium.find_element( By.CSS_SELECTOR, "[data-filter-title='superuser status']" ).get_attribute("open") ) self.assertTrue( self.selenium.find_element( By.CSS_SELECTOR, "[data-filter-title='active']" ).get_attribute("open") ) # Collapse a filter on another view (Bands). self.selenium.get( self.live_server_url + reverse("admin:admin_changelist_band_changelist") ) self.selenium.find_element(By.CSS_SELECTOR, "summary").click() # Go to Users view and then, back again to Bands view. self.selenium.get(self.live_server_url + reverse("admin:auth_user_changelist")) self.selenium.get( self.live_server_url + reverse("admin:admin_changelist_band_changelist") ) # The filter remains in the same state. 
        self.assertFalse(
            self.selenium.find_element(
                By.CSS_SELECTOR,
                "[data-filter-title='number of members']",
            ).get_attribute("open")
        )

    def test_collapse_filter_with_unescaped_title(self):
        from selenium.webdriver.common.by import By

        self.admin_login(username="super", password="secret")
        changelist_url = reverse("admin:admin_changelist_proxyuser_changelist")
        self.selenium.get(self.live_server_url + changelist_url)
        # Title is escaped.
        filter_title = self.selenium.find_element(
            By.CSS_SELECTOR, "[data-filter-title='It\\'s OK']"
        )
        filter_title.find_element(By.CSS_SELECTOR, "summary").click()
        self.assertFalse(filter_title.get_attribute("open"))
        # Filter is in the same state after refresh.
        self.selenium.refresh()
        self.assertFalse(
            self.selenium.find_element(
                By.CSS_SELECTOR, "[data-filter-title='It\\'s OK']"
            ).get_attribute("open")
        )
4485a26b2fcdccedd1870bcaffc63c80233ce72e748339aa8f09739d37cd9200
import uuid

from django.contrib.auth.models import User
from django.db import models


class Event(models.Model):
    # Oracle can have problems with a column named "date"
    date = models.DateField(db_column="event_date")


class Parent(models.Model):
    name = models.CharField(max_length=128)


class Child(models.Model):
    parent = models.ForeignKey(Parent, models.SET_NULL, editable=False, null=True)
    name = models.CharField(max_length=30, blank=True)
    age = models.IntegerField(null=True, blank=True)


class Genre(models.Model):
    name = models.CharField(max_length=20)


class Band(models.Model):
    name = models.CharField(max_length=20)
    nr_of_members = models.PositiveIntegerField()
    genres = models.ManyToManyField(Genre)


class Musician(models.Model):
    name = models.CharField(max_length=30)
    age = models.IntegerField(null=True, blank=True)

    def __str__(self):
        return self.name


class Group(models.Model):
    name = models.CharField(max_length=30)
    members = models.ManyToManyField(Musician, through="Membership")

    def __str__(self):
        return self.name


class Concert(models.Model):
    name = models.CharField(max_length=30)
    group = models.ForeignKey(Group, models.CASCADE)


class Membership(models.Model):
    music = models.ForeignKey(Musician, models.CASCADE)
    group = models.ForeignKey(Group, models.CASCADE)
    role = models.CharField(max_length=15)


class Quartet(Group):
    pass


class ChordsMusician(Musician):
    pass


class ChordsBand(models.Model):
    name = models.CharField(max_length=30)
    members = models.ManyToManyField(ChordsMusician, through="Invitation")


class Invitation(models.Model):
    player = models.ForeignKey(ChordsMusician, models.CASCADE)
    band = models.ForeignKey(ChordsBand, models.CASCADE)
    instrument = models.CharField(max_length=15)


class Swallow(models.Model):
    uuid = models.UUIDField(primary_key=True, default=uuid.uuid4)
    origin = models.CharField(max_length=255)
    load = models.FloatField()
    speed = models.FloatField()

    class Meta:
        ordering = ("speed", "load")


class SwallowOneToOne(models.Model):
    swallow = models.OneToOneField(Swallow, models.CASCADE)


class UnorderedObject(models.Model):
    """
    Model without any defined `Meta.ordering`. Refs #17198.
    """

    bool = models.BooleanField(default=True)


class OrderedObjectManager(models.Manager):
    def get_queryset(self):
        return super().get_queryset().order_by("number")


class OrderedObject(models.Model):
    """
    Model with Manager that defines a default order. Refs #17198.
    """

    name = models.CharField(max_length=255)
    bool = models.BooleanField(default=True)
    number = models.IntegerField(default=0, db_column="number_val")

    objects = OrderedObjectManager()


class CustomIdUser(models.Model):
    uuid = models.AutoField(primary_key=True)


class CharPK(models.Model):
    char_pk = models.CharField(max_length=100, primary_key=True)


class ProxyUser(User):
    class Meta:
        proxy = True
302a089ad03c7e4480e8c5ad6448d328ee308c0989c52a913b029577215ef9de
from django.contrib import admin from django.contrib.auth.admin import UserAdmin from django.contrib.auth.models import User from django.core.paginator import Paginator from .models import Band, Child, Event, Parent, ProxyUser, Swallow site = admin.AdminSite(name="admin") site.register(User, UserAdmin) class CustomPaginator(Paginator): def __init__(self, queryset, page_size, orphans=0, allow_empty_first_page=True): super().__init__( queryset, 5, orphans=2, allow_empty_first_page=allow_empty_first_page ) class EventAdmin(admin.ModelAdmin): date_hierarchy = "date" list_display = ["event_date_func"] @admin.display def event_date_func(self, event): return event.date def has_add_permission(self, request): return False site.register(Event, EventAdmin) class ParentAdmin(admin.ModelAdmin): list_filter = ["child__name"] search_fields = ["child__name"] list_select_related = ["child"] class ParentAdminTwoSearchFields(admin.ModelAdmin): list_filter = ["child__name"] search_fields = ["child__name", "child__age"] list_select_related = ["child"] class ChildAdmin(admin.ModelAdmin): list_display = ["name", "parent"] list_per_page = 10 list_filter = ["parent", "age"] def get_queryset(self, request): return super().get_queryset(request).select_related("parent") class CustomPaginationAdmin(ChildAdmin): paginator = CustomPaginator class FilteredChildAdmin(admin.ModelAdmin): list_display = ["name", "parent"] list_per_page = 10 def get_queryset(self, request): return super().get_queryset(request).filter(name__contains="filtered") class BandAdmin(admin.ModelAdmin): list_filter = ["genres"] class NrOfMembersFilter(admin.SimpleListFilter): title = "number of members" parameter_name = "nr_of_members_partition" def lookups(self, request, model_admin): return [ ("5", "0 - 5"), ("more", "more than 5"), ] def queryset(self, request, queryset): value = self.value() if value == "5": return queryset.filter(nr_of_members__lte=5) if value == "more": return queryset.filter(nr_of_members__gt=5) class BandCallableFilterAdmin(admin.ModelAdmin): list_filter = [NrOfMembersFilter] site.register(Band, BandCallableFilterAdmin) class GroupAdmin(admin.ModelAdmin): list_filter = ["members"] class ConcertAdmin(admin.ModelAdmin): list_filter = ["group__members"] search_fields = ["group__members__name"] class QuartetAdmin(admin.ModelAdmin): list_filter = ["members"] class ChordsBandAdmin(admin.ModelAdmin): list_filter = ["members"] class InvitationAdmin(admin.ModelAdmin): list_display = ("band", "player") list_select_related = ("player",) class DynamicListDisplayChildAdmin(admin.ModelAdmin): list_display = ("parent", "name", "age") def get_list_display(self, request): my_list_display = super().get_list_display(request) if request.user.username == "noparents": my_list_display = list(my_list_display) my_list_display.remove("parent") return my_list_display class DynamicListDisplayLinksChildAdmin(admin.ModelAdmin): list_display = ("parent", "name", "age") list_display_links = ["parent", "name"] def get_list_display_links(self, request, list_display): return ["age"] site.register(Child, DynamicListDisplayChildAdmin) class NoListDisplayLinksParentAdmin(admin.ModelAdmin): list_display_links = None list_display = ["name"] list_editable = ["name"] actions_on_bottom = True site.register(Parent, NoListDisplayLinksParentAdmin) class SwallowAdmin(admin.ModelAdmin): actions = None # prevent ['action_checkbox'] + list(list_display) list_display = ("origin", "load", "speed", "swallowonetoone") list_editable = ["load", "speed"] list_per_page = 3 
site.register(Swallow, SwallowAdmin)


class DynamicListFilterChildAdmin(admin.ModelAdmin):
    list_filter = ("parent", "name", "age")

    def get_list_filter(self, request):
        my_list_filter = super().get_list_filter(request)
        if request.user.username == "noparents":
            my_list_filter = list(my_list_filter)
            my_list_filter.remove("parent")
        return my_list_filter


class DynamicSearchFieldsChildAdmin(admin.ModelAdmin):
    search_fields = ("name",)

    def get_search_fields(self, request):
        search_fields = super().get_search_fields(request)
        search_fields += ("age",)
        return search_fields


class EmptyValueChildAdmin(admin.ModelAdmin):
    empty_value_display = "-empty-"
    list_display = ("name", "age_display", "age")

    @admin.display(empty_value="&dagger;")
    def age_display(self, obj):
        return obj.age


class UnescapedTitleFilter(admin.SimpleListFilter):
    title = "It's OK"
    parameter_name = "is_active"

    def lookups(self, request, model_admin):
        return [("yes", "yes"), ("no", "no")]

    def queryset(self, request, queryset):
        if self.value() == "yes":
            return queryset.filter(is_active=True)
        else:
            return queryset.filter(is_active=False)


class CustomUserAdmin(UserAdmin):
    list_filter = [UnescapedTitleFilter]


site.register(ProxyUser, CustomUserAdmin)
a3cd94b83a8f12e1088fe2b8539bab8abe0d23114882ab61b08b32c6bf43afb9
from io import StringIO from django.apps import apps from django.core import management from django.db import migrations from django.db.models import signals from django.test import TransactionTestCase, override_settings APP_CONFIG = apps.get_app_config("migrate_signals") SIGNAL_ARGS = [ "app_config", "verbosity", "interactive", "using", "stdout", "plan", "apps", ] MIGRATE_DATABASE = "default" MIGRATE_VERBOSITY = 0 MIGRATE_INTERACTIVE = False class Receiver: def __init__(self, signal): self.call_counter = 0 self.call_args = None signal.connect(self, sender=APP_CONFIG) def __call__(self, signal, sender, **kwargs): self.call_counter += 1 self.call_args = kwargs class OneTimeReceiver: """ Special receiver for handle the fact that test runner calls migrate for several databases and several times for some of them. """ def __init__(self, signal): self.signal = signal self.call_counter = 0 self.call_args = None self.signal.connect(self, sender=APP_CONFIG) def __call__(self, signal, sender, **kwargs): # Although test runner calls migrate for several databases, # testing for only one of them is quite sufficient. if kwargs["using"] == MIGRATE_DATABASE: self.call_counter += 1 self.call_args = kwargs # we need to test only one call of migrate self.signal.disconnect(self, sender=APP_CONFIG) # We connect receiver here and not in unit test code because we need to # connect receiver before test runner creates database. That is, sequence of # actions would be: # # 1. Test runner imports this module. # 2. We connect receiver. # 3. Test runner calls migrate for create default database. # 4. Test runner execute our unit test code. pre_migrate_receiver = OneTimeReceiver(signals.pre_migrate) post_migrate_receiver = OneTimeReceiver(signals.post_migrate) class MigrateSignalTests(TransactionTestCase): available_apps = ["migrate_signals"] def test_call_time(self): self.assertEqual(pre_migrate_receiver.call_counter, 1) self.assertEqual(post_migrate_receiver.call_counter, 1) def test_args(self): pre_migrate_receiver = Receiver(signals.pre_migrate) post_migrate_receiver = Receiver(signals.post_migrate) management.call_command( "migrate", database=MIGRATE_DATABASE, verbosity=MIGRATE_VERBOSITY, interactive=MIGRATE_INTERACTIVE, stdout=StringIO("test_args"), ) for receiver in [pre_migrate_receiver, post_migrate_receiver]: with self.subTest(receiver=receiver): args = receiver.call_args self.assertEqual(receiver.call_counter, 1) self.assertEqual(set(args), set(SIGNAL_ARGS)) self.assertEqual(args["app_config"], APP_CONFIG) self.assertEqual(args["verbosity"], MIGRATE_VERBOSITY) self.assertEqual(args["interactive"], MIGRATE_INTERACTIVE) self.assertEqual(args["using"], "default") self.assertIn("test_args", args["stdout"].getvalue()) self.assertEqual(args["plan"], []) self.assertIsInstance(args["apps"], migrations.state.StateApps) @override_settings( MIGRATION_MODULES={"migrate_signals": "migrate_signals.custom_migrations"} ) def test_migrations_only(self): """ If all apps have migrations, migration signals should be sent. 
""" pre_migrate_receiver = Receiver(signals.pre_migrate) post_migrate_receiver = Receiver(signals.post_migrate) management.call_command( "migrate", database=MIGRATE_DATABASE, verbosity=MIGRATE_VERBOSITY, interactive=MIGRATE_INTERACTIVE, ) for receiver in [pre_migrate_receiver, post_migrate_receiver]: args = receiver.call_args self.assertEqual(receiver.call_counter, 1) self.assertEqual(set(args), set(SIGNAL_ARGS)) self.assertEqual(args["app_config"], APP_CONFIG) self.assertEqual(args["verbosity"], MIGRATE_VERBOSITY) self.assertEqual(args["interactive"], MIGRATE_INTERACTIVE) self.assertEqual(args["using"], "default") self.assertIsInstance(args["plan"][0][0], migrations.Migration) # The migration isn't applied backward. self.assertFalse(args["plan"][0][1]) self.assertIsInstance(args["apps"], migrations.state.StateApps) self.assertEqual(pre_migrate_receiver.call_args["apps"].get_models(), []) self.assertEqual( [ model._meta.label for model in post_migrate_receiver.call_args["apps"].get_models() ], ["migrate_signals.Signal"], ) # Migrating with an empty plan. pre_migrate_receiver = Receiver(signals.pre_migrate) post_migrate_receiver = Receiver(signals.post_migrate) management.call_command( "migrate", database=MIGRATE_DATABASE, verbosity=MIGRATE_VERBOSITY, interactive=MIGRATE_INTERACTIVE, ) self.assertEqual( [ model._meta.label for model in pre_migrate_receiver.call_args["apps"].get_models() ], ["migrate_signals.Signal"], ) self.assertEqual( [ model._meta.label for model in post_migrate_receiver.call_args["apps"].get_models() ], ["migrate_signals.Signal"], ) # Migrating with an empty plan and --check doesn't emit signals. pre_migrate_receiver = Receiver(signals.pre_migrate) post_migrate_receiver = Receiver(signals.post_migrate) management.call_command( "migrate", database=MIGRATE_DATABASE, verbosity=MIGRATE_VERBOSITY, interactive=MIGRATE_INTERACTIVE, check_unapplied=True, ) self.assertEqual(pre_migrate_receiver.call_counter, 0) self.assertEqual(post_migrate_receiver.call_counter, 0)
2c9e4d3e00ad4b0ecf1a9a0659d8e98823527e956fcb963111554fd4a2785a44
import gettext import os import re import zoneinfo from datetime import datetime, timedelta from importlib import import_module from unittest import skipUnless from django import forms from django.conf import settings from django.contrib import admin from django.contrib.admin import widgets from django.contrib.admin.tests import AdminSeleniumTestCase from django.contrib.auth.models import User from django.core.files.storage import default_storage from django.core.files.uploadedfile import SimpleUploadedFile from django.db.models import ( CharField, DateField, DateTimeField, ForeignKey, ManyToManyField, UUIDField, ) from django.test import SimpleTestCase, TestCase, override_settings from django.urls import reverse from django.utils import translation from .models import ( Advisor, Album, Band, Bee, Car, Company, Event, Honeycomb, Image, Individual, Inventory, Member, MyFileField, Profile, ReleaseEvent, School, Student, UnsafeLimitChoicesTo, VideoStream, ) from .widgetadmin import site as widget_admin_site class TestDataMixin: @classmethod def setUpTestData(cls): cls.superuser = User.objects.create_superuser( username="super", password="secret", email=None ) cls.u2 = User.objects.create_user(username="testser", password="secret") Car.objects.create(owner=cls.superuser, make="Volkswagen", model="Passat") Car.objects.create(owner=cls.u2, make="BMW", model="M3") class AdminFormfieldForDBFieldTests(SimpleTestCase): """ Tests for correct behavior of ModelAdmin.formfield_for_dbfield """ def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides): """ Helper to call formfield_for_dbfield for a given model and field name and verify that the returned formfield is appropriate. """ # Override any settings on the model admin class MyModelAdmin(admin.ModelAdmin): pass for k in admin_overrides: setattr(MyModelAdmin, k, admin_overrides[k]) # Construct the admin, and ask it for a formfield ma = MyModelAdmin(model, admin.site) ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None) # "unwrap" the widget wrapper, if needed if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper): widget = ff.widget.widget else: widget = ff.widget self.assertIsInstance(widget, widgetclass) # Return the formfield so that other tests can continue return ff def test_DateField(self): self.assertFormfield(Event, "start_date", widgets.AdminDateWidget) def test_DateTimeField(self): self.assertFormfield(Member, "birthdate", widgets.AdminSplitDateTime) def test_TimeField(self): self.assertFormfield(Event, "start_time", widgets.AdminTimeWidget) def test_TextField(self): self.assertFormfield(Event, "description", widgets.AdminTextareaWidget) def test_URLField(self): self.assertFormfield(Event, "link", widgets.AdminURLFieldWidget) def test_IntegerField(self): self.assertFormfield(Event, "min_age", widgets.AdminIntegerFieldWidget) def test_CharField(self): self.assertFormfield(Member, "name", widgets.AdminTextInputWidget) def test_EmailField(self): self.assertFormfield(Member, "email", widgets.AdminEmailInputWidget) def test_FileField(self): self.assertFormfield(Album, "cover_art", widgets.AdminFileWidget) def test_ForeignKey(self): self.assertFormfield(Event, "main_band", forms.Select) def test_raw_id_ForeignKey(self): self.assertFormfield( Event, "main_band", widgets.ForeignKeyRawIdWidget, raw_id_fields=["main_band"], ) def test_radio_fields_ForeignKey(self): ff = self.assertFormfield( Event, "main_band", widgets.AdminRadioSelect, radio_fields={"main_band": admin.VERTICAL}, ) 
self.assertIsNone(ff.empty_label) def test_radio_fields_foreignkey_formfield_overrides_empty_label(self): class MyModelAdmin(admin.ModelAdmin): radio_fields = {"parent": admin.VERTICAL} formfield_overrides = { ForeignKey: {"empty_label": "Custom empty label"}, } ma = MyModelAdmin(Inventory, admin.site) ff = ma.formfield_for_dbfield(Inventory._meta.get_field("parent"), request=None) self.assertEqual(ff.empty_label, "Custom empty label") def test_many_to_many(self): self.assertFormfield(Band, "members", forms.SelectMultiple) def test_raw_id_many_to_many(self): self.assertFormfield( Band, "members", widgets.ManyToManyRawIdWidget, raw_id_fields=["members"] ) def test_filtered_many_to_many(self): self.assertFormfield( Band, "members", widgets.FilteredSelectMultiple, filter_vertical=["members"] ) def test_formfield_overrides(self): self.assertFormfield( Event, "start_date", forms.TextInput, formfield_overrides={DateField: {"widget": forms.TextInput}}, ) def test_formfield_overrides_widget_instances(self): """ Widget instances in formfield_overrides are not shared between different fields. (#19423) """ class BandAdmin(admin.ModelAdmin): formfield_overrides = { CharField: {"widget": forms.TextInput(attrs={"size": "10"})} } ma = BandAdmin(Band, admin.site) f1 = ma.formfield_for_dbfield(Band._meta.get_field("name"), request=None) f2 = ma.formfield_for_dbfield(Band._meta.get_field("style"), request=None) self.assertNotEqual(f1.widget, f2.widget) self.assertEqual(f1.widget.attrs["maxlength"], "100") self.assertEqual(f2.widget.attrs["maxlength"], "20") self.assertEqual(f2.widget.attrs["size"], "10") def test_formfield_overrides_m2m_filter_widget(self): """ The autocomplete_fields, raw_id_fields, filter_vertical, and filter_horizontal widgets for ManyToManyFields may be overridden by specifying a widget in formfield_overrides. """ class BandAdmin(admin.ModelAdmin): filter_vertical = ["members"] formfield_overrides = { ManyToManyField: {"widget": forms.CheckboxSelectMultiple}, } ma = BandAdmin(Band, admin.site) field = ma.formfield_for_dbfield(Band._meta.get_field("members"), request=None) self.assertIsInstance(field.widget.widget, forms.CheckboxSelectMultiple) def test_formfield_overrides_for_datetime_field(self): """ Overriding the widget for DateTimeField doesn't overrides the default form_class for that field (#26449). """ class MemberAdmin(admin.ModelAdmin): formfield_overrides = { DateTimeField: {"widget": widgets.AdminSplitDateTime} } ma = MemberAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Member._meta.get_field("birthdate"), request=None) self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime) self.assertIsInstance(f1, forms.SplitDateTimeField) def test_formfield_overrides_for_custom_field(self): """ formfield_overrides works for a custom field class. 
""" class AlbumAdmin(admin.ModelAdmin): formfield_overrides = {MyFileField: {"widget": forms.TextInput()}} ma = AlbumAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield( Album._meta.get_field("backside_art"), request=None ) self.assertIsInstance(f1.widget, forms.TextInput) def test_field_with_choices(self): self.assertFormfield(Member, "gender", forms.Select) def test_choices_with_radio_fields(self): self.assertFormfield( Member, "gender", widgets.AdminRadioSelect, radio_fields={"gender": admin.VERTICAL}, ) def test_inheritance(self): self.assertFormfield(Album, "backside_art", widgets.AdminFileWidget) def test_m2m_widgets(self): """m2m fields help text as it applies to admin app (#9321).""" class AdvisorAdmin(admin.ModelAdmin): filter_vertical = ["companies"] self.assertFormfield( Advisor, "companies", widgets.FilteredSelectMultiple, filter_vertical=["companies"], ) ma = AdvisorAdmin(Advisor, admin.site) f = ma.formfield_for_dbfield(Advisor._meta.get_field("companies"), request=None) self.assertEqual( f.help_text, "Hold down “Control”, or “Command” on a Mac, to select more than one.", ) def test_m2m_widgets_no_allow_multiple_selected(self): class NoAllowMultipleSelectedWidget(forms.SelectMultiple): allow_multiple_selected = False class AdvisorAdmin(admin.ModelAdmin): filter_vertical = ["companies"] formfield_overrides = { ManyToManyField: {"widget": NoAllowMultipleSelectedWidget}, } self.assertFormfield( Advisor, "companies", widgets.FilteredSelectMultiple, filter_vertical=["companies"], ) ma = AdvisorAdmin(Advisor, admin.site) f = ma.formfield_for_dbfield(Advisor._meta.get_field("companies"), request=None) self.assertEqual(f.help_text, "") @override_settings(ROOT_URLCONF="admin_widgets.urls") class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase): def test_filter_choices_by_request_user(self): """ Ensure the user can only see their own cars in the foreign key dropdown. """ self.client.force_login(self.superuser) response = self.client.get(reverse("admin:admin_widgets_cartire_add")) self.assertNotContains(response, "BMW M3") self.assertContains(response, "Volkswagen Passat") @override_settings(ROOT_URLCONF="admin_widgets.urls") class AdminForeignKeyWidgetChangeList(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_changelist_ForeignKey(self): response = self.client.get(reverse("admin:admin_widgets_car_changelist")) self.assertContains(response, "/auth/user/add/") @override_settings(ROOT_URLCONF="admin_widgets.urls") class AdminForeignKeyRawIdWidget(TestDataMixin, TestCase): def setUp(self): self.client.force_login(self.superuser) def test_nonexistent_target_id(self): band = Band.objects.create(name="Bogey Blues") pk = band.pk band.delete() post_data = { "main_band": str(pk), } # Try posting with a nonexistent pk in a raw id field: this # should result in an error message, not a server exception. response = self.client.post(reverse("admin:admin_widgets_event_add"), post_data) self.assertContains( response, "Select a valid choice. That choice is not one of the available choices.", ) def test_invalid_target_id(self): for test_str in ("Iñtërnâtiônàlizætiøn", "1234'", -1234): # This should result in an error message, not a server exception. response = self.client.post( reverse("admin:admin_widgets_event_add"), {"main_band": test_str} ) self.assertContains( response, "Select a valid choice. 
That choice is not one of the available " "choices.", ) def test_url_params_from_lookup_dict_any_iterable(self): lookup1 = widgets.url_params_from_lookup_dict({"color__in": ("red", "blue")}) lookup2 = widgets.url_params_from_lookup_dict({"color__in": ["red", "blue"]}) self.assertEqual(lookup1, {"color__in": "red,blue"}) self.assertEqual(lookup1, lookup2) def test_url_params_from_lookup_dict_callable(self): def my_callable(): return "works" lookup1 = widgets.url_params_from_lookup_dict({"myfield": my_callable}) lookup2 = widgets.url_params_from_lookup_dict({"myfield": my_callable()}) self.assertEqual(lookup1, lookup2) def test_label_and_url_for_value_invalid_uuid(self): field = Bee._meta.get_field("honeycomb") self.assertIsInstance(field.target_field, UUIDField) widget = widgets.ForeignKeyRawIdWidget(field.remote_field, admin.site) self.assertEqual(widget.label_and_url_for_value("invalid-uuid"), ("", "")) class FilteredSelectMultipleWidgetTest(SimpleTestCase): def test_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. w = widgets.FilteredSelectMultiple("test\\", False) self.assertHTMLEqual( w.render("test", "test"), '<select multiple name="test" class="selectfilter" ' 'data-field-name="test\\" data-is-stacked="0">\n</select>', ) def test_stacked_render(self): # Backslash in verbose_name to ensure it is JavaScript escaped. w = widgets.FilteredSelectMultiple("test\\", True) self.assertHTMLEqual( w.render("test", "test"), '<select multiple name="test" class="selectfilterstacked" ' 'data-field-name="test\\" data-is-stacked="1">\n</select>', ) class AdminDateWidgetTest(SimpleTestCase): def test_attrs(self): w = widgets.AdminDateWidget() self.assertHTMLEqual( w.render("test", datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="vDateField" name="test" ' 'size="10">', ) # pass attrs to widget w = widgets.AdminDateWidget(attrs={"size": 20, "class": "myDateField"}) self.assertHTMLEqual( w.render("test", datetime(2007, 12, 1, 9, 30)), '<input value="2007-12-01" type="text" class="myDateField" name="test" ' 'size="20">', ) class AdminTimeWidgetTest(SimpleTestCase): def test_attrs(self): w = widgets.AdminTimeWidget() self.assertHTMLEqual( w.render("test", datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="vTimeField" name="test" ' 'size="8">', ) # pass attrs to widget w = widgets.AdminTimeWidget(attrs={"size": 20, "class": "myTimeField"}) self.assertHTMLEqual( w.render("test", datetime(2007, 12, 1, 9, 30)), '<input value="09:30:00" type="text" class="myTimeField" name="test" ' 'size="20">', ) class AdminSplitDateTimeWidgetTest(SimpleTestCase): def test_render(self): w = widgets.AdminSplitDateTime() self.assertHTMLEqual( w.render("test", datetime(2007, 12, 1, 9, 30)), '<p class="datetime">' 'Date: <input value="2007-12-01" type="text" class="vDateField" ' 'name="test_0" size="10"><br>' 'Time: <input value="09:30:00" type="text" class="vTimeField" ' 'name="test_1" size="8"></p>', ) def test_localization(self): w = widgets.AdminSplitDateTime() with translation.override("de-at"): w.is_localized = True self.assertHTMLEqual( w.render("test", datetime(2007, 12, 1, 9, 30)), '<p class="datetime">' 'Datum: <input value="01.12.2007" type="text" ' 'class="vDateField" name="test_0"size="10"><br>' 'Zeit: <input value="09:30:00" type="text" class="vTimeField" ' 'name="test_1" size="8"></p>', ) class AdminURLWidgetTest(SimpleTestCase): def test_get_context_validates_url(self): w = widgets.AdminURLFieldWidget() for invalid in ["", 
"/not/a/full/url/", 'javascript:alert("Danger XSS!")']: with self.subTest(url=invalid): self.assertFalse(w.get_context("name", invalid, {})["url_valid"]) self.assertTrue(w.get_context("name", "http://example.com", {})["url_valid"]) def test_render(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render("test", ""), '<input class="vURLField" name="test" type="url">' ) self.assertHTMLEqual( w.render("test", "http://example.com"), '<p class="url">Currently:<a href="http://example.com">' "http://example.com</a><br>" 'Change:<input class="vURLField" name="test" type="url" ' 'value="http://example.com"></p>', ) def test_render_idn(self): w = widgets.AdminURLFieldWidget() self.assertHTMLEqual( w.render("test", "http://example-äüö.com"), '<p class="url">Currently: <a href="http://xn--example--7za4pnc.com">' "http://example-äüö.com</a><br>" 'Change:<input class="vURLField" name="test" type="url" ' 'value="http://example-äüö.com"></p>', ) def test_render_quoting(self): """ WARNING: This test doesn't use assertHTMLEqual since it will get rid of some escapes which are tested here! """ HREF_RE = re.compile('href="([^"]+)"') VALUE_RE = re.compile('value="([^"]+)"') TEXT_RE = re.compile("<a[^>]+>([^>]+)</a>") w = widgets.AdminURLFieldWidget() output = w.render("test", "http://example.com/<sometag>some-text</sometag>") self.assertEqual( HREF_RE.search(output)[1], "http://example.com/%3Csometag%3Esome-text%3C/sometag%3E", ) self.assertEqual( TEXT_RE.search(output)[1], "http://example.com/&lt;sometag&gt;some-text&lt;/sometag&gt;", ) self.assertEqual( VALUE_RE.search(output)[1], "http://example.com/&lt;sometag&gt;some-text&lt;/sometag&gt;", ) output = w.render("test", "http://example-äüö.com/<sometag>some-text</sometag>") self.assertEqual( HREF_RE.search(output)[1], "http://xn--example--7za4pnc.com/%3Csometag%3Esome-text%3C/sometag%3E", ) self.assertEqual( TEXT_RE.search(output)[1], "http://example-äüö.com/&lt;sometag&gt;some-text&lt;/sometag&gt;", ) self.assertEqual( VALUE_RE.search(output)[1], "http://example-äüö.com/&lt;sometag&gt;some-text&lt;/sometag&gt;", ) output = w.render( "test", 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"' ) self.assertEqual( HREF_RE.search(output)[1], "http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)" "%3C/script%3E%22", ) self.assertEqual( TEXT_RE.search(output)[1], "http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;" "alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;", ) self.assertEqual( VALUE_RE.search(output)[1], "http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;" "alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;", ) class AdminUUIDWidgetTests(SimpleTestCase): def test_attrs(self): w = widgets.AdminUUIDInputWidget() self.assertHTMLEqual( w.render("test", "550e8400-e29b-41d4-a716-446655440000"), '<input value="550e8400-e29b-41d4-a716-446655440000" type="text" ' 'class="vUUIDField" name="test">', ) w = widgets.AdminUUIDInputWidget(attrs={"class": "myUUIDInput"}) self.assertHTMLEqual( w.render("test", "550e8400-e29b-41d4-a716-446655440000"), '<input value="550e8400-e29b-41d4-a716-446655440000" type="text" ' 'class="myUUIDInput" name="test">', ) @override_settings(ROOT_URLCONF="admin_widgets.urls") class AdminFileWidgetTests(TestDataMixin, TestCase): @classmethod def setUpTestData(cls): super().setUpTestData() band = Band.objects.create(name="Linkin Park") cls.album = band.album_set.create( name="Hybrid Theory", cover_art=r"albums\hybrid_theory.jpg" ) def test_render(self): w = widgets.AdminFileWidget() self.assertHTMLEqual( 
w.render("test", self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> ' '<span class="clearable-file-input">' '<input type="checkbox" name="test-clear" id="test-clear_id"> ' '<label for="test-clear_id">Clear</label></span><br>' 'Change: <input type="file" name="test"></p>' % { "STORAGE_URL": default_storage.url(""), }, ) self.assertHTMLEqual( w.render("test", SimpleUploadedFile("test", b"content")), '<input type="file" name="test">', ) def test_render_required(self): widget = widgets.AdminFileWidget() widget.is_required = True self.assertHTMLEqual( widget.render("test", self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a><br>' 'Change: <input type="file" name="test"></p>' % { "STORAGE_URL": default_storage.url(""), }, ) def test_render_disabled(self): widget = widgets.AdminFileWidget(attrs={"disabled": True}) self.assertHTMLEqual( widget.render("test", self.album.cover_art), '<p class="file-upload">Currently: <a href="%(STORAGE_URL)salbums/' r'hybrid_theory.jpg">albums\hybrid_theory.jpg</a> ' '<span class="clearable-file-input">' '<input type="checkbox" name="test-clear" id="test-clear_id" disabled>' '<label for="test-clear_id">Clear</label></span><br>' 'Change: <input type="file" name="test" disabled></p>' % { "STORAGE_URL": default_storage.url(""), }, ) def test_readonly_fields(self): """ File widgets should render as a link when they're marked "read only." """ self.client.force_login(self.superuser) response = self.client.get( reverse("admin:admin_widgets_album_change", args=(self.album.id,)) ) self.assertContains( response, '<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">' r"albums\hybrid_theory.jpg</a></div>" % {"STORAGE_URL": default_storage.url("")}, html=True, ) self.assertNotContains( response, '<input type="file" name="cover_art" id="id_cover_art">', html=True, ) response = self.client.get(reverse("admin:admin_widgets_album_add")) self.assertContains( response, '<div class="readonly"></div>', html=True, ) @override_settings(ROOT_URLCONF="admin_widgets.urls") class ForeignKeyRawIdWidgetTest(TestCase): def test_render(self): band = Band.objects.create(name="Linkin Park") band.album_set.create( name="Hybrid Theory", cover_art=r"albums\hybrid_theory.jpg" ) rel_uuid = Album._meta.get_field("band").remote_field w = widgets.ForeignKeyRawIdWidget(rel_uuid, widget_admin_site) self.assertHTMLEqual( w.render("test", band.uuid, attrs={}), '<input type="text" name="test" value="%(banduuid)s" ' 'class="vForeignKeyRawIdAdminField vUUIDField">' '<a href="/admin_widgets/band/?_to_field=uuid" class="related-lookup" ' 'id="lookup_id_test" title="Lookup"></a>&nbsp;<strong>' '<a href="/admin_widgets/band/%(bandpk)s/change/">Linkin Park</a>' "</strong>" % {"banduuid": band.uuid, "bandpk": band.pk}, ) rel_id = ReleaseEvent._meta.get_field("album").remote_field w = widgets.ForeignKeyRawIdWidget(rel_id, widget_admin_site) self.assertHTMLEqual( w.render("test", None, attrs={}), '<input type="text" name="test" class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/album/?_to_field=id" class="related-lookup" ' 'id="lookup_id_test" title="Lookup"></a>', ) def test_relations_to_non_primary_key(self): # ForeignKeyRawIdWidget works with fields which aren't related to # the model's primary key. 
apple = Inventory.objects.create(barcode=86, name="Apple") Inventory.objects.create(barcode=22, name="Pear") core = Inventory.objects.create(barcode=87, name="Core", parent=apple) rel = Inventory._meta.get_field("parent").remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render("test", core.parent_id, attrs={}), '<input type="text" name="test" value="86" ' 'class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/inventory/?_to_field=barcode" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>' '&nbsp;<strong><a href="/admin_widgets/inventory/%(pk)s/change/">' "Apple</a></strong>" % {"pk": apple.pk}, ) def test_fk_related_model_not_in_admin(self): # FK to a model not registered with admin site. Raw ID widget should # have no magnifying glass link. See #16542 big_honeycomb = Honeycomb.objects.create(location="Old tree") big_honeycomb.bee_set.create() rel = Bee._meta.get_field("honeycomb").remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render("honeycomb_widget", big_honeycomb.pk, attrs={}), '<input type="text" name="honeycomb_widget" value="%(hcombpk)s">' "&nbsp;<strong>%(hcomb)s</strong>" % {"hcombpk": big_honeycomb.pk, "hcomb": big_honeycomb}, ) def test_fk_to_self_model_not_in_admin(self): # FK to self, not registered with admin site. Raw ID widget should have # no magnifying glass link. See #16542 subject1 = Individual.objects.create(name="Subject #1") Individual.objects.create(name="Child", parent=subject1) rel = Individual._meta.get_field("parent").remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render("individual_widget", subject1.pk, attrs={}), '<input type="text" name="individual_widget" value="%(subj1pk)s">' "&nbsp;<strong>%(subj1)s</strong>" % {"subj1pk": subject1.pk, "subj1": subject1}, ) def test_proper_manager_for_label_lookup(self): # see #9258 rel = Inventory._meta.get_field("parent").remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) hidden = Inventory.objects.create(barcode=93, name="Hidden", hidden=True) child_of_hidden = Inventory.objects.create( barcode=94, name="Child of hidden", parent=hidden ) self.assertHTMLEqual( w.render("test", child_of_hidden.parent_id, attrs={}), '<input type="text" name="test" value="93" ' ' class="vForeignKeyRawIdAdminField">' '<a href="/admin_widgets/inventory/?_to_field=barcode" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>' '&nbsp;<strong><a href="/admin_widgets/inventory/%(pk)s/change/">' "Hidden</a></strong>" % {"pk": hidden.pk}, ) def test_render_unsafe_limit_choices_to(self): rel = UnsafeLimitChoicesTo._meta.get_field("band").remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render("test", None), '<input type="text" name="test" class="vForeignKeyRawIdAdminField">\n' '<a href="/admin_widgets/band/?name=%22%26%3E%3Cescapeme&amp;' '_to_field=artist_ptr" class="related-lookup" id="lookup_id_test" ' 'title="Lookup"></a>', ) def test_render_fk_as_pk_model(self): rel = VideoStream._meta.get_field("release_event").remote_field w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render("test", None), '<input type="text" name="test" class="vForeignKeyRawIdAdminField">\n' '<a href="/admin_widgets/releaseevent/?_to_field=album" ' 'class="related-lookup" id="lookup_id_test" title="Lookup"></a>', ) @override_settings(ROOT_URLCONF="admin_widgets.urls") class 
ManyToManyRawIdWidgetTest(TestCase): def test_render(self): band = Band.objects.create(name="Linkin Park") m1 = Member.objects.create(name="Chester") m2 = Member.objects.create(name="Mike") band.members.add(m1, m2) rel = Band._meta.get_field("members").remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render("test", [m1.pk, m2.pk], attrs={}), ( '<input type="text" name="test" value="%(m1pk)s,%(m2pk)s" ' ' class="vManyToManyRawIdAdminField">' '<a href="/admin_widgets/member/" class="related-lookup" ' ' id="lookup_id_test" title="Lookup"></a>' ) % {"m1pk": m1.pk, "m2pk": m2.pk}, ) self.assertHTMLEqual( w.render("test", [m1.pk]), ( '<input type="text" name="test" value="%(m1pk)s" ' ' class="vManyToManyRawIdAdminField">' '<a href="/admin_widgets/member/" class="related-lookup" ' ' id="lookup_id_test" title="Lookup"></a>' ) % {"m1pk": m1.pk}, ) def test_m2m_related_model_not_in_admin(self): # M2M relationship with model not registered with admin site. Raw ID # widget should have no magnifying glass link. See #16542 consultor1 = Advisor.objects.create(name="Rockstar Techie") c1 = Company.objects.create(name="Doodle") c2 = Company.objects.create(name="Pear") consultor1.companies.add(c1, c2) rel = Advisor._meta.get_field("companies").remote_field w = widgets.ManyToManyRawIdWidget(rel, widget_admin_site) self.assertHTMLEqual( w.render("company_widget1", [c1.pk, c2.pk], attrs={}), '<input type="text" name="company_widget1" value="%(c1pk)s,%(c2pk)s">' % {"c1pk": c1.pk, "c2pk": c2.pk}, ) self.assertHTMLEqual( w.render("company_widget2", [c1.pk]), '<input type="text" name="company_widget2" value="%(c1pk)s">' % {"c1pk": c1.pk}, ) @override_settings(ROOT_URLCONF="admin_widgets.urls") class RelatedFieldWidgetWrapperTests(SimpleTestCase): def test_no_can_add_related(self): rel = Individual._meta.get_field("parent").remote_field w = widgets.AdminRadioSelect() # Used to fail with a name error. 
w = widgets.RelatedFieldWidgetWrapper(w, rel, widget_admin_site) self.assertFalse(w.can_add_related) def test_select_multiple_widget_cant_change_delete_related(self): rel = Individual._meta.get_field("parent").remote_field widget = forms.SelectMultiple() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertFalse(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) def test_on_delete_cascade_rel_cant_delete_related(self): rel = Individual._meta.get_field("soulmate").remote_field widget = forms.Select() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) self.assertTrue(wrapper.can_add_related) self.assertTrue(wrapper.can_change_related) self.assertFalse(wrapper.can_delete_related) def test_custom_widget_render(self): class CustomWidget(forms.Select): def render(self, *args, **kwargs): return "custom render output" rel = Album._meta.get_field("band").remote_field widget = CustomWidget() wrapper = widgets.RelatedFieldWidgetWrapper( widget, rel, widget_admin_site, can_add_related=True, can_change_related=True, can_delete_related=True, ) output = wrapper.render("name", "value") self.assertIn("custom render output", output) def test_widget_delegates_value_omitted_from_data(self): class CustomWidget(forms.Select): def value_omitted_from_data(self, data, files, name): return False rel = Album._meta.get_field("band").remote_field widget = CustomWidget() wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.value_omitted_from_data({}, {}, "band"), False) def test_widget_is_hidden(self): rel = Album._meta.get_field("band").remote_field widget = forms.HiddenInput() widget.choices = () wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.is_hidden, True) context = wrapper.get_context("band", None, {}) self.assertIs(context["is_hidden"], True) output = wrapper.render("name", "value") # Related item links are hidden. self.assertNotIn("<a ", output) def test_widget_is_not_hidden(self): rel = Album._meta.get_field("band").remote_field widget = forms.Select() wrapper = widgets.RelatedFieldWidgetWrapper(widget, rel, widget_admin_site) self.assertIs(wrapper.is_hidden, False) context = wrapper.get_context("band", None, {}) self.assertIs(context["is_hidden"], False) output = wrapper.render("name", "value") # Related item links are present. self.assertIn("<a ", output) @override_settings(ROOT_URLCONF="admin_widgets.urls") class AdminWidgetSeleniumTestCase(AdminSeleniumTestCase): available_apps = ["admin_widgets"] + AdminSeleniumTestCase.available_apps def setUp(self): self.u1 = User.objects.create_superuser( username="super", password="secret", email="[email protected]" ) class DateTimePickerSeleniumTests(AdminWidgetSeleniumTestCase): def test_show_hide_date_time_picker_widgets(self): """ Pressing the ESC key or clicking on a widget value closes the date and time picker widgets. 
""" from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys self.admin_login(username="super", password="secret", login_url="/") # Open a page that has a date and time picker widgets self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_member_add") ) # First, with the date picker widget --------------------------------- cal_icon = self.selenium.find_element(By.ID, "calendarlink0") # The date picker is hidden self.assertFalse( self.selenium.find_element(By.ID, "calendarbox0").is_displayed() ) # Click the calendar icon cal_icon.click() # The date picker is visible self.assertTrue( self.selenium.find_element(By.ID, "calendarbox0").is_displayed() ) # Press the ESC key self.selenium.find_element(By.TAG_NAME, "body").send_keys([Keys.ESCAPE]) # The date picker is hidden again self.assertFalse( self.selenium.find_element(By.ID, "calendarbox0").is_displayed() ) # Click the calendar icon, then on the 15th of current month cal_icon.click() self.selenium.find_element(By.XPATH, "//a[contains(text(), '15')]").click() self.assertFalse( self.selenium.find_element(By.ID, "calendarbox0").is_displayed() ) self.assertEqual( self.selenium.find_element(By.ID, "id_birthdate_0").get_attribute("value"), datetime.today().strftime("%Y-%m-") + "15", ) # Then, with the time picker widget ---------------------------------- time_icon = self.selenium.find_element(By.ID, "clocklink0") # The time picker is hidden self.assertFalse(self.selenium.find_element(By.ID, "clockbox0").is_displayed()) # Click the time icon time_icon.click() # The time picker is visible self.assertTrue(self.selenium.find_element(By.ID, "clockbox0").is_displayed()) self.assertEqual( [ x.text for x in self.selenium.find_elements( By.XPATH, "//ul[@class='timelist']/li/a" ) ], ["Now", "Midnight", "6 a.m.", "Noon", "6 p.m."], ) # Press the ESC key self.selenium.find_element(By.TAG_NAME, "body").send_keys([Keys.ESCAPE]) # The time picker is hidden again self.assertFalse(self.selenium.find_element(By.ID, "clockbox0").is_displayed()) # Click the time icon, then select the 'Noon' value time_icon.click() self.selenium.find_element(By.XPATH, "//a[contains(text(), 'Noon')]").click() self.assertFalse(self.selenium.find_element(By.ID, "clockbox0").is_displayed()) self.assertEqual( self.selenium.find_element(By.ID, "id_birthdate_1").get_attribute("value"), "12:00:00", ) def test_calendar_nonday_class(self): """ Ensure cells that are not days of the month have the `nonday` CSS class. Refs #4574. """ from selenium.webdriver.common.by import By self.admin_login(username="super", password="secret", login_url="/") # Open a page that has a date and time picker widgets self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_member_add") ) # fill in the birth date. self.selenium.find_element(By.ID, "id_birthdate_0").send_keys("2013-06-01") # Click the calendar icon self.selenium.find_element(By.ID, "calendarlink0").click() # get all the tds within the calendar calendar0 = self.selenium.find_element(By.ID, "calendarin0") tds = calendar0.find_elements(By.TAG_NAME, "td") # make sure the first and last 6 cells have class nonday for td in tds[:6] + tds[-6:]: self.assertEqual(td.get_attribute("class"), "nonday") def test_calendar_selected_class(self): """ Ensure cell for the day in the input has the `selected` CSS class. Refs #4574. 
""" from selenium.webdriver.common.by import By self.admin_login(username="super", password="secret", login_url="/") # Open a page that has a date and time picker widgets self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_member_add") ) # fill in the birth date. self.selenium.find_element(By.ID, "id_birthdate_0").send_keys("2013-06-01") # Click the calendar icon self.selenium.find_element(By.ID, "calendarlink0").click() # get all the tds within the calendar calendar0 = self.selenium.find_element(By.ID, "calendarin0") tds = calendar0.find_elements(By.TAG_NAME, "td") # verify the selected cell selected = tds[6] self.assertEqual(selected.get_attribute("class"), "selected") self.assertEqual(selected.text, "1") def test_calendar_no_selected_class(self): """ Ensure no cells are given the selected class when the field is empty. Refs #4574. """ from selenium.webdriver.common.by import By self.admin_login(username="super", password="secret", login_url="/") # Open a page that has a date and time picker widgets self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_member_add") ) # Click the calendar icon self.selenium.find_element(By.ID, "calendarlink0").click() # get all the tds within the calendar calendar0 = self.selenium.find_element(By.ID, "calendarin0") tds = calendar0.find_elements(By.TAG_NAME, "td") # verify there are no cells with the selected class selected = [td for td in tds if td.get_attribute("class") == "selected"] self.assertEqual(len(selected), 0) def test_calendar_show_date_from_input(self): """ The calendar shows the date from the input field for every locale supported by Django. """ from selenium.webdriver.common.by import By self.selenium.set_window_size(1024, 768) self.admin_login(username="super", password="secret", login_url="/") # Enter test data member = Member.objects.create( name="Bob", birthdate=datetime(1984, 5, 15), gender="M" ) # Get month name translations for every locale month_string = "May" path = os.path.join( os.path.dirname(import_module("django.contrib.admin").__file__), "locale" ) for language_code, language_name in settings.LANGUAGES: try: catalog = gettext.translation("djangojs", path, [language_code]) except OSError: continue if month_string in catalog._catalog: month_name = catalog._catalog[month_string] else: month_name = month_string # Get the expected caption may_translation = month_name expected_caption = "{:s} {:d}".format(may_translation.upper(), 1984) # Test with every locale with override_settings(LANGUAGE_CODE=language_code): # Open a page that has a date picker widget url = reverse("admin:admin_widgets_member_change", args=(member.pk,)) self.selenium.get(self.live_server_url + url) # Click on the calendar icon self.selenium.find_element(By.ID, "calendarlink0").click() # Make sure that the right month and year are displayed self.wait_for_text("#calendarin0 caption", expected_caption) @override_settings(TIME_ZONE="Asia/Singapore") class DateTimePickerShortcutsSeleniumTests(AdminWidgetSeleniumTestCase): def test_date_time_picker_shortcuts(self): """ date/time/datetime picker shortcuts work in the current time zone. Refs #20663. This test case is fairly tricky, it relies on selenium still running the browser in the default time zone "America/Chicago" despite `override_settings` changing the time zone to "Asia/Singapore". 
""" from selenium.webdriver.common.by import By self.admin_login(username="super", password="secret", login_url="/") error_margin = timedelta(seconds=10) # If we are neighbouring a DST, we add an hour of error margin. tz = zoneinfo.ZoneInfo("America/Chicago") utc_now = datetime.now(zoneinfo.ZoneInfo("UTC")) tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname() tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname() if tz_yesterday != tz_tomorrow: error_margin += timedelta(hours=1) self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_member_add") ) self.selenium.find_element(By.ID, "id_name").send_keys("test") # Click on the "today" and "now" shortcuts. shortcuts = self.selenium.find_elements( By.CSS_SELECTOR, ".field-birthdate .datetimeshortcuts" ) now = datetime.now() for shortcut in shortcuts: shortcut.find_element(By.TAG_NAME, "a").click() # There is a time zone mismatch warning. # Warning: This would effectively fail if the TIME_ZONE defined in the # settings has the same UTC offset as "Asia/Singapore" because the # mismatch warning would be rightfully missing from the page. self.assertCountSeleniumElements(".field-birthdate .timezonewarning", 1) # Submit the form. with self.wait_page_loaded(): self.selenium.find_element(By.NAME, "_save").click() # Make sure that "now" in JavaScript is within 10 seconds # from "now" on the server side. member = Member.objects.get(name="test") self.assertGreater(member.birthdate, now - error_margin) self.assertLess(member.birthdate, now + error_margin) # The above tests run with Asia/Singapore which are on the positive side of # UTC. Here we test with a timezone on the negative side. @override_settings(TIME_ZONE="US/Eastern") class DateTimePickerAltTimezoneSeleniumTests(DateTimePickerShortcutsSeleniumTests): pass class HorizontalVerticalFilterSeleniumTests(AdminWidgetSeleniumTestCase): def setUp(self): super().setUp() self.lisa = Student.objects.create(name="Lisa") self.john = Student.objects.create(name="John") self.bob = Student.objects.create(name="Bob") self.peter = Student.objects.create(name="Peter") self.jenny = Student.objects.create(name="Jenny") self.jason = Student.objects.create(name="Jason") self.cliff = Student.objects.create(name="Cliff") self.arthur = Student.objects.create(name="Arthur") self.school = School.objects.create(name="School of Awesome") def assertActiveButtons( self, mode, field_name, choose, remove, choose_all=None, remove_all=None ): choose_link = "#id_%s_add_link" % field_name choose_all_link = "#id_%s_add_all_link" % field_name remove_link = "#id_%s_remove_link" % field_name remove_all_link = "#id_%s_remove_all_link" % field_name self.assertEqual(self.has_css_class(choose_link, "active"), choose) self.assertEqual(self.has_css_class(remove_link, "active"), remove) if mode == "horizontal": self.assertEqual(self.has_css_class(choose_all_link, "active"), choose_all) self.assertEqual(self.has_css_class(remove_all_link, "active"), remove_all) def execute_basic_operations(self, mode, field_name): from selenium.webdriver.common.by import By original_url = self.selenium.current_url from_box = "#id_%s_from" % field_name to_box = "#id_%s_to" % field_name choose_link = "id_%s_add_link" % field_name choose_all_link = "id_%s_add_all_link" % field_name remove_link = "id_%s_remove_link" % field_name remove_all_link = "id_%s_remove_all_link" % field_name # Initial positions --------------------------------------------------- self.assertSelectOptions( from_box, [ str(self.arthur.id), 
str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ], ) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id)]) self.assertActiveButtons(mode, field_name, False, False, True, True) # Click 'Choose all' -------------------------------------------------- if mode == "horizontal": self.selenium.find_element(By.ID, choose_all_link).click() elif mode == "vertical": # There 's no 'Choose all' button in vertical mode, so individually # select all options and click 'Choose'. for option in self.selenium.find_elements( By.CSS_SELECTOR, from_box + " > option" ): option.click() self.selenium.find_element(By.ID, choose_link).click() self.assertSelectOptions(from_box, []) self.assertSelectOptions( to_box, [ str(self.lisa.id), str(self.peter.id), str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ], ) self.assertActiveButtons(mode, field_name, False, False, False, True) # Click 'Remove all' -------------------------------------------------- if mode == "horizontal": self.selenium.find_element(By.ID, remove_all_link).click() elif mode == "vertical": # There 's no 'Remove all' button in vertical mode, so individually # select all options and click 'Remove'. for option in self.selenium.find_elements( By.CSS_SELECTOR, to_box + " > option" ): option.click() self.selenium.find_element(By.ID, remove_link).click() self.assertSelectOptions( from_box, [ str(self.lisa.id), str(self.peter.id), str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ], ) self.assertSelectOptions(to_box, []) self.assertActiveButtons(mode, field_name, False, False, True, False) # Choose some options ------------------------------------------------ from_lisa_select_option = self.selenium.find_element( By.CSS_SELECTOR, '{} > option[value="{}"]'.format(from_box, self.lisa.id) ) # Check the title attribute is there for tool tips: ticket #20821 self.assertEqual( from_lisa_select_option.get_attribute("title"), from_lisa_select_option.get_attribute("text"), ) self.select_option(from_box, str(self.lisa.id)) self.select_option(from_box, str(self.jason.id)) self.select_option(from_box, str(self.bob.id)) self.select_option(from_box, str(self.john.id)) self.assertActiveButtons(mode, field_name, True, False, True, False) self.selenium.find_element(By.ID, choose_link).click() self.assertActiveButtons(mode, field_name, False, False, True, True) self.assertSelectOptions( from_box, [ str(self.peter.id), str(self.arthur.id), str(self.cliff.id), str(self.jenny.id), ], ) self.assertSelectOptions( to_box, [ str(self.lisa.id), str(self.bob.id), str(self.jason.id), str(self.john.id), ], ) # Check the tooltip is still there after moving: ticket #20821 to_lisa_select_option = self.selenium.find_element( By.CSS_SELECTOR, '{} > option[value="{}"]'.format(to_box, self.lisa.id) ) self.assertEqual( to_lisa_select_option.get_attribute("title"), to_lisa_select_option.get_attribute("text"), ) # Remove some options ------------------------------------------------- self.select_option(to_box, str(self.lisa.id)) self.select_option(to_box, str(self.bob.id)) self.assertActiveButtons(mode, field_name, False, True, True, True) self.selenium.find_element(By.ID, remove_link).click() self.assertActiveButtons(mode, field_name, False, False, True, True) self.assertSelectOptions( from_box, [ str(self.peter.id), str(self.arthur.id), str(self.cliff.id), str(self.jenny.id), str(self.lisa.id), 
str(self.bob.id), ], ) self.assertSelectOptions(to_box, [str(self.jason.id), str(self.john.id)]) # Choose some more options -------------------------------------------- self.select_option(from_box, str(self.arthur.id)) self.select_option(from_box, str(self.cliff.id)) self.selenium.find_element(By.ID, choose_link).click() self.assertSelectOptions( from_box, [ str(self.peter.id), str(self.jenny.id), str(self.lisa.id), str(self.bob.id), ], ) self.assertSelectOptions( to_box, [ str(self.jason.id), str(self.john.id), str(self.arthur.id), str(self.cliff.id), ], ) # Choose some more options -------------------------------------------- self.select_option(from_box, str(self.peter.id)) self.select_option(from_box, str(self.lisa.id)) # Confirm they're selected after clicking inactive buttons: ticket #26575 self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)]) self.selenium.find_element(By.ID, remove_link).click() self.assertSelectedOptions(from_box, [str(self.peter.id), str(self.lisa.id)]) # Unselect the options ------------------------------------------------ self.deselect_option(from_box, str(self.peter.id)) self.deselect_option(from_box, str(self.lisa.id)) # Choose some more options -------------------------------------------- self.select_option(to_box, str(self.jason.id)) self.select_option(to_box, str(self.john.id)) # Confirm they're selected after clicking inactive buttons: ticket #26575 self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)]) self.selenium.find_element(By.ID, choose_link).click() self.assertSelectedOptions(to_box, [str(self.jason.id), str(self.john.id)]) # Unselect the options ------------------------------------------------ self.deselect_option(to_box, str(self.jason.id)) self.deselect_option(to_box, str(self.john.id)) # Pressing buttons shouldn't change the URL. self.assertEqual(self.selenium.current_url, original_url) def test_basic(self): from selenium.webdriver.common.by import By self.selenium.set_window_size(1024, 768) self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username="super", password="secret", login_url="/") self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_school_change", args=(self.school.id,)) ) self.wait_page_ready() self.execute_basic_operations("vertical", "students") self.execute_basic_operations("horizontal", "alumni") # Save and check that everything is properly stored in the database --- self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.wait_page_ready() self.school = School.objects.get(id=self.school.id) # Reload from database self.assertEqual( list(self.school.students.all()), [self.arthur, self.cliff, self.jason, self.john], ) self.assertEqual( list(self.school.alumni.all()), [self.arthur, self.cliff, self.jason, self.john], ) def test_filter(self): """ Typing in the search box filters out options displayed in the 'from' box. 
""" from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys self.selenium.set_window_size(1024, 768) self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username="super", password="secret", login_url="/") self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_school_change", args=(self.school.id,)) ) for field_name in ["students", "alumni"]: from_box = "#id_%s_from" % field_name to_box = "#id_%s_to" % field_name choose_link = "id_%s_add_link" % field_name remove_link = "id_%s_remove_link" % field_name input = self.selenium.find_element(By.ID, "id_%s_input" % field_name) # Initial values self.assertSelectOptions( from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ], ) # Typing in some characters filters out non-matching options input.send_keys("a") self.assertSelectOptions( from_box, [str(self.arthur.id), str(self.jason.id)] ) input.send_keys("R") self.assertSelectOptions(from_box, [str(self.arthur.id)]) # Clearing the text box makes the other options reappear input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions( from_box, [str(self.arthur.id), str(self.jason.id)] ) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions( from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ], ) # ----------------------------------------------------------------- # Choosing a filtered option sends it properly to the 'to' box. input.send_keys("a") self.assertSelectOptions( from_box, [str(self.arthur.id), str(self.jason.id)] ) self.select_option(from_box, str(self.jason.id)) self.selenium.find_element(By.ID, choose_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id)]) self.assertSelectOptions( to_box, [ str(self.lisa.id), str(self.peter.id), str(self.jason.id), ], ) self.select_option(to_box, str(self.lisa.id)) self.selenium.find_element(By.ID, remove_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) # Clear text box self.assertSelectOptions( from_box, [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jenny.id), str(self.john.id), str(self.lisa.id), ], ) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) # ----------------------------------------------------------------- # Pressing enter on a filtered option sends it properly to # the 'to' box. 
self.select_option(to_box, str(self.jason.id)) self.selenium.find_element(By.ID, remove_link).click() input.send_keys("ja") self.assertSelectOptions(from_box, [str(self.jason.id)]) input.send_keys([Keys.ENTER]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE]) # Save and check that everything is properly stored in the database --- with self.wait_page_loaded(): self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click() self.school = School.objects.get(id=self.school.id) # Reload from database self.assertEqual(list(self.school.students.all()), [self.jason, self.peter]) self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter]) def test_back_button_bug(self): """ Some browsers had a bug where navigating away from the change page and then clicking the browser's back button would clear the filter_horizontal/filter_vertical widgets (#13614). """ from selenium.webdriver.common.by import By self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username="super", password="secret", login_url="/") change_url = reverse( "admin:admin_widgets_school_change", args=(self.school.id,) ) self.selenium.get(self.live_server_url + change_url) # Navigate away and go back to the change form page. self.selenium.find_element(By.LINK_TEXT, "Home").click() self.selenium.back() expected_unselected_values = [ str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id), ] expected_selected_values = [str(self.lisa.id), str(self.peter.id)] # Everything is still in place self.assertSelectOptions("#id_students_from", expected_unselected_values) self.assertSelectOptions("#id_students_to", expected_selected_values) self.assertSelectOptions("#id_alumni_from", expected_unselected_values) self.assertSelectOptions("#id_alumni_to", expected_selected_values) def test_refresh_page(self): """ Horizontal and vertical filter widgets keep selected options on page reload (#22955). """ self.school.students.add(self.arthur, self.jason) self.school.alumni.add(self.arthur, self.jason) self.admin_login(username="super", password="secret", login_url="/") change_url = reverse( "admin:admin_widgets_school_change", args=(self.school.id,) ) self.selenium.get(self.live_server_url + change_url) self.assertCountSeleniumElements("#id_students_to > option", 2) # self.selenium.refresh() or send_keys(Keys.F5) does hard reload and # doesn't replicate what happens when a user clicks the browser's # 'Refresh' button. 
with self.wait_page_loaded(): self.selenium.execute_script("location.reload()") self.assertCountSeleniumElements("#id_students_to > option", 2) class AdminRawIdWidgetSeleniumTests(AdminWidgetSeleniumTestCase): def setUp(self): super().setUp() Band.objects.create(id=42, name="Bogey Blues") Band.objects.create(id=98, name="Green Potatoes") def test_ForeignKey(self): from selenium.webdriver.common.by import By self.admin_login(username="super", password="secret", login_url="/") self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_event_add") ) main_window = self.selenium.current_window_handle # No value has been selected yet self.assertEqual( self.selenium.find_element(By.ID, "id_main_band").get_attribute("value"), "" ) # Open the popup window and click on a band self.selenium.find_element(By.ID, "lookup_id_main_band").click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element(By.LINK_TEXT, "Bogey Blues") self.assertIn("/band/42/", link.get_attribute("href")) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value("#id_main_band", "42") # Reopen the popup window and click on another band self.selenium.find_element(By.ID, "lookup_id_main_band").click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element(By.LINK_TEXT, "Green Potatoes") self.assertIn("/band/98/", link.get_attribute("href")) link.click() # The field now contains the other selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value("#id_main_band", "98") def test_many_to_many(self): from selenium.webdriver.common.by import By self.admin_login(username="super", password="secret", login_url="/") self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_event_add") ) main_window = self.selenium.current_window_handle # No value has been selected yet self.assertEqual( self.selenium.find_element(By.ID, "id_supporting_bands").get_attribute( "value" ), "", ) # Help text for the field is displayed self.assertEqual( self.selenium.find_element( By.CSS_SELECTOR, ".field-supporting_bands div.help" ).text, "Supporting Bands.", ) # Open the popup window and click on a band self.selenium.find_element(By.ID, "lookup_id_supporting_bands").click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element(By.LINK_TEXT, "Bogey Blues") self.assertIn("/band/42/", link.get_attribute("href")) link.click() # The field now contains the selected band's id self.selenium.switch_to.window(main_window) self.wait_for_value("#id_supporting_bands", "42") # Reopen the popup window and click on another band self.selenium.find_element(By.ID, "lookup_id_supporting_bands").click() self.wait_for_and_switch_to_popup() link = self.selenium.find_element(By.LINK_TEXT, "Green Potatoes") self.assertIn("/band/98/", link.get_attribute("href")) link.click() # The field now contains the two selected bands' ids self.selenium.switch_to.window(main_window) self.wait_for_value("#id_supporting_bands", "42,98") class RelatedFieldWidgetSeleniumTests(AdminWidgetSeleniumTestCase): def test_ForeignKey_using_to_field(self): from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select self.admin_login(username="super", password="secret", login_url="/") self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_profile_add") ) main_window = self.selenium.current_window_handle # Click the Add User button to add new self.selenium.find_element(By.ID, "add_id_user").click() 
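# The add-related ("+") button opens a popup in a separate window; switch to it, fill in the new user's credentials, and save before returning to the main window.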
self.wait_for_and_switch_to_popup() password_field = self.selenium.find_element(By.ID, "id_password") password_field.send_keys("password") username_field = self.selenium.find_element(By.ID, "id_username") username_value = "newuser" username_field.send_keys(username_value) save_button_css_selector = ".submit-row > input[type=submit]" self.selenium.find_element(By.CSS_SELECTOR, save_button_css_selector).click() self.selenium.switch_to.window(main_window) # The field now contains the new user self.selenium.find_element(By.CSS_SELECTOR, "#id_user option[value=newuser]") self.selenium.find_element(By.ID, "view_id_user").click() self.wait_for_value("#id_username", "newuser") self.selenium.back() # Chrome and Safari don't update related object links when selecting # the same option as previously submitted. As a consequence, the # "pencil" and "eye" buttons remain disable, so select "---------" # first. select = Select(self.selenium.find_element(By.ID, "id_user")) select.select_by_index(0) select.select_by_value("newuser") # Click the Change User button to change it self.selenium.find_element(By.ID, "change_id_user").click() self.wait_for_and_switch_to_popup() username_field = self.selenium.find_element(By.ID, "id_username") username_value = "changednewuser" username_field.clear() username_field.send_keys(username_value) save_button_css_selector = ".submit-row > input[type=submit]" self.selenium.find_element(By.CSS_SELECTOR, save_button_css_selector).click() self.selenium.switch_to.window(main_window) self.selenium.find_element( By.CSS_SELECTOR, "#id_user option[value=changednewuser]" ) self.selenium.find_element(By.ID, "view_id_user").click() self.wait_for_value("#id_username", "changednewuser") self.selenium.back() select = Select(self.selenium.find_element(By.ID, "id_user")) select.select_by_value("changednewuser") # Go ahead and submit the form to make sure it works self.selenium.find_element(By.CSS_SELECTOR, save_button_css_selector).click() self.wait_for_text( "li.success", "The profile “changednewuser” was added successfully." ) profiles = Profile.objects.all() self.assertEqual(len(profiles), 1) self.assertEqual(profiles[0].user.username, username_value) @skipUnless(Image, "Pillow not installed") class ImageFieldWidgetsSeleniumTests(AdminWidgetSeleniumTestCase): def test_clearablefileinput_widget(self): from selenium.webdriver.common.by import By self.admin_login(username="super", password="secret", login_url="/") self.selenium.get( self.live_server_url + reverse("admin:admin_widgets_student_add"), ) photo_input_id = "id_photo" save_and_edit_button_css_selector = "input[value='Save and continue editing']" tests_files_folder = "%s/files" % os.getcwd() clear_checkbox_id = "photo-clear_id" def _submit_and_wait(): with self.wait_page_loaded(): self.selenium.find_element( By.CSS_SELECTOR, save_and_edit_button_css_selector ).click() # Add a student. title_input = self.selenium.find_element(By.ID, "id_name") title_input.send_keys("Joe Doe") photo_input = self.selenium.find_element(By.ID, photo_input_id) photo_input.send_keys(f"{tests_files_folder}/test.png") _submit_and_wait() student = Student.objects.last() self.assertEqual(student.name, "Joe Doe") self.assertEqual(student.photo.name, "photos/test.png") # Uploading non-image files is not supported by Safari with Selenium, # so upload a broken one instead. 
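# brokenimg.png carries an image extension but invalid image data, so the browser uploads it while ImageField validation rejects it, producing the error asserted below.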
photo_input = self.selenium.find_element(By.ID, photo_input_id) photo_input.send_keys(f"{tests_files_folder}/brokenimg.png") _submit_and_wait() self.assertEqual( self.selenium.find_element(By.CSS_SELECTOR, ".errorlist li").text, ( "Upload a valid image. The file you uploaded was either not an image " "or a corrupted image." ), ) # "Currently" with "Clear" checkbox and "Change" still shown. cover_field_row = self.selenium.find_element(By.CSS_SELECTOR, ".field-photo") self.assertIn("Currently", cover_field_row.text) self.assertIn("Change", cover_field_row.text) # "Clear" box works. self.selenium.find_element(By.ID, clear_checkbox_id).click() _submit_and_wait() student.refresh_from_db() self.assertEqual(student.name, "Joe Doe") self.assertEqual(student.photo.name, "")
ab6a32c2253714b1b3e596bbe62757d61a1803d43f9a391165044c61bcbd9b78
# Unittests for fixtures. import json import os import re from io import StringIO from pathlib import Path from django.core import management, serializers from django.core.exceptions import ImproperlyConfigured from django.core.serializers.base import DeserializationError from django.db import IntegrityError, transaction from django.db.models import signals from django.test import ( TestCase, TransactionTestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature, ) from .models import ( Absolute, Animal, Article, Book, Child, Circle1, Circle2, Circle3, ExternalDependency, M2MCircular1ThroughAB, M2MCircular1ThroughBC, M2MCircular1ThroughCA, M2MCircular2ThroughAB, M2MComplexA, M2MComplexB, M2MComplexCircular1A, M2MComplexCircular1B, M2MComplexCircular1C, M2MComplexCircular2A, M2MComplexCircular2B, M2MSimpleA, M2MSimpleB, M2MSimpleCircularA, M2MSimpleCircularB, M2MThroughAB, NaturalKeyWithFKDependency, NKChild, Parent, Person, RefToNKChild, Store, Stuff, Thingy, Widget, ) _cur_dir = os.path.dirname(os.path.abspath(__file__)) class TestFixtures(TestCase): def animal_pre_save_check(self, signal, sender, instance, **kwargs): self.pre_save_checks.append( ( "Count = %s (%s)" % (instance.count, type(instance.count)), "Weight = %s (%s)" % (instance.weight, type(instance.weight)), ) ) def test_duplicate_pk(self): """ This is a regression test for ticket #3790. """ # Load a fixture that uses PK=1 management.call_command( "loaddata", "sequence", verbosity=0, ) # Create a new animal. Without a sequence reset, this new object # will take a PK of 1 (on Postgres), and the save will fail. animal = Animal( name="Platypus", latin_name="Ornithorhynchus anatinus", count=2, weight=2.2, ) animal.save() self.assertGreater(animal.id, 1) def test_loaddata_not_found_fields_not_ignore(self): """ Test for ticket #9279 -- Error is raised for entries in the serialized data for fields that have been removed from the database when not ignored. """ with self.assertRaises(DeserializationError): management.call_command( "loaddata", "sequence_extra", verbosity=0, ) def test_loaddata_not_found_fields_ignore(self): """ Test for ticket #9279 -- Ignores entries in the serialized data for fields that have been removed from the database. """ management.call_command( "loaddata", "sequence_extra", ignore=True, verbosity=0, ) self.assertEqual(Animal.specimens.all()[0].name, "Lion") def test_loaddata_not_found_fields_ignore_xml(self): """ Test for ticket #19998 -- Ignore entries in the XML serialized data for fields that have been removed from the model definition. """ management.call_command( "loaddata", "sequence_extra_xml", ignore=True, verbosity=0, ) self.assertEqual(Animal.specimens.all()[0].name, "Wolf") @skipIfDBFeature("interprets_empty_strings_as_nulls") def test_pretty_print_xml(self): """ Regression test for ticket #4558 -- pretty printing of XML fixtures doesn't affect parsing of None values. """ # Load a pretty-printed XML fixture with Nulls. management.call_command( "loaddata", "pretty.xml", verbosity=0, ) self.assertIsNone(Stuff.objects.all()[0].name) self.assertIsNone(Stuff.objects.all()[0].owner) @skipUnlessDBFeature("interprets_empty_strings_as_nulls") def test_pretty_print_xml_empty_strings(self): """ Regression test for ticket #4558 -- pretty printing of XML fixtures doesn't affect parsing of None values. """ # Load a pretty-printed XML fixture with Nulls. 
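# On backends where empty strings are stored as NULL (e.g. Oracle), the null CharField comes back as an empty string while the null FK stays None.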
management.call_command( "loaddata", "pretty.xml", verbosity=0, ) self.assertEqual(Stuff.objects.all()[0].name, "") self.assertIsNone(Stuff.objects.all()[0].owner) def test_absolute_path(self): """ Regression test for ticket #6436 -- os.path.join will throw away the initial parts of a path if it encounters an absolute path. This means that if a fixture is specified as an absolute path, we need to make sure we don't discover the absolute path in every fixture directory. """ load_absolute_path = os.path.join( os.path.dirname(__file__), "fixtures", "absolute.json" ) management.call_command( "loaddata", load_absolute_path, verbosity=0, ) self.assertEqual(Absolute.objects.count(), 1) def test_relative_path(self, path=["fixtures", "absolute.json"]): relative_path = os.path.join(*path) cwd = os.getcwd() try: os.chdir(_cur_dir) management.call_command( "loaddata", relative_path, verbosity=0, ) finally: os.chdir(cwd) self.assertEqual(Absolute.objects.count(), 1) @override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, "fixtures_1")]) def test_relative_path_in_fixture_dirs(self): self.test_relative_path(path=["inner", "absolute.json"]) def test_path_containing_dots(self): management.call_command( "loaddata", "path.containing.dots.json", verbosity=0, ) self.assertEqual(Absolute.objects.count(), 1) def test_unknown_format(self): """ Test for ticket #4371 -- Loading data of an unknown format should fail Validate that error conditions are caught correctly """ msg = ( "Problem installing fixture 'bad_fix.ture1': unkn is not a known " "serialization format." ) with self.assertRaisesMessage(management.CommandError, msg): management.call_command( "loaddata", "bad_fix.ture1.unkn", verbosity=0, ) @override_settings(SERIALIZATION_MODULES={"unkn": "unexistent.path"}) def test_unimportable_serializer(self): """ Failing serializer import raises the proper error """ with self.assertRaisesMessage(ImportError, "No module named 'unexistent'"): management.call_command( "loaddata", "bad_fix.ture1.unkn", verbosity=0, ) def test_invalid_data(self): """ Test for ticket #4371 -- Loading a fixture file with invalid data using explicit filename. Test for ticket #18213 -- warning conditions are caught correctly """ msg = "No fixture data found for 'bad_fixture2'. (File format may be invalid.)" with self.assertWarnsMessage(RuntimeWarning, msg): management.call_command( "loaddata", "bad_fixture2.xml", verbosity=0, ) def test_invalid_data_no_ext(self): """ Test for ticket #4371 -- Loading a fixture file with invalid data without file extension. Test for ticket #18213 -- warning conditions are caught correctly """ msg = "No fixture data found for 'bad_fixture2'. (File format may be invalid.)" with self.assertWarnsMessage(RuntimeWarning, msg): management.call_command( "loaddata", "bad_fixture2", verbosity=0, ) def test_empty(self): """ Test for ticket #18213 -- Loading a fixture file with no data output a warning. Previously empty fixture raises an error exception, see ticket #4371. """ msg = "No fixture data found for 'empty'. (File format may be invalid.)" with self.assertWarnsMessage(RuntimeWarning, msg): management.call_command( "loaddata", "empty", verbosity=0, ) def test_error_message(self): """ Regression for #9011 - error message is correct. Change from error to warning for ticket #18213. """ msg = "No fixture data found for 'bad_fixture2'. 
(File format may be invalid.)" with self.assertWarnsMessage(RuntimeWarning, msg): management.call_command( "loaddata", "bad_fixture2", "animal", verbosity=0, ) def test_pg_sequence_resetting_checks(self): """ Test for ticket #7565 -- PostgreSQL sequence resetting checks shouldn't ascend to parent models when inheritance is used (since they are treated individually). """ management.call_command( "loaddata", "model-inheritance.json", verbosity=0, ) self.assertEqual(Parent.objects.all()[0].id, 1) self.assertEqual(Child.objects.all()[0].id, 1) def test_close_connection_after_loaddata(self): """ Test for ticket #7572 -- MySQL has a problem if the same connection is used to create tables, load data, and then query over that data. To compensate, we close the connection after running loaddata. This ensures that a new connection is opened when test queries are issued. """ management.call_command( "loaddata", "big-fixture.json", verbosity=0, ) articles = Article.objects.exclude(id=9) self.assertEqual( list(articles.values_list("id", flat=True)), [1, 2, 3, 4, 5, 6, 7, 8] ) # Just for good measure, run the same query again. # Under the influence of ticket #7572, this will # give a different result to the previous call. self.assertEqual( list(articles.values_list("id", flat=True)), [1, 2, 3, 4, 5, 6, 7, 8] ) def test_field_value_coerce(self): """ Test for tickets #8298, #9942 - Field values should be coerced into the correct type by the deserializer, not as part of the database write. """ self.pre_save_checks = [] signals.pre_save.connect(self.animal_pre_save_check) try: management.call_command( "loaddata", "animal.xml", verbosity=0, ) self.assertEqual( self.pre_save_checks, [("Count = 42 (<class 'int'>)", "Weight = 1.2 (<class 'float'>)")], ) finally: signals.pre_save.disconnect(self.animal_pre_save_check) def test_dumpdata_uses_default_manager(self): """ Regression for #11286 Dumpdata honors the default manager. Dump the current contents of the database as a JSON fixture """ management.call_command( "loaddata", "animal.xml", verbosity=0, ) management.call_command( "loaddata", "sequence.json", verbosity=0, ) animal = Animal( name="Platypus", latin_name="Ornithorhynchus anatinus", count=2, weight=2.2, ) animal.save() out = StringIO() management.call_command( "dumpdata", "fixtures_regress.animal", format="json", stdout=out, ) # Output order isn't guaranteed, so check for parts data = out.getvalue() # Get rid of artifacts like '000000002' to eliminate the differences # between different Python versions. 
data = re.sub("0{6,}[0-9]", "", data) animals_data = sorted( [ { "pk": 1, "model": "fixtures_regress.animal", "fields": { "count": 3, "weight": 1.2, "name": "Lion", "latin_name": "Panthera leo", }, }, { "pk": 10, "model": "fixtures_regress.animal", "fields": { "count": 42, "weight": 1.2, "name": "Emu", "latin_name": "Dromaius novaehollandiae", }, }, { "pk": animal.pk, "model": "fixtures_regress.animal", "fields": { "count": 2, "weight": 2.2, "name": "Platypus", "latin_name": "Ornithorhynchus anatinus", }, }, ], key=lambda x: x["pk"], ) data = sorted(json.loads(data), key=lambda x: x["pk"]) self.maxDiff = 1024 self.assertEqual(data, animals_data) def test_proxy_model_included(self): """ Regression for #11428 - Proxy models aren't included when you dumpdata """ out = StringIO() # Create an instance of the concrete class widget = Widget.objects.create(name="grommet") management.call_command( "dumpdata", "fixtures_regress.widget", "fixtures_regress.widgetproxy", format="json", stdout=out, ) self.assertJSONEqual( out.getvalue(), '[{"pk": %d, "model": "fixtures_regress.widget", ' '"fields": {"name": "grommet"}}]' % widget.pk, ) @skipUnlessDBFeature("supports_forward_references") def test_loaddata_works_when_fixture_has_forward_refs(self): """ Forward references cause fixtures not to load in MySQL (InnoDB). """ management.call_command( "loaddata", "forward_ref.json", verbosity=0, ) self.assertEqual(Book.objects.all()[0].id, 1) self.assertEqual(Person.objects.all()[0].id, 4) def test_loaddata_raises_error_when_fixture_has_invalid_foreign_key(self): """ Data with nonexistent child key references raises error. """ with self.assertRaisesMessage(IntegrityError, "Problem installing fixture"): management.call_command( "loaddata", "forward_ref_bad_data.json", verbosity=0, ) @skipUnlessDBFeature("supports_forward_references") @override_settings( FIXTURE_DIRS=[ os.path.join(_cur_dir, "fixtures_1"), os.path.join(_cur_dir, "fixtures_2"), ] ) def test_loaddata_forward_refs_split_fixtures(self): """ Regression for #17530 - should be able to cope with forward references when the fixtures are not in the same files or directories. """ management.call_command( "loaddata", "forward_ref_1.json", "forward_ref_2.json", verbosity=0, ) self.assertEqual(Book.objects.all()[0].id, 1) self.assertEqual(Person.objects.all()[0].id, 4) def test_loaddata_no_fixture_specified(self): """ Error is quickly reported when no fixtures is provided in the command line. """ msg = ( "No database fixture specified. Please provide the path of at least one " "fixture in the command line." ) with self.assertRaisesMessage(management.CommandError, msg): management.call_command( "loaddata", verbosity=0, ) def test_ticket_20820(self): """ Regression for ticket #20820 -- loaddata on a model that inherits from a model with a M2M shouldn't blow up. """ management.call_command( "loaddata", "special-article.json", verbosity=0, ) def test_ticket_22421(self): """ Regression for ticket #22421 -- loaddata on a model that inherits from a grand-parent model with a M2M but via an abstract parent shouldn't blow up. """ management.call_command( "loaddata", "feature.json", verbosity=0, ) def test_loaddata_with_m2m_to_self(self): """ Regression test for ticket #17946. 
""" management.call_command( "loaddata", "m2mtoself.json", verbosity=0, ) @override_settings( FIXTURE_DIRS=[ os.path.join(_cur_dir, "fixtures_1"), os.path.join(_cur_dir, "fixtures_1"), ] ) def test_fixture_dirs_with_duplicates(self): """ settings.FIXTURE_DIRS cannot contain duplicates in order to avoid repeated fixture loading. """ with self.assertRaisesMessage( ImproperlyConfigured, "settings.FIXTURE_DIRS contains duplicates." ): management.call_command("loaddata", "absolute.json", verbosity=0) @override_settings(FIXTURE_DIRS=[os.path.join(_cur_dir, "fixtures")]) def test_fixture_dirs_with_default_fixture_path(self): """ settings.FIXTURE_DIRS cannot contain a default fixtures directory for application (app/fixtures) in order to avoid repeated fixture loading. """ msg = ( "'%s' is a default fixture directory for the '%s' app " "and cannot be listed in settings.FIXTURE_DIRS." % (os.path.join(_cur_dir, "fixtures"), "fixtures_regress") ) with self.assertRaisesMessage(ImproperlyConfigured, msg): management.call_command("loaddata", "absolute.json", verbosity=0) @override_settings(FIXTURE_DIRS=[Path(_cur_dir) / "fixtures"]) def test_fixture_dirs_with_default_fixture_path_as_pathlib(self): """ settings.FIXTURE_DIRS cannot contain a default fixtures directory for application (app/fixtures) in order to avoid repeated fixture loading. """ msg = ( "'%s' is a default fixture directory for the '%s' app " "and cannot be listed in settings.FIXTURE_DIRS." % (os.path.join(_cur_dir, "fixtures"), "fixtures_regress") ) with self.assertRaisesMessage(ImproperlyConfigured, msg): management.call_command("loaddata", "absolute.json", verbosity=0) @override_settings( FIXTURE_DIRS=[ os.path.join(_cur_dir, "fixtures_1"), os.path.join(_cur_dir, "fixtures_2"), ] ) def test_loaddata_with_valid_fixture_dirs(self): management.call_command( "loaddata", "absolute.json", verbosity=0, ) @override_settings(FIXTURE_DIRS=[Path(_cur_dir) / "fixtures_1"]) def test_fixtures_dir_pathlib(self): management.call_command("loaddata", "inner/absolute.json", verbosity=0) self.assertQuerySetEqual(Absolute.objects.all(), [1], transform=lambda o: o.pk) class NaturalKeyFixtureTests(TestCase): def test_nk_deserialize(self): """ Test for ticket #13030 - Python based parser version natural keys deserialize with fk to inheriting model """ management.call_command( "loaddata", "model-inheritance.json", verbosity=0, ) management.call_command( "loaddata", "nk-inheritance.json", verbosity=0, ) self.assertEqual(NKChild.objects.get(pk=1).data, "apple") self.assertEqual(RefToNKChild.objects.get(pk=1).nk_fk.data, "apple") def test_nk_deserialize_xml(self): """ Test for ticket #13030 - XML version natural keys deserialize with fk to inheriting model """ management.call_command( "loaddata", "model-inheritance.json", verbosity=0, ) management.call_command( "loaddata", "nk-inheritance.json", verbosity=0, ) management.call_command( "loaddata", "nk-inheritance2.xml", verbosity=0, ) self.assertEqual(NKChild.objects.get(pk=2).data, "banana") self.assertEqual(RefToNKChild.objects.get(pk=2).nk_fk.data, "apple") def test_nk_on_serialize(self): """ Natural key requirements are taken into account when serializing models. 
""" management.call_command( "loaddata", "forward_ref_lookup.json", verbosity=0, ) out = StringIO() management.call_command( "dumpdata", "fixtures_regress.book", "fixtures_regress.person", "fixtures_regress.store", verbosity=0, format="json", use_natural_foreign_keys=True, use_natural_primary_keys=True, stdout=out, ) self.assertJSONEqual( out.getvalue(), """ [{"fields": {"main": null, "name": "Amazon"}, "model": "fixtures_regress.store"}, {"fields": {"main": null, "name": "Borders"}, "model": "fixtures_regress.store"}, {"fields": {"name": "Neal Stephenson"}, "model": "fixtures_regress.person"}, {"pk": 1, "model": "fixtures_regress.book", "fields": {"stores": [["Amazon"], ["Borders"]], "name": "Cryptonomicon", "author": ["Neal Stephenson"]}}] """, ) def test_dependency_sorting(self): """ It doesn't matter what order you mention the models, Store *must* be serialized before then Person, and both must be serialized before Book. """ sorted_deps = serializers.sort_dependencies( [("fixtures_regress", [Book, Person, Store])] ) self.assertEqual(sorted_deps, [Store, Person, Book]) def test_dependency_sorting_2(self): sorted_deps = serializers.sort_dependencies( [("fixtures_regress", [Book, Store, Person])] ) self.assertEqual(sorted_deps, [Store, Person, Book]) def test_dependency_sorting_3(self): sorted_deps = serializers.sort_dependencies( [("fixtures_regress", [Store, Book, Person])] ) self.assertEqual(sorted_deps, [Store, Person, Book]) def test_dependency_sorting_4(self): sorted_deps = serializers.sort_dependencies( [("fixtures_regress", [Store, Person, Book])] ) self.assertEqual(sorted_deps, [Store, Person, Book]) def test_dependency_sorting_5(self): sorted_deps = serializers.sort_dependencies( [("fixtures_regress", [Person, Book, Store])] ) self.assertEqual(sorted_deps, [Store, Person, Book]) def test_dependency_sorting_6(self): sorted_deps = serializers.sort_dependencies( [("fixtures_regress", [Person, Store, Book])] ) self.assertEqual(sorted_deps, [Store, Person, Book]) def test_dependency_sorting_dangling(self): sorted_deps = serializers.sort_dependencies( [("fixtures_regress", [Person, Circle1, Store, Book])] ) self.assertEqual(sorted_deps, [Circle1, Store, Person, Book]) def test_dependency_sorting_tight_circular(self): with self.assertRaisesMessage( RuntimeError, "Can't resolve dependencies for fixtures_regress.Circle1, " "fixtures_regress.Circle2 in serialized app list.", ): serializers.sort_dependencies( [("fixtures_regress", [Person, Circle2, Circle1, Store, Book])] ) def test_dependency_sorting_tight_circular_2(self): with self.assertRaisesMessage( RuntimeError, "Can't resolve dependencies for fixtures_regress.Circle1, " "fixtures_regress.Circle2 in serialized app list.", ): serializers.sort_dependencies( [("fixtures_regress", [Circle1, Book, Circle2])] ) def test_dependency_self_referential(self): with self.assertRaisesMessage( RuntimeError, "Can't resolve dependencies for fixtures_regress.Circle3 in " "serialized app list.", ): serializers.sort_dependencies([("fixtures_regress", [Book, Circle3])]) def test_dependency_sorting_long(self): with self.assertRaisesMessage( RuntimeError, "Can't resolve dependencies for fixtures_regress.Circle1, " "fixtures_regress.Circle2, fixtures_regress.Circle3 in serialized " "app list.", ): serializers.sort_dependencies( [("fixtures_regress", [Person, Circle2, Circle1, Circle3, Store, Book])] ) def test_dependency_sorting_normal(self): sorted_deps = serializers.sort_dependencies( [("fixtures_regress", [Person, ExternalDependency, Book])] ) 
        self.assertEqual(sorted_deps, [Person, Book, ExternalDependency])

    def test_normal_pk(self):
        """
        Normal primary keys work on a model with natural key capabilities.
        """
        management.call_command(
            "loaddata",
            "non_natural_1.json",
            verbosity=0,
        )
        management.call_command(
            "loaddata",
            "forward_ref_lookup.json",
            verbosity=0,
        )
        management.call_command(
            "loaddata",
            "non_natural_2.xml",
            verbosity=0,
        )
        books = Book.objects.all()
        self.assertQuerySetEqual(
            books,
            [
                "<Book: Cryptonomicon by Neal Stephenson (available at Amazon, "
                "Borders)>",
                "<Book: Ender's Game by Orson Scott Card (available at Collins "
                "Bookstore)>",
                "<Book: Permutation City by Greg Egan (available at Angus and "
                "Robertson)>",
            ],
            transform=repr,
        )


class NaturalKeyFixtureOnOtherDatabaseTests(TestCase):
    databases = {"other"}

    def test_natural_key_dependencies(self):
        """
        Natural keys with foreign keys in dependencies work in a multiple
        database setup.
        """
        management.call_command(
            "loaddata",
            "nk_with_foreign_key.json",
            database="other",
            verbosity=0,
        )
        obj = NaturalKeyWithFKDependency.objects.using("other").get()
        self.assertEqual(obj.name, "The Lord of the Rings")
        self.assertEqual(obj.author.name, "J.R.R. Tolkien")


class M2MNaturalKeyFixtureTests(TestCase):
    """Tests for ticket #14426."""

    def test_dependency_sorting_m2m_simple(self):
        """
        M2M relations without explicit through models SHOULD count as
        dependencies.

        Regression test for bugs that could be caused by flawed fixes to
        #14226, namely if M2M checks are removed from sort_dependencies
        altogether.
        """
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [M2MSimpleA, M2MSimpleB])]
        )
        self.assertEqual(sorted_deps, [M2MSimpleB, M2MSimpleA])

    def test_dependency_sorting_m2m_simple_circular(self):
        """
        Resolving circular M2M relations without explicit through models
        should fail loudly.
        """
        with self.assertRaisesMessage(
            RuntimeError,
            "Can't resolve dependencies for fixtures_regress.M2MSimpleCircularA, "
            "fixtures_regress.M2MSimpleCircularB in serialized app list.",
        ):
            serializers.sort_dependencies(
                [("fixtures_regress", [M2MSimpleCircularA, M2MSimpleCircularB])]
            )

    def test_dependency_sorting_m2m_complex(self):
        """
        M2M relations with explicit through models should NOT count as
        dependencies. The through model itself will have dependencies, though.
        """
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [M2MComplexA, M2MComplexB, M2MThroughAB])]
        )
        # Order between M2MComplexA and M2MComplexB doesn't matter. The through
        # model has dependencies to them though, so it should come last.
        self.assertEqual(sorted_deps[-1], M2MThroughAB)

    def test_dependency_sorting_m2m_complex_circular_1(self):
        """
        Circular M2M relations with explicit through models should be
        serializable.
        """
        A, B, C, AtoB, BtoC, CtoA = (
            M2MComplexCircular1A,
            M2MComplexCircular1B,
            M2MComplexCircular1C,
            M2MCircular1ThroughAB,
            M2MCircular1ThroughBC,
            M2MCircular1ThroughCA,
        )
        sorted_deps = serializers.sort_dependencies(
            [("fixtures_regress", [A, B, C, AtoB, BtoC, CtoA])]
        )
        # The dependency sorting should not result in an error, and the
        # through model should have dependencies to the other models and as
        # such come last in the list.
        self.assertEqual(sorted_deps[:3], [A, B, C])
        self.assertEqual(sorted_deps[3:], [AtoB, BtoC, CtoA])

    def test_dependency_sorting_m2m_complex_circular_2(self):
        """
        Circular M2M relations with explicit through models should be
        serializable. This tests the circularity with explicit
        natural_key.dependencies.
        """
        sorted_deps = serializers.sort_dependencies(
            [
                (
                    "fixtures_regress",
                    [M2MComplexCircular2A, M2MComplexCircular2B, M2MCircular2ThroughAB],
                )
            ]
        )
        self.assertEqual(sorted_deps[:2], [M2MComplexCircular2A, M2MComplexCircular2B])
        self.assertEqual(sorted_deps[2:], [M2MCircular2ThroughAB])

    def test_dump_and_load_m2m_simple(self):
        """
        Test serializing and deserializing back models with simple M2M
        relations.
        """
        a = M2MSimpleA.objects.create(data="a")
        b1 = M2MSimpleB.objects.create(data="b1")
        b2 = M2MSimpleB.objects.create(data="b2")
        a.b_set.add(b1)
        a.b_set.add(b2)

        out = StringIO()
        management.call_command(
            "dumpdata",
            "fixtures_regress.M2MSimpleA",
            "fixtures_regress.M2MSimpleB",
            use_natural_foreign_keys=True,
            stdout=out,
        )

        for model in [M2MSimpleA, M2MSimpleB]:
            model.objects.all().delete()

        objects = serializers.deserialize("json", out.getvalue())
        for obj in objects:
            obj.save()

        new_a = M2MSimpleA.objects.get_by_natural_key("a")
        self.assertCountEqual(new_a.b_set.all(), [b1, b2])


class TestTicket11101(TransactionTestCase):
    available_apps = ["fixtures_regress"]

    @skipUnlessDBFeature("supports_transactions")
    def test_ticket_11101(self):
        """Fixtures can be rolled back (ticket #11101)."""
        with transaction.atomic():
            management.call_command(
                "loaddata",
                "thingy.json",
                verbosity=0,
            )
            self.assertEqual(Thingy.objects.count(), 1)
            transaction.set_rollback(True)
        self.assertEqual(Thingy.objects.count(), 0)


class TestLoadFixtureFromOtherAppDirectory(TestCase):
    """
    #23612 -- fixtures path should be normalized to allow referencing relative
    paths on Windows.
    """

    current_dir = os.path.abspath(os.path.dirname(__file__))
    # relative_prefix is something like tests/fixtures_regress or
    # fixtures_regress depending on how runtests.py is invoked.
    # All path separators must be / in order to be a proper regression test on
    # Windows, so replace as appropriate.
    relative_prefix = os.path.relpath(current_dir, os.getcwd()).replace("\\", "/")
    fixtures = [relative_prefix + "/fixtures/absolute.json"]

    def test_fixtures_loaded(self):
        count = Absolute.objects.count()
        self.assertGreater(count, 0, "Fixtures not loaded properly.")
d3a35b4c40a0af80454d79ce0baedc2094b0d38b94d02ae976871cfa2111b8a2
from django.db import migrations class Migration(migrations.Migration): dependencies = [ ("migrations", "0001_initial"), ] operations = [ migrations.RemoveField( model_name="ipaddressfield", name="ip", ), ]
eec6f20549f6c4f7456c855c5b7f9a9ede7b1f0e53507a523ab11714b78bb3c8
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="IPAddressField", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("ip", models.IPAddressField(null=True, blank=True)), ], ), ]
5e96578be191d101c4c102f1bbccc8fadae91545f67b5fc15890723c2b0b7df1
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("migrations", "0001_initial"), ] operations = [ migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ( "author", models.ForeignKey("migrations.Author", models.SET_NULL, null=True), ), ], ) ]
9fc036a0be97307299340efe8ea7cca0ba5b50fe8f511279cc1892110f0f3a52
from django.db import migrations, models class Migration(migrations.Migration): operations = [ migrations.CreateModel( "Salamander", [ ("id", models.AutoField(primary_key=True)), ("size", models.IntegerField(default=0)), ("silly_field", models.BooleanField(default=False)), ], ), ]
7647bc915a6784f6becd81970ffcebd18dd31535dad0ce47e0101290d5c3deac
from django.db import migrations, models class Migration(migrations.Migration): initial = False operations = [ migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=255)), ("slug", models.SlugField(null=True)), ("age", models.IntegerField(default=0)), ("silly_field", models.BooleanField(default=False)), ], ), migrations.CreateModel( "Tribble", [ ("id", models.AutoField(primary_key=True)), ("fluffy", models.BooleanField(default=True)), ], ), migrations.AlterUniqueTogether( name="author", unique_together={("name", "slug")}, ), ]
bed7ec857907c5539eedc95ba2caf16467e6305173ba693f34bb910163cd9504
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("auth", "__first__"), ] operations = [ migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ("user", models.ForeignKey("auth.User", models.SET_NULL, null=True)), ], ) ]
d9f9bf5b0e6756afe1d1602f3bd57870d541b67da6c0d314c4a37a6c112b5e4d
from django.db import migrations, models class Migration(migrations.Migration): operations = [ migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=255)), ("slug", models.SlugField(null=True)), ("age", models.IntegerField(default=0)), ("silly_field", models.BooleanField(default=False)), ], ), migrations.CreateModel( "Tribble", [ ("id", models.AutoField(primary_key=True)), ("fluffy", models.BooleanField(default=True)), ], ), ]
96ca3be3dcd85291a9b1a577b7f89aa158f01afea272d55bfe89024f130c2de4
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("migrations", "thefirst"), ("migrations2", "0002_second"), ] operations = [ migrations.DeleteModel("Tribble"), migrations.RemoveField("Author", "silly_field"), migrations.AddField("Author", "rating", models.IntegerField(default=0)), migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ( "author", models.ForeignKey("migrations.Author", models.SET_NULL, null=True), ), ], ), ]
dc86313883eeceba4c2e9eff16afbc7cb59a3d4a7e5507ba770df9353c74c604
from django.db import migrations class Migration(migrations.Migration): initial = True operations = [ migrations.RunSQL(sql="", reverse_sql=""), ]
3d52451d7de7eb4ecd76709a2b40dfc5a99ae2fcd257ddf8926174eeb5646013
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("migrations", "0001_initial"), ] operations = [ migrations.DeleteModel("Tribble"), migrations.RemoveField("Author", "silly_field"), migrations.AddField("Author", "rating", models.IntegerField(default=0)), migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ( "author", models.ForeignKey("migrations.Author", models.SET_NULL, null=True), ), ], ), ]
a7faafa41c77804c799747b5c79ee1e5662454335054ea79d81c488ab1b28bdb
from django.db import migrations, models class Migration(migrations.Migration): initial = True operations = [ migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=255)), ("slug", models.SlugField(null=True)), ("age", models.IntegerField(default=0)), ("silly_field", models.BooleanField(default=False)), ], ), migrations.CreateModel( "Tribble", [ ("id", models.AutoField(primary_key=True)), ("fluffy", models.BooleanField(default=True)), ], ), migrations.AddField( model_name="tribble", name="bool", field=models.BooleanField(default=False), ), migrations.AlterUniqueTogether( name="author", unique_together={("name", "slug")}, ), ]
3020b09450b287938d8739a8081e224c0054a7ca49f3e61348a5f4adf63909e0
from django.db import migrations class Migration(migrations.Migration): dependencies = [("migrations", "5_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
18b1cc6a67a2329ffed2fe117513a8a4b93650901d1323eb4b78cac278c5c1b4
from django.db import migrations class Migration(migrations.Migration): dependencies = [("migrations", "1_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
afd231a41c3ae9ddbe117883143aec02676526a4c2d386066e4d159cf3257b1b
from django.db import migrations class Migration(migrations.Migration): operations = [migrations.RunPython(migrations.RunPython.noop)]
1584aca654ea8e99edd09fa0ea891be40cc44b3ee6765fccc21531a2416f510c
from django.db import migrations class Migration(migrations.Migration): dependencies = [("migrations", "3_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
6537218d39777b81834b6cce8147d83651d2b58a3e9781826ea892da9148bbb9
from django.db import migrations class Migration(migrations.Migration): dependencies = [("migrations", "4_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
be4bd67d3ef5a32774e8508620acd787d5b1982ec1c391802454217377c094ac
from django.db import migrations class Migration(migrations.Migration): replaces = [ ("migrations", "3_auto"), ("migrations", "4_auto"), ("migrations", "5_auto"), ] dependencies = [("migrations", "2_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
c0350bfb915a1329e48fe3ce1c7368b2a956cded0f3855244ff017f6dbefa7a2
from django.db import migrations class Migration(migrations.Migration): dependencies = [("migrations", "6_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
31db7d8a3aaef682f9b7f56bf4dde6abbfcff681b71484779652a962a3bbc95d
from django.db import migrations class Migration(migrations.Migration): dependencies = [("migrations", "2_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
62aff685e76862ad229ee6d55a282517ad2b254798bae25b4cfb9f55b1fe195f
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.DeleteModel("Tribble"), migrations.RemoveField("Author", "silly_field"), migrations.AddField("Author", "rating", models.IntegerField(default=0)), migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ( "author", models.ForeignKey("migrations.Author", models.SET_NULL, null=True), ), ], ), ]
d23ed38febcbbd77f1fc9e0f59128381fd0cf2705e2854753e08b068194564e9
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [("migrations", "0001_initial")] operations = [ migrations.CreateModel( "Something", [ ("id", models.AutoField(primary_key=True)), ], ) ]
47daf05b12e53abf99159c52d192af07555793127472618fded535e69a36f21a
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [("migrations", "0001_initial")] operations = [ migrations.DeleteModel("Tribble"), migrations.RemoveField("Author", "silly_field"), migrations.AddField("Author", "rating", models.IntegerField(default=0)), migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ( "author", models.ForeignKey("migrations.Author", models.SET_NULL, null=True), ), ], ), ]
f79b18121549adb03695eef07e324d8618c1c01e3fe7d4f0ac7c58a1371b769f
from django.db import migrations class Migration(migrations.Migration): initial = True operations = []
e9c8a28c5e1682292121e03d7c8a44e316dd1a7080c7943e453471b7eb9cc731
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("migrations", "0001_initial"), ] operations = [ migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ], ), migrations.RunSQL( ["SELECT * FROM migrations_book"], ["SELECT * FROM migrations_salamander"] ), ]
43eb924ba14ea21a46c2ce3e0784b45892cad7dc389a07f40005a42ff6cac967
from django.db import migrations class Migration(migrations.Migration): dependencies = [ ("migrations", "0003_third"), ] operations = [migrations.RunSQL("SELECT * FROM migrations_author WHERE id = 1")]
43e12127a63c158a38586bc64710d958d7e0f4d110ce4061703b8433066ee082
from django.db import migrations, models def grow_tail(x, y): """Grow salamander tail.""" pass def shrink_tail(x, y): """Shrink salamander tail.""" pass class Migration(migrations.Migration): initial = True operations = [ migrations.CreateModel( "Salamander", [ ("id", models.AutoField(primary_key=True)), ("tail", models.IntegerField(default=0)), ("silly_field", models.BooleanField(default=False)), ], ), migrations.RunPython(grow_tail, shrink_tail), ]
9a136b355857fd40430b7131c770c68c8fb44d6c6ba90da58b7aa8457ec7a483
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("migrations", "0002_second"), ] operations = [ migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ], ), migrations.RunSQL( ["SELECT * FROM migrations_author"], ["SELECT * FROM migrations_book"] ), ]
41a47c42949325fb586d2ee88460da8d07b1d62738990249e71f6f5c05a8d07a
from django.db import migrations, models class Migration(migrations.Migration): operations = [ migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=255)), ], ), migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ( "author", models.ForeignKey("migrations.Author", models.SET_NULL, null=True), ), ], ), ]
498e9c32f9255f352729f16e6d19244f38a79c124c34b1d96953dc4f64610453
from django.db import migrations, models class Migration(migrations.Migration): replaces = [ ("migrations", "0001_initial"), ("migrations", "0002_second"), ] operations = [ migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=255)), ("slug", models.SlugField(null=True)), ("age", models.IntegerField(default=0)), ("rating", models.IntegerField(default=0)), ], ), migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ( "author", models.ForeignKey("migrations.Author", models.SET_NULL, null=True), ), ], ), ]
d3d524e3cfed71f393e4179070182caaacd1daec56e1214005dca0f64ade169d
from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ("migrations", "0001_initial"), ] operations = [ migrations.AddField("Author", "rating", models.IntegerField(default=0)), migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ( "author", models.ForeignKey("migrations.Author", models.SET_NULL, null=True), ), ], ), ]
be3e427d1d9388c18be0ced13b3e42e157e29b816fe5e0e40bb4ffa7c83b20ec
from django.db import migrations, models class Migration(migrations.Migration): initial = True operations = [ migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=255)), ("slug", models.SlugField(null=True)), ("age", models.IntegerField(default=0)), ("silly_field", models.BooleanField(default=False)), ], ), migrations.CreateModel( "Tribble", [ ("id", models.AutoField(primary_key=True)), ("fluffy", models.BooleanField(default=True)), ], ), migrations.AlterUniqueTogether( name="author", unique_together={("name", "slug")}, ), ]
42788176637138ab408b5e9e5203bce3982d42f4d934241d9f8d06718eb97eb0
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="SillyModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("silly_field", models.BooleanField(default=False)), ], options={}, bases=(models.Model,), ), ]
4969845cc37fde6ed6d980e38a1e1f327106e2a97d6937c7a8e4c1b2c5223af2
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("migrations", "0002_second"), ] operations = [ migrations.CreateModel( name="ModelWithCustomBase", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ], options={}, bases=(models.Model,), ), migrations.CreateModel( name="UnmigratedModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ], options={}, bases=(models.Model,), ), migrations.DeleteModel( name="Author", ), migrations.DeleteModel( name="Book", ), ]
510ce908e455a9103cbf8bc5bb2ed5a6bf1aa95a0040caa07479e257de284d8c
from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ("migrations", "0001_initial"), ] operations = [ migrations.AddField( model_name="task", name="projects", field=models.ManyToManyField(to="Project"), ), ]
f7e1d68a27c386e3c81ae970a224df5a6ae08e620b4b60658a88a122a1ce9579
from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="Project", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ], ), migrations.CreateModel( name="Task", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ], ), migrations.AddField( model_name="project", name="tasks", field=models.ManyToManyField(to="Task"), ), ]
1491d504b8c7c115da32ede3d9ae8366f72b754760f20507ac685fdfb59a14ae
from django.db import migrations, models class Migration(migrations.Migration): initial = True operations = [ migrations.CreateModel( name="Entry", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("title", models.CharField(max_length=255)), ], ), ]
e44dc7a291eba2f9d523226806f29b541febc4df3eba02ff732d855bba289176
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("author_app", "0001_initial"), ] operations = [ migrations.CreateModel( name="Book", fields=[ ( "id", models.AutoField( serialize=False, auto_created=True, primary_key=True ), ), ("title", models.CharField(max_length=50)), ("author", models.ForeignKey("author_app.Author", models.CASCADE)), ], ), ]
f24b0f3da4ac21aed6bbb3ce705a350342a7ddbb3e0cedf33f10d186f10be0c9
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("author_app", "0001_initial"), ("book_app", "0001_initial"), # Forces the book table to alter the FK ] operations = [ migrations.AlterField( model_name="author", name="id", field=models.CharField(max_length=10, primary_key=True), ), ]
fe2a3498a2b2e699114bbad734ee93d5538e9c3e31367502792aaab2d52966c1
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="Author", fields=[ ( "id", models.AutoField( serialize=False, auto_created=True, primary_key=True ), ), ("name", models.CharField(max_length=50)), ], ), ]
dc294f4834c4c9457aaae393694544966796b7bb88d873a1897ee5c8833bed9c
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [("unspecified_app_with_conflict", "0001_initial")] operations = [ migrations.CreateModel( "Something", [ ("id", models.AutoField(primary_key=True)), ], ) ]
d201a46507baf87af3fed3080e8ee120fc7e2184b99c18233c06cce16c63b1cd
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [("unspecified_app_with_conflict", "0001_initial")] operations = [ migrations.DeleteModel("Tribble"), migrations.RemoveField("Author", "silly_field"), migrations.AddField("Author", "rating", models.IntegerField(default=0)), migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ], ), ]
f43c25eb594faddeefdc830e36e256123140318d9c0eccebb3a6030374c449e5
from django.db import migrations, models class Migration(migrations.Migration): operations = [ migrations.CreateModel( "OtherAuthor", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=255)), ("slug", models.SlugField(null=True)), ("age", models.IntegerField(default=0)), ("silly_field", models.BooleanField(default=False)), ], ), ]
901a6a4e33a87656543a44f9f7493eebf627c0dc12696e403329f5b2a92200f1
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("conflicting_app_with_dependencies", "0001_initial"), ] operations = [ migrations.CreateModel( "Something", [ ("id", models.AutoField(primary_key=True)), ], ) ]
e6b8bd88bc1a4500bb9e951e5f222f4b3e4ed4e69f1c52c883cc9a475713fc80
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("conflicting_app_with_dependencies", "0001_initial"), ("migrated_app", "0001_initial"), ] operations = [ migrations.DeleteModel("Tribble"), migrations.RemoveField("Author", "silly_field"), migrations.AddField("Author", "rating", models.IntegerField(default=0)), migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ], ), ]
3c93093045dc813d477deaaa95271fde67a9e9f00f12fa97731301be2922867d
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("mutate_state_b", "0001_initial"), ] operations = [ migrations.SeparateDatabaseAndState( [], [ migrations.CreateModel( name="A", fields=[ ( "id", models.AutoField( serialize=False, verbose_name="ID", auto_created=True, primary_key=True, ), ), ], ), ], ) ]
ef73c25e3c3b5219e698ab0a09d1b69399798f0cb44a18c710f4bab2f1826abe
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("lookuperror_a", "0002_a2"), ("lookuperror_b", "0001_initial"), ] operations = [ migrations.CreateModel( name="B2", fields=[ ( "id", models.AutoField( primary_key=True, verbose_name="ID", auto_created=True, serialize=False, ), ), ("a1", models.ForeignKey("lookuperror_a.A1", models.CASCADE)), ], ), ]
18650ff071c9f0dc8eaecd2bf8b6ecff6c3f0db8c9dd106a3d90af1570892e89
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("lookuperror_b", "0002_b2"), ] operations = [ migrations.CreateModel( name="B3", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, primary_key=True, auto_created=True, ), ), ], ), ]
f1e42ed2814473516370884bf43c05523687cd616d75c6e5d3eefadc99b1a772
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="B1", fields=[ ( "id", models.AutoField( serialize=False, auto_created=True, primary_key=True, verbose_name="ID", ), ), ], ), ]
cfc3acee3a32f62329313ac49bf47f2c411112c1aa76d6059fbc08762158d6d8
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("lookuperror_a", "0001_initial"), ] operations = [ migrations.CreateModel( name="A2", fields=[ ( "id", models.AutoField( verbose_name="ID", primary_key=True, serialize=False, auto_created=True, ), ), ], ), ]
808a492f5ca79b46a3f27bb08b6225083a45f0b05e1577f151522a9ac9815d2c
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("lookuperror_a", "0003_a3"), ] operations = [ migrations.CreateModel( name="A4", fields=[ ( "id", models.AutoField( auto_created=True, serialize=False, verbose_name="ID", primary_key=True, ), ), ], ), ]
c6ae1aacbffe7439615d604fab53fe568334adf92f0d8b90d3408473fe57b874
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="A1", fields=[ ( "id", models.AutoField( serialize=False, verbose_name="ID", auto_created=True, primary_key=True, ), ), ], ), ]
8858a43f7adf3d42c09b9346ef44045e5c4f39edfd3a5cae776834001aeba26c
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("lookuperror_c", "0002_c2"), ("lookuperror_b", "0002_b2"), ("lookuperror_a", "0002_a2"), ] operations = [ migrations.CreateModel( name="A3", fields=[ ( "id", models.AutoField( serialize=False, auto_created=True, primary_key=True, verbose_name="ID", ), ), ("b2", models.ForeignKey("lookuperror_b.B2", models.CASCADE)), ("c2", models.ForeignKey("lookuperror_c.C2", models.CASCADE)), ], ), ]
b886f29a319002082712c6876cdd5446e285d150c5476bb8b7167a78f94b06fd
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("mutate_state_b", "0001_initial"), ] operations = [ migrations.SeparateDatabaseAndState( [], [ migrations.AddField( model_name="B", name="added", field=models.TextField(), ), ], ) ]
94cb49170dacb362bfa1b1faf1f918b7dd1d6e6f9ecdf4cda37dfb55f5394498
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.SeparateDatabaseAndState( [], [ migrations.CreateModel( name="B", fields=[ ( "id", models.AutoField( serialize=False, verbose_name="ID", auto_created=True, primary_key=True, ), ), ], ), ], ) ]
3cea62c276298ff8c42c8ad50a198e5967397cf81a4a5cf853f5be9db9b7a98a
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("lookuperror_a", "0002_a2"), ("lookuperror_c", "0001_initial"), ] operations = [ migrations.CreateModel( name="C2", fields=[ ( "id", models.AutoField( auto_created=True, verbose_name="ID", primary_key=True, serialize=False, ), ), ("a1", models.ForeignKey("lookuperror_a.A1", models.CASCADE)), ], ), ]
cd0e713b174ebdadfbbe8deab90d7485e895b63e8e8cf2f1cbfbe887a0322d58
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("lookuperror_c", "0002_c2"), ] operations = [ migrations.CreateModel( name="C3", fields=[ ( "id", models.AutoField( auto_created=True, serialize=False, verbose_name="ID", primary_key=True, ), ), ], ), ]
c2f1822ddb1e147e44bfd3d1fdf527753e5aa7b322a382ae441cee317ef508ce
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="C1", fields=[ ( "id", models.AutoField( serialize=False, verbose_name="ID", auto_created=True, primary_key=True, ), ), ], ), ]
2a884dcfed966a01440c301a69d1c40fb4b2a8ac57aa0342f35a963afad13fe9
from django.db import migrations class Migration(migrations.Migration): dependencies = [("app1", "1_auto")]
0757fa730ee872362f1c826e3eb82de66c4f6de4e4bb45a7281c1df13af51556
from django.db import migrations class Migration(migrations.Migration): dependencies = [("app1", "2_squashed_3")]
24a5887ec9ea467752f95a620e4342dc636c843dec86b4f23ca54ed754dd438e
from django.db import migrations class Migration(migrations.Migration): replaces = [ ("app1", "2_auto"), ("app1", "3_auto"), ] dependencies = [("app1", "1_auto"), ("app2", "1_squashed_2")]
bb06ecb595aa628f6a4036afcec81a4b61b4f42dedf018b7bc8fb4ce0dd1712a
from django.db import migrations class Migration(migrations.Migration): dependencies = [("app1", "2_auto"), ("app2", "2_auto")]
88e2c062902e2f4440feee86ede4cafe350a237a9bd7605b499b4b08930151e1
from django.db import migrations class Migration(migrations.Migration): dependencies = [("app2", "1_auto")]
46a54e4f1d53688b0e9a6a19d23853f2889f8f26034f0833a6d714774feffc5a
from django.db import migrations class Migration(migrations.Migration): replaces = [ ("app2", "1_auto"), ("app2", "2_auto"), ] dependencies = [("app1", "1_auto")]
83152c11b0c7c83c0d868da8613e3e9de8e2976fecf2af1b801605a4d473fd8f
from django.db import migrations class Migration(migrations.Migration): dependencies = [("app1", "1_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
24b6561ecd8b1d547074b4cb5091600d59195a9e6e5a7b0ba1a055ef7cf6a362
from django.db import migrations class Migration(migrations.Migration): dependencies = [("app1", "3_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
0abc448a3f77bdcd03547ec9c2ae7fb9c4200e4a432d86a225d8840fe0d8b036
from django.db import migrations class Migration(migrations.Migration): replaces = [ ("app1", "2_auto"), ("app1", "3_auto"), ] dependencies = [("app1", "1_auto"), ("app2", "2_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
efbaff3077c05094c51ca4905f8f136316546e105506caf0e484feddbc2d0848
from django.db import migrations class Migration(migrations.Migration): dependencies = [("app1", "2_auto"), ("app2", "2_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
ca2fbd51a8db9c0d8c66bb79a4469654d011cb7d2c7e2328795f3be69cf6cc7e
from django.db import migrations class Migration(migrations.Migration): dependencies = [("app2", "1_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
70147167c3d745726c476b39bd32b1efb44c8976b5aa0039901a52672d961c80
from django.db import migrations class Migration(migrations.Migration): replaces = [ ("app2", "1_auto"), ("app2", "2_auto"), ] dependencies = [("app1", "1_auto")] operations = [migrations.RunPython(migrations.RunPython.noop)]
f8aba98453ff48de246fc6ed7a5975196122bb98db7b41fe9d0d6fe8444c2cf6
from django.db import migrations def add_book(apps, schema_editor): apps.get_model("migration_test_data_persistence", "Book").objects.using( schema_editor.connection.alias, ).create( title="I Love Django", ) class Migration(migrations.Migration): dependencies = [("migration_test_data_persistence", "0001_initial")] operations = [ migrations.RunPython( add_book, ), ]
5f64435a2408e47a0da2e39074f455be387ccfec0ba4856cb9229e58d0437ce8
from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="Book", fields=[ ( "id", models.AutoField( verbose_name="ID", primary_key=True, serialize=False, auto_created=True, ), ), ("title", models.CharField(max_length=100)), ], options={}, bases=(models.Model,), ), ]
e2606c1ca5f33ea1c562b82e1e4153b113e498d222d83184a1917762e159b9f0
from unittest import mock from django.db import connection, migrations try: from django.contrib.postgres.operations import ( BloomExtension, BtreeGinExtension, BtreeGistExtension, CITextExtension, CreateExtension, CryptoExtension, HStoreExtension, TrigramExtension, UnaccentExtension, ) except ImportError: BloomExtension = mock.Mock() BtreeGinExtension = mock.Mock() BtreeGistExtension = mock.Mock() CITextExtension = mock.Mock() CreateExtension = mock.Mock() HStoreExtension = mock.Mock() TrigramExtension = mock.Mock() UnaccentExtension = mock.Mock() needs_crypto_extension = False else: needs_crypto_extension = ( connection.vendor == "postgresql" and not connection.features.is_postgresql_13 ) class Migration(migrations.Migration): operations = [ BloomExtension(), BtreeGinExtension(), BtreeGistExtension(), CITextExtension(), # Ensure CreateExtension quotes extension names by creating one with a # dash in its name. CreateExtension("uuid-ossp"), # CryptoExtension is required for RandomUUID() on PostgreSQL < 13. CryptoExtension() if needs_crypto_extension else mock.Mock(), HStoreExtension(), TrigramExtension(), UnaccentExtension(), ]
21337a5c15fe91c9d2d52d8023c47fd240d99de0f2ea03bb7abaccee734047ef
from django.db import migrations, models from ..fields import ( ArrayField, BigIntegerRangeField, CICharField, CIEmailField, CITextField, DateRangeField, DateTimeRangeField, DecimalRangeField, EnumField, HStoreField, IntegerRangeField, SearchVectorField, ) from ..models import TagField class Migration(migrations.Migration): dependencies = [ ("postgres_tests", "0001_setup_extensions"), ] operations = [ migrations.CreateModel( name="CharArrayModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("field", ArrayField(models.CharField(max_length=10), size=None)), ], options={ "required_db_vendor": "postgresql", }, bases=(models.Model,), ), migrations.CreateModel( name="DateTimeArrayModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("datetimes", ArrayField(models.DateTimeField(), size=None)), ("dates", ArrayField(models.DateField(), size=None)), ("times", ArrayField(models.TimeField(), size=None)), ], options={ "required_db_vendor": "postgresql", }, bases=(models.Model,), ), migrations.CreateModel( name="HStoreModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("field", HStoreField(blank=True, null=True)), ("array_field", ArrayField(HStoreField(), null=True)), ], options={ "required_db_vendor": "postgresql", }, bases=(models.Model,), ), migrations.CreateModel( name="OtherTypesArrayModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "ips", ArrayField(models.GenericIPAddressField(), size=None, default=list), ), ("uuids", ArrayField(models.UUIDField(), size=None, default=list)), ( "decimals", ArrayField( models.DecimalField(max_digits=5, decimal_places=2), size=None, default=list, ), ), ("tags", ArrayField(TagField(), blank=True, null=True, size=None)), ( "json", ArrayField(models.JSONField(default=dict), default=list, size=None), ), ("int_ranges", ArrayField(IntegerRangeField(), null=True, blank=True)), ( "bigint_ranges", ArrayField(BigIntegerRangeField(), null=True, blank=True), ), ], options={ "required_db_vendor": "postgresql", }, bases=(models.Model,), ), migrations.CreateModel( name="IntegerArrayModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "field", ArrayField( models.IntegerField(), blank=True, default=list, size=None ), ), ], options={ "required_db_vendor": "postgresql", }, bases=(models.Model,), ), migrations.CreateModel( name="NestedIntegerArrayModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "field", ArrayField(ArrayField(models.IntegerField(), size=None), size=None), ), ], options={ "required_db_vendor": "postgresql", }, bases=(models.Model,), ), migrations.CreateModel( name="NullableIntegerArrayModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "field", ArrayField(models.IntegerField(), size=None, null=True, blank=True), ), ( "field_nested", ArrayField( ArrayField(models.IntegerField(null=True), size=None), size=None, null=True, ), ), ("order", models.IntegerField(null=True)), ], options={ "required_db_vendor": "postgresql", }, bases=(models.Model,), ), migrations.CreateModel( name="CharFieldModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, 
auto_created=True, primary_key=True, ), ), ("field", models.CharField(max_length=64)), ], options=None, bases=None, ), migrations.CreateModel( name="TextFieldModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("field", models.TextField()), ], options=None, bases=None, ), migrations.CreateModel( name="SmallAutoFieldModel", fields=[ ( "id", models.SmallAutoField(serialize=False, primary_key=True), ), ], options=None, ), migrations.CreateModel( name="BigAutoFieldModel", fields=[ ( "id", models.BigAutoField(serialize=False, primary_key=True), ), ], options=None, ), migrations.CreateModel( name="Scene", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("scene", models.TextField()), ("setting", models.CharField(max_length=255)), ], options=None, bases=None, ), migrations.CreateModel( name="Character", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("name", models.CharField(max_length=255)), ], options=None, bases=None, ), # RemovedInDjango51Warning. migrations.CreateModel( name="CITestModel", fields=[ ( "name", CICharField(primary_key=True, serialize=False, max_length=255), ), ("email", CIEmailField()), ("description", CITextField()), ("array_field", ArrayField(CITextField(), null=True)), ], options={ "required_db_vendor": "postgresql", }, bases=None, ), migrations.CreateModel( name="Line", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "scene", models.ForeignKey("postgres_tests.Scene", on_delete=models.CASCADE), ), ( "character", models.ForeignKey( "postgres_tests.Character", on_delete=models.CASCADE ), ), ("dialogue", models.TextField(blank=True, null=True)), ("dialogue_search_vector", SearchVectorField(blank=True, null=True)), ( "dialogue_config", models.CharField(max_length=100, blank=True, null=True), ), ], options={ "required_db_vendor": "postgresql", }, bases=None, ), migrations.CreateModel( name="LineSavedSearch", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "line", models.ForeignKey("postgres_tests.Line", on_delete=models.CASCADE), ), ("query", models.CharField(max_length=100)), ], options={ "required_db_vendor": "postgresql", }, ), migrations.CreateModel( name="AggregateTestModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("boolean_field", models.BooleanField(null=True)), ("char_field", models.CharField(max_length=30, blank=True)), ("text_field", models.TextField(blank=True)), ("integer_field", models.IntegerField(null=True)), ("json_field", models.JSONField(null=True)), ], options={ "required_db_vendor": "postgresql", }, ), migrations.CreateModel( name="StatTestModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("int1", models.IntegerField()), ("int2", models.IntegerField()), ( "related_field", models.ForeignKey( "postgres_tests.AggregateTestModel", models.SET_NULL, null=True, ), ), ], options={ "required_db_vendor": "postgresql", }, ), migrations.CreateModel( name="NowTestModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("when", models.DateTimeField(null=True, default=None)), ], ), migrations.CreateModel( name="UUIDTestModel", 
fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("uuid", models.UUIDField(default=None, null=True)), ], ), migrations.CreateModel( name="RangesModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("ints", IntegerRangeField(null=True, blank=True)), ("bigints", BigIntegerRangeField(null=True, blank=True)), ("decimals", DecimalRangeField(null=True, blank=True)), ("timestamps", DateTimeRangeField(null=True, blank=True)), ("timestamps_inner", DateTimeRangeField(null=True, blank=True)), ( "timestamps_closed_bounds", DateTimeRangeField(null=True, blank=True, default_bounds="[]"), ), ("dates", DateRangeField(null=True, blank=True)), ("dates_inner", DateRangeField(null=True, blank=True)), ], options={"required_db_vendor": "postgresql"}, bases=(models.Model,), ), migrations.CreateModel( name="RangeLookupsModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "parent", models.ForeignKey( "postgres_tests.RangesModel", models.SET_NULL, blank=True, null=True, ), ), ("integer", models.IntegerField(blank=True, null=True)), ("big_integer", models.BigIntegerField(blank=True, null=True)), ("float", models.FloatField(blank=True, null=True)), ("timestamp", models.DateTimeField(blank=True, null=True)), ("date", models.DateField(blank=True, null=True)), ("small_integer", models.SmallIntegerField(blank=True, null=True)), ( "decimal_field", models.DecimalField( max_digits=5, decimal_places=2, blank=True, null=True ), ), ], options={ "required_db_vendor": "postgresql", }, bases=(models.Model,), ), migrations.CreateModel( name="ArrayEnumModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "array_of_enums", ArrayField(EnumField(max_length=20), size=None), ), ], options={ "required_db_vendor": "postgresql", }, bases=(models.Model,), ), migrations.CreateModel( name="Room", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("number", models.IntegerField(unique=True)), ], ), migrations.CreateModel( name="HotelReservation", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ("room", models.ForeignKey("postgres_tests.Room", models.CASCADE)), ("datespan", DateRangeField()), ("start", models.DateTimeField()), ("end", models.DateTimeField()), ("cancelled", models.BooleanField(default=False)), ("requirements", models.JSONField(blank=True, null=True)), ], options={ "required_db_vendor": "postgresql", }, ), ]
f17df6eed1d68af907cf5eb5364fa947d2de93e1bcb8dcd5cce084477d0da2a5
import django.contrib.postgres.fields from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ("postgres_tests", "0001_initial"), ] operations = [ migrations.AddField( model_name="integerarraydefaultmodel", name="field_2", field=django.contrib.postgres.fields.ArrayField( models.IntegerField(), default=[], size=None ), preserve_default=False, ), ]
68ed4bceedb0330259cf26d2d80590d346f242adabe923f0e4a10507a396b4df
import django.contrib.postgres.fields from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="IntegerArrayDefaultModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "field", django.contrib.postgres.fields.ArrayField( models.IntegerField(), size=None ), ), ], options={}, bases=(models.Model,), ), ]
99324ed2f452a7e1ec155e06d1259340bd5d5f722e95a34c524055b3e98d3636
import django.contrib.postgres.fields from django.db import migrations, models class Migration(migrations.Migration): dependencies = [] operations = [ migrations.CreateModel( name="CharTextArrayIndexModel", fields=[ ( "id", models.AutoField( verbose_name="ID", serialize=False, auto_created=True, primary_key=True, ), ), ( "char", django.contrib.postgres.fields.ArrayField( models.CharField(max_length=10), db_index=True, size=100 ), ), ("char2", models.CharField(max_length=11, db_index=True)), ( "text", django.contrib.postgres.fields.ArrayField( models.TextField(), db_index=True ), ), ], options={}, bases=(models.Model,), ), ]
0beeef84d5be74663b603c84d7f822bc6a00a6f60d1edf35a622d649c6f73c29
from django.db import migrations def assert_foo_contenttype_not_cached(apps, schema_editor): ContentType = apps.get_model("contenttypes", "ContentType") try: content_type = ContentType.objects.get_by_natural_key( "contenttypes_tests", "foo" ) except ContentType.DoesNotExist: pass else: if not ContentType.objects.filter( app_label="contenttypes_tests", model="foo" ).exists(): raise AssertionError( "The contenttypes_tests.Foo ContentType should not be cached." ) elif content_type.model != "foo": raise AssertionError( "The cached contenttypes_tests.Foo ContentType should have " "its model set to 'foo'." ) class Migration(migrations.Migration): dependencies = [ ("contenttypes_tests", "0001_initial"), ] operations = [ migrations.RenameModel("Foo", "RenamedFoo"), migrations.RunPython( assert_foo_contenttype_not_cached, migrations.RunPython.noop ), ]
e9acb86efdce7360ba43bf94a7caf962b3384ae801d0d9869d09c8c8fa374ac8
from django.db import migrations, models class Migration(migrations.Migration): operations = [ migrations.CreateModel( "Foo", [ ("id", models.AutoField(primary_key=True)), ], ), ]